The Polarity Integration Utils library provides a hierarchical caching system with three scopes: global, integration, and user. This guide explains how to use the cache interfaces effectively in your integrations.
The caching system is organized into three scopes, from most general to most specific:
// Hierarchical cache handle passed to integrations via the lookup context.
// Reads and writes in one scope are never visible in another scope.
interface PolarityCache {
global: GlobalCache; // System-wide shared cache (visible to every integration)
integration: IntegrationCache; // Integration-specific cache (shared by all users of this integration)
user: UserCache; // User-specific cache within integration
}
/**
 * Looks up each entity, consulting the integration-scoped cache before
 * making an API call. Cached results are returned as-is; fresh results
 * are cached for 5 minutes so other users of this integration benefit.
 *
 * Fix: the original body referenced an undefined `entity` even though the
 * parameter is the `entities` array, so every call threw a ReferenceError
 * into the catch block and always fell back to fresh data.
 *
 * @param {Array<{value: string}>} entities - Entities to resolve.
 * @param {object} options - Integration options (unused here).
 * @param {object} context - Lookup context; `context.cache` may be absent.
 * @returns {Promise<Array>} One result per entity (or the fresh-data fallback).
 */
async function doLookup(entities, options, context) {
  const cache = context?.cache;
  // No cache available: degrade gracefully to uncached data.
  if (!cache) return generateFreshData();
  try {
    // Resolve every entity, preferring cached results.
    return await Promise.all(
      entities.map(async (entity) => {
        const cacheKey = `lookup_${entity.value}`;
        const cached = await cache.integration.get(cacheKey);
        if (cached) return cached;
        // Cache miss: fetch, then store for subsequent lookups.
        const result = await apiCall(entity);
        await cache.integration.set(
          cacheKey,
          result,
          { ttl: 300 } // 5 minutes
        );
        return result;
      })
    );
  } catch (error) {
    console.error('Cache error:', error);
    return generateFreshData(); // Graceful fallback
  }
}
/**
 * Reads the current user's saved UI preferences from the user-scoped cache.
 * Falls back to defaults when no cache is available, when nothing has been
 * stored yet, or when the cache read fails.
 *
 * @param {object} context - Lookup context; `context.cache` may be absent.
 * @returns {Promise<object>} Stored preferences, or the defaults.
 */
async function getUserPreferences(context) {
  const cache = context?.cache;
  if (!cache) return getDefaultPreferences();
  try {
    const stored = await cache.user.get('ui_preferences');
    if (stored) return stored;
    return getDefaultPreferences();
  } catch (error) {
    // A failed read is treated the same as an empty cache.
    return getDefaultPreferences();
  }
}
/**
 * Persists the current user's UI preferences to the user-scoped cache.
 * Best-effort: a missing cache or a failed write is logged (or silently
 * skipped) rather than surfaced to the caller.
 *
 * @param {object} preferences - Preference object to store.
 * @param {object} context - Lookup context; `context.cache` may be absent.
 * @returns {Promise<void>}
 */
async function updateUserPreferences(preferences, context) {
  const cache = context?.cache;
  // Nothing to write to: skip persisting entirely.
  if (!cache) return;

  const oneDayInSeconds = 86400;
  try {
    await cache.user.set('ui_preferences', preferences, { ttl: oneDayInSeconds });
  } catch (error) {
    console.error('Failed to save preferences:', error);
  }
}
/**
 * Increments a system-wide lookup counter in the global cache.
 * Failures are logged but never propagated, so stats tracking can
 * never break the lookup that triggered it.
 *
 * Fix: removed the TypeScript-only `as number` cast, which is a syntax
 * error in JavaScript. The cast is erased at runtime anyway, so this is
 * behavior-identical in TS and now valid in plain JS.
 *
 * @param {object} context - Lookup context; `context.cache` may be absent.
 * @returns {Promise<void>}
 */
async function trackGlobalUsage(context) {
  const cache = context?.cache;
  if (!cache) return;
  try {
    // Read-modify-write; a missing key counts as zero.
    const current = (await cache.global.get('total_lookups')) || 0;
    await cache.global.set('total_lookups', current + 1, { ttl: 86400 });
  } catch (error) {
    // Don't fail the lookup if stats tracking fails
    console.warn('Failed to update global stats:', error);
  }
}
Use this fallthrough pattern to check caches in order from most specific to most general, stopping at the first hit:
/**
 * Resolves lookup data for one entity using the cache hierarchy:
 * user scope first, then integration scope, then global scope, and
 * finally a fresh fetch. Fresh results are written to the integration
 * scope so every user of this integration can reuse them.
 *
 * @param {{value: string}} entity - Entity to resolve.
 * @param {object} context - Lookup context; `context.cache` may be absent.
 * @returns {Promise<{source: string, data: *}|*>} Tagged result, or raw
 *   fresh data when no cache exists at all.
 */
async function getLookupData(entity, context) {
  const cache = context?.cache;
  if (!cache) return fetchFreshData(entity);

  const lookupKey = `lookup_${entity.value}`;
  try {
    // 1. Most specific: this user's own cache.
    const userHit = await cache.user.get(lookupKey);
    if (userHit) return { source: 'user_cache', data: userHit };

    // 2. Shared across users of this integration.
    const integrationHit = await cache.integration.get(lookupKey);
    if (integrationHit) return { source: 'integration_cache', data: integrationHit };

    // 3. Most general: system-wide known entities.
    const globalHit = await cache.global.get(`known_entity_${entity.value}`);
    if (globalHit) return { source: 'global_cache', data: globalHit };

    // 4. Nothing cached anywhere: fetch fresh and share with all users.
    const freshData = await fetchFreshData(entity);
    await cache.integration.set(lookupKey, freshData, { ttl: 3600 });
    return { source: 'fresh', data: freshData };
  } catch (error) {
    console.error('Cache error:', error);
    return { source: 'fallback', data: await fetchFreshData(entity) };
  }
}
All cache operations support optional configuration:
// Optional third argument accepted by every set() call, in any scope.
interface CacheOptions {
ttl?: number; // Time-to-live in seconds; omit for no expiration
}
// Examples
// NOTE(review): illustrative fragment — `cache`, `key`, and fetchFreshData
// are assumed to be in scope from the surrounding integration code.
await cache.global.set('key', 'value'); // No expiration
await cache.global.set('key', 'value', { ttl: 300 }); // 5 minutes
await cache.global.set('key', 'value', { ttl: 86400 }); // 24 hours
// Reads should always tolerate cache failure and fall through to the source.
try {
const cached = await cache.integration.get(key);
return cached || (await fetchFreshData());
} catch (error) {
console.error('Cache error:', error);
return await fetchFreshData(); // Always provide fallback
}
// Key naming: include the data's purpose and a distinguishing identifier.
// Good - descriptive and unlikely to conflict
'config_api_endpoints';
'lookup_192.168.1.1';
'user_preferences_dashboard';
'rate_limit_counter_2024-01-15';
// Bad - vague and likely to conflict
'config';
'data';
'temp';
'result';
// Suggested TTL bands (all values in seconds), matched to data volatility.
// Short-lived data (5-15 minutes)
{
ttl: 300;
} // API responses that change frequently
{
ttl: 900;
} // Rate limiting counters
// Medium-lived data (1-6 hours)
{
ttl: 3600;
} // Lookup results
{
ttl: 21600;
} // Configuration data
// Long-lived data (24+ hours)
{
ttl: 86400;
} // User preferences
{
ttl: 604800;
} // Weekly statistics
// Use consistent prefixes to organize keys
// A shared `category:name` convention keeps related keys grouped and greppable.
await cache.integration.set('config:api_endpoint', endpoint);
await cache.integration.set('config:timeout', timeout);
await cache.integration.set('stats:daily_lookups', count);
await cache.integration.set('temp:processing_batch_001', batch);
// A get() may return nothing; never assume a hit.
const cached = await cache.integration.get(key);
const data = cached || getDefaultValue(); // Always provide fallback
// Or use optional chaining for objects
const config = (cached as Config) || {
timeout: 30000,
retries: 3
};
Cache operations can fail for various reasons (network issues, storage limits, etc.). Always implement proper error handling:
/**
 * Demonstrates layered cache error handling: a missing cache degrades to
 * the fallback, a failed read degrades to the fallback, and a failed
 * write is logged but otherwise ignored.
 *
 * @param {object} context - Lookup context; `context.cache` may be absent.
 * @returns {Promise<*>} Cached value, fresh data, or the fallback result.
 */
async function robustCacheOperation(context) {
  const cache = context?.cache;

  // Graceful degradation when no cache exists at all.
  if (!cache) return await fallbackOperation();

  try {
    const hit = await cache.integration.get('key');
    if (hit) return hit;

    const freshData = await fetchData();

    // Best-effort write: a caching failure must not sink the operation.
    try {
      await cache.integration.set('key', freshData, { ttl: 300 });
    } catch (cacheError) {
      console.warn('Failed to cache result:', cacheError);
    }

    return freshData;
  } catch (error) {
    console.error('Cache operation failed:', error);
    return await fallbackOperation();
  }
}
The cache interfaces provide a powerful way to improve integration performance while maintaining data consistency and user experience.