Caching Strategies Guide
Learn how to implement effective caching strategies to optimize performance, reduce API calls, and improve user experience in your Unified Commerce applications.
Prerequisites
- Basic understanding of caching concepts
- Familiarity with GraphQL
- Knowledge of your application's data access patterns
- Redis or similar caching solution (optional)
Table of Contents
- Caching Overview
- Cache Layers
- Client-Side Caching
- Server-Side Caching
- CDN Caching
- GraphQL-Specific Caching
- Cache Invalidation
- Performance Monitoring
- Best Practices
- Troubleshooting
Caching Overview
Cache Architecture
graph TD
A[Browser Cache] --> B[Service Worker]
B --> C[CDN Cache]
C --> D[API Gateway Cache]
D --> E[Application Cache]
E --> F[Database Cache]
F --> G[Database]
H[Cache Invalidation] --> A
H --> C
H --> D
H --> E
Cache Strategy Decision Tree
// Cache strategy selector
export function selectCacheStrategy(dataType: string): CacheStrategy {
const strategies: Record<string, CacheStrategy> = {
// Static content - aggressive caching
'static-assets': {
location: 'CDN',
ttl: 31536000, // 1 year
invalidation: 'version-based',
},
// Product catalog - moderate caching
'product-data': {
location: 'multi-layer',
ttl: 3600, // 1 hour
invalidation: 'event-based',
},
// Inventory - short caching with real-time updates
'inventory': {
location: 'memory',
ttl: 60, // 1 minute
invalidation: 'real-time',
},
// User-specific data - session-based caching
'user-data': {
location: 'session',
ttl: 1800, // 30 minutes
invalidation: 'on-mutation',
},
// Analytics - background refresh
'analytics': {
location: 'background',
ttl: 300, // 5 minutes
invalidation: 'stale-while-revalidate',
},
    // Fallback for data types without an explicit strategy
    'default': {
      location: 'memory',
      ttl: 300, // 5 minutes
      invalidation: 'ttl-only',
    },
  };
  return strategies[dataType] || strategies['default'];
}
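The examples throughout this guide share a handful of helper types that are never shown; a minimal sketch of what they might look like (the exact shapes are assumptions inferred from how the code below uses them):
// cache-types.ts (assumed shapes)
export interface CacheStrategy {
  location: string; // e.g. 'CDN', 'memory', 'multi-layer'
  ttl: number; // seconds
  invalidation: string; // e.g. 'event-based', 'real-time'
}
export interface CacheOptions {
  ttl?: number; // seconds
  tags?: string[]; // used for tag-based invalidation
}
export interface CacheEntry {
  value: any;
  size: number; // estimated bytes
  cachedAt: number; // epoch ms
  expiresAt: number | null; // epoch ms; null means no expiry
  lastAccessed: number; // epoch ms, used for LRU eviction
  accessCount: number;
  tags: string[];
}
export interface CacheStats {
  entries: number;
  memoryUsed: number;
  memoryLimit: number;
  hitRate: number; // 0..1
}
export interface QueryCacheOptions {
  ttl?: number; // seconds
  cache?: boolean; // set to false to bypass caching
}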
Cache Layers
Layer 1: Browser Cache
// browser-cache.ts
export class BrowserCache {
private cache: Cache | null = null;
private cacheName = 'unified-commerce-v1';
async initialize() {
if ('caches' in window) {
this.cache = await caches.open(this.cacheName);
}
}
async get(key: string): Promise<any | null> {
if (!this.cache) return null;
const response = await this.cache.match(key);
if (!response) return null;
const data = await response.json();
// Check expiration
if (data.expiresAt && Date.now() > data.expiresAt) {
await this.cache.delete(key);
return null;
}
return data.value;
}
// Cache API keys resolve to Requests/URLs, so use URL-style keys (e.g. '/api/products/123')
async set(key: string, value: any, ttl: number = 3600) {
if (!this.cache) return;
const data = {
value,
cachedAt: Date.now(),
expiresAt: Date.now() + ttl * 1000,
};
const response = new Response(JSON.stringify(data), {
headers: {
'Content-Type': 'application/json',
'Cache-Control': `max-age=${ttl}`,
},
});
await this.cache.put(key, response);
}
async invalidate(pattern?: string) {
if (!this.cache) return;
const keys = await this.cache.keys();
for (const request of keys) {
if (!pattern || request.url.includes(pattern)) {
await this.cache.delete(request);
}
}
}
async clear() {
if ('caches' in window) {
await caches.delete(this.cacheName);
await this.initialize();
}
}
}
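A short usage sketch (the `/api/products/...` key is an assumption about your API paths; the Cache API stores Request/URL keys, so use URL-style keys):
import { BrowserCache } from './browser-cache';
const browserCache = new BrowserCache();
async function loadProduct(id: string) {
  await browserCache.initialize();
  const key = `/api/products/${id}`; // URL-style key (assumed API path)
  // Serve from the browser cache when possible
  const cached = await browserCache.get(key);
  if (cached) return cached;
  const product = await fetch(key).then((r) => r.json());
  await browserCache.set(key, product, 3600); // cache for 1 hour
  return product;
}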
Layer 2: Memory Cache
// memory-cache.ts
export class MemoryCache {
  private cache: Map<string, CacheEntry> = new Map();
  private maxSize: number = 100; // Maximum number of entries
  private maxMemory: number = 50 * 1024 * 1024; // 50MB
  private currentMemory: number = 0;
  private hits: number = 0;
  private misses: number = 0;
  get(key: string): any | null {
    const entry = this.cache.get(key);
    if (!entry) {
      this.misses++;
      return null;
    }
    // Check expiration
    if (entry.expiresAt && Date.now() > entry.expiresAt) {
      this.delete(key);
      this.misses++;
      return null;
    }
    // Update LRU bookkeeping
    entry.lastAccessed = Date.now();
    entry.accessCount++;
    this.hits++;
    return entry.value;
  }
  set(key: string, value: any, options: CacheOptions = {}) {
    const size = this.estimateSize(value);
    // Check memory limit
    if (size > this.maxMemory) {
      console.warn(`Cache entry too large: ${size} bytes`);
      return;
    }
    // Remove any existing entry so its size is not counted twice
    this.delete(key);
    // Evict if necessary
    while (this.currentMemory + size > this.maxMemory || this.cache.size >= this.maxSize) {
      this.evictLRU();
    }
const entry: CacheEntry = {
value,
size,
cachedAt: Date.now(),
expiresAt: options.ttl ? Date.now() + options.ttl * 1000 : null,
lastAccessed: Date.now(),
accessCount: 0,
tags: options.tags || [],
};
this.cache.set(key, entry);
this.currentMemory += size;
}
delete(key: string): boolean {
const entry = this.cache.get(key);
if (!entry) return false;
this.cache.delete(key);
this.currentMemory -= entry.size;
return true;
}
invalidateByTag(tag: string) {
const keysToDelete: string[] = [];
this.cache.forEach((entry, key) => {
if (entry.tags.includes(tag)) {
keysToDelete.push(key);
}
});
keysToDelete.forEach(key => this.delete(key));
}
  private evictLRU() {
    let lruKey: string | null = null;
    let lruTime = Infinity;
    for (const [key, entry] of this.cache) {
      if (entry.lastAccessed < lruTime) {
        lruTime = entry.lastAccessed;
        lruKey = key;
      }
    }
    if (lruKey !== null) {
      this.delete(lruKey);
    }
  }
private estimateSize(value: any): number {
// Rough estimation of object size
return JSON.stringify(value).length * 2; // 2 bytes per character
}
  getStats(): CacheStats {
    return {
      entries: this.cache.size,
      memoryUsed: this.currentMemory,
      memoryLimit: this.maxMemory,
      hitRate: this.calculateHitRate(),
    };
  }
  private calculateHitRate(): number {
    const total = this.hits + this.misses;
    return total === 0 ? 0 : this.hits / total;
  }
}
Layer 3: Distributed Cache (Redis)
// redis-cache.ts
import Redis from 'ioredis';
export class RedisCache {
private client: Redis;
private prefix: string = 'uc:';
constructor() {
this.client = new Redis({
host: process.env.REDIS_HOST,
port: parseInt(process.env.REDIS_PORT || '6379'),
password: process.env.REDIS_PASSWORD,
db: parseInt(process.env.REDIS_DB || '0'),
retryStrategy: (times) => {
const delay = Math.min(times * 50, 2000);
return delay;
},
});
}
async get<T>(key: string): Promise<T | null> {
try {
const data = await this.client.get(this.prefix + key);
return data ? JSON.parse(data) : null;
} catch (error) {
console.error('Redis get error:', error);
return null;
}
}
async set(key: string, value: any, ttl: number = 3600): Promise<boolean> {
try {
const serialized = JSON.stringify(value);
if (ttl > 0) {
await this.client.setex(this.prefix + key, ttl, serialized);
} else {
await this.client.set(this.prefix + key, serialized);
}
return true;
} catch (error) {
console.error('Redis set error:', error);
return false;
}
}
async delete(key: string): Promise<boolean> {
try {
await this.client.del(this.prefix + key);
return true;
} catch (error) {
console.error('Redis delete error:', error);
return false;
}
}
async mget<T>(keys: string[]): Promise<(T | null)[]> {
if (keys.length === 0) return [];
const prefixedKeys = keys.map(k => this.prefix + k);
const values = await this.client.mget(prefixedKeys);
return values.map(v => v ? JSON.parse(v) : null);
}
async mset(entries: Array<{ key: string; value: any; ttl?: number }>): Promise<boolean> {
const pipeline = this.client.pipeline();
entries.forEach(({ key, value, ttl = 3600 }) => {
const serialized = JSON.stringify(value);
if (ttl > 0) {
pipeline.setex(this.prefix + key, ttl, serialized);
} else {
pipeline.set(this.prefix + key, serialized);
}
});
try {
await pipeline.exec();
return true;
} catch (error) {
console.error('Redis mset error:', error);
return false;
}
}
  async invalidatePattern(pattern: string): Promise<number> {
    // KEYS blocks Redis while it scans the keyspace; prefer SCAN in production
    const keys = await this.client.keys(this.prefix + pattern);
    if (keys.length === 0) return 0;
    return await this.client.del(...keys);
  }
async flush(): Promise<void> {
await this.client.flushdb();
}
}
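The 'multi-layer' strategy from the decision tree chains these layers: check the in-process cache, then Redis, then the origin. A minimal read-through sketch reusing the two classes above (the `loadFromOrigin` callback is an assumption):
// tiered-cache.ts (illustrative)
import { MemoryCache } from './memory-cache';
import { RedisCache } from './redis-cache';
const memory = new MemoryCache();
const redis = new RedisCache();
export async function tieredGet<T>(
  key: string,
  loadFromOrigin: () => Promise<T>,
  ttl: number = 3600
): Promise<T> {
  // Layer 1: in-process memory
  const fromMemory = memory.get(key);
  if (fromMemory !== null) return fromMemory;
  // Layer 2: shared Redis cache
  const fromRedis = await redis.get<T>(key);
  if (fromRedis !== null) {
    memory.set(key, fromRedis, { ttl: Math.min(ttl, 60) }); // keep memory copies short-lived
    return fromRedis;
  }
  // Layer 3: origin (database or upstream API)
  const value = await loadFromOrigin();
  await redis.set(key, value, ttl);
  memory.set(key, value, { ttl: Math.min(ttl, 60) });
  return value;
}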
Client-Side Caching
Apollo Client Cache
// apollo-cache.ts
import { InMemoryCache, ApolloClient } from '@apollo/client';
// Configure Apollo cache
export const cache = new InMemoryCache({
typePolicies: {
Query: {
fields: {
// Cache products by ID
product: {
read(_, { args, toReference }) {
return toReference({
__typename: 'Product',
id: args?.id,
});
},
},
// Paginated products with cursor-based pagination
products: {
keyArgs: ['filters', 'sortBy', 'sortOrder'],
merge(existing, incoming, { args }) {
const merged = existing ? { ...existing } : {};
// Merge edges
if (incoming.edges) {
merged.edges = existing?.edges ? [...existing.edges] : [];
const existingIds = new Set(
merged.edges.map((e: any) => e.node.id)
);
incoming.edges.forEach((edge: any) => {
if (!existingIds.has(edge.node.id)) {
merged.edges.push(edge);
}
});
}
// Update pageInfo
merged.pageInfo = incoming.pageInfo;
merged.totalCount = incoming.totalCount;
return merged;
},
},
},
},
Product: {
fields: {
// Cache inventory separately with TTL
inventory: {
read(existing) {
// Check if cache is stale
if (existing && existing.cachedAt) {
const age = Date.now() - existing.cachedAt;
if (age > 60000) { // 1 minute
return undefined; // Force refetch
}
}
return existing;
},
merge(_, incoming) {
return {
...incoming,
cachedAt: Date.now(),
};
},
},
},
},
Cart: {
fields: {
// Always fetch fresh cart data
items: {
merge: false,
},
},
},
},
});
// Create Apollo client with cache
export const apolloClient = new ApolloClient({
uri: process.env.NEXT_PUBLIC_GRAPHQL_ENDPOINT,
cache,
defaultOptions: {
watchQuery: {
fetchPolicy: 'cache-first',
errorPolicy: 'all',
},
query: {
fetchPolicy: 'cache-first',
errorPolicy: 'all',
},
},
});
// Cache persistence
import { persistCache } from 'apollo3-cache-persist';
export async function initializeApolloCache() {
await persistCache({
cache,
storage: window.localStorage,
maxSize: 1048576, // 1MB
debug: process.env.NODE_ENV === 'development',
});
}
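Cache restoration should complete before the first query fires, so await initializeApolloCache during app startup; a minimal sketch:
// apollo-bootstrap.ts (illustrative)
import { apolloClient, initializeApolloCache } from './apollo-cache';
export async function bootstrapApollo() {
  // Restore the persisted cache before any component issues a query
  await initializeApolloCache();
  return apolloClient; // hand the ready client to your UI layer
}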
React Query Cache
// react-query-cache.ts
import { QueryClient, useQuery, useInfiniteQuery, useMutation } from '@tanstack/react-query';
import { persistQueryClient } from '@tanstack/react-query-persist-client';
import { createSyncStoragePersister } from '@tanstack/query-sync-storage-persister';
// Configure query client with caching
export const queryClient = new QueryClient({
defaultOptions: {
queries: {
// Cache timing
      staleTime: 5 * 60 * 1000, // 5 minutes before data is considered stale
      cacheTime: 10 * 60 * 1000, // 10 minutes in cache (renamed to gcTime in React Query v5)
// Retry logic
retry: (failureCount, error: any) => {
if (error.status === 404) return false;
return failureCount < 3;
},
retryDelay: (attemptIndex) => Math.min(1000 * 2 ** attemptIndex, 30000),
// Background refetch
refetchOnWindowFocus: false,
refetchOnReconnect: 'always',
},
mutations: {
retry: false,
},
},
});
// Persist cache to localStorage
const persister = createSyncStoragePersister({
storage: window.localStorage,
throttleTime: 1000,
});
persistQueryClient({
queryClient,
persister,
maxAge: 1000 * 60 * 60 * 24, // 24 hours
});
// Custom hooks with caching
export function useProductQuery(productId: string) {
return useQuery({
queryKey: ['product', productId],
queryFn: () => fetchProduct(productId),
staleTime: 5 * 60 * 1000, // 5 minutes
cacheTime: 30 * 60 * 1000, // 30 minutes
});
}
export function useProductsQuery(filters: ProductFilters) {
return useInfiniteQuery({
queryKey: ['products', filters],
queryFn: ({ pageParam = null }) => fetchProducts({ ...filters, cursor: pageParam }),
getNextPageParam: (lastPage) => lastPage.pageInfo.endCursor,
staleTime: 2 * 60 * 1000, // 2 minutes
cacheTime: 10 * 60 * 1000, // 10 minutes
});
}
// Optimistic updates
export function useUpdateProductMutation() {
return useMutation({
mutationFn: updateProduct,
onMutate: async (newProduct) => {
// Cancel queries
await queryClient.cancelQueries({ queryKey: ['product', newProduct.id] });
// Snapshot previous value
const previousProduct = queryClient.getQueryData(['product', newProduct.id]);
// Optimistically update
queryClient.setQueryData(['product', newProduct.id], newProduct);
return { previousProduct };
},
onError: (err, newProduct, context) => {
// Rollback on error
queryClient.setQueryData(
['product', newProduct.id],
context?.previousProduct
);
},
onSettled: (data, error, variables) => {
// Invalidate and refetch
queryClient.invalidateQueries({ queryKey: ['product', variables.id] });
},
});
}
Service Worker Cache
// service-worker.js
const CACHE_NAME = 'unified-commerce-v1';
const API_CACHE = 'api-cache-v1';
// Cache strategies
const cacheStrategies = {
// Network first, fall back to cache
networkFirst: async (request) => {
try {
const networkResponse = await fetch(request);
if (networkResponse.ok) {
const cache = await caches.open(API_CACHE);
cache.put(request, networkResponse.clone());
}
return networkResponse;
} catch (error) {
const cachedResponse = await caches.match(request);
if (cachedResponse) {
return cachedResponse;
}
throw error;
}
},
// Cache first, fall back to network
cacheFirst: async (request) => {
const cachedResponse = await caches.match(request);
if (cachedResponse) {
// Update cache in background
fetch(request).then((response) => {
if (response.ok) {
caches.open(API_CACHE).then((cache) => {
cache.put(request, response);
});
}
});
return cachedResponse;
}
const networkResponse = await fetch(request);
if (networkResponse.ok) {
const cache = await caches.open(API_CACHE);
cache.put(request, networkResponse.clone());
}
return networkResponse;
},
// Stale while revalidate
staleWhileRevalidate: async (request) => {
const cachedResponse = await caches.match(request);
const fetchPromise = fetch(request).then((response) => {
if (response.ok) {
caches.open(API_CACHE).then((cache) => {
cache.put(request, response.clone());
});
}
return response;
});
return cachedResponse || fetchPromise;
},
};
// Fetch event handler
self.addEventListener('fetch', (event) => {
const { request } = event;
const url = new URL(request.url);
// Skip non-GET requests
if (request.method !== 'GET') return;
// Apply different strategies based on URL
if (url.pathname.startsWith('/api/products')) {
event.respondWith(cacheStrategies.staleWhileRevalidate(request));
} else if (url.pathname.startsWith('/api/cart')) {
event.respondWith(cacheStrategies.networkFirst(request));
} else if (url.pathname.match(/\.(js|css|png|jpg|jpeg|svg|gif)$/)) {
event.respondWith(cacheStrategies.cacheFirst(request));
}
});
// Cache cleanup
self.addEventListener('activate', (event) => {
event.waitUntil(
caches.keys().then((cacheNames) => {
return Promise.all(
cacheNames
.filter((cacheName) => cacheName !== CACHE_NAME && cacheName !== API_CACHE)
.map((cacheName) => caches.delete(cacheName))
);
})
);
});
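None of these strategies run until the service worker is registered from the page; a minimal registration sketch (the `/service-worker.js` path is an assumption about where the file is served):
// register-service-worker.ts
export function registerServiceWorker() {
  if ('serviceWorker' in navigator) {
    window.addEventListener('load', async () => {
      try {
        const registration = await navigator.serviceWorker.register('/service-worker.js');
        console.log('Service worker registered with scope:', registration.scope);
      } catch (error) {
        console.error('Service worker registration failed:', error);
      }
    });
  }
}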
Server-Side Caching
API Response Caching
// api-cache-middleware.ts
import { Request, Response, NextFunction } from 'express';
import { RedisCache } from './redis-cache';
const cache = new RedisCache();
export function cacheMiddleware(options: CacheOptions = {}) {
return async (req: Request, res: Response, next: NextFunction) => {
// Skip non-GET requests
if (req.method !== 'GET') {
return next();
}
// Generate cache key
const cacheKey = generateCacheKey(req);
// Check cache
const cached = await cache.get(cacheKey);
if (cached) {
res.set('X-Cache', 'HIT');
res.set('Cache-Control', `public, max-age=${options.ttl || 300}`);
return res.json(cached);
}
// Store original send function
const originalSend = res.json.bind(res);
// Override send to cache response
res.json = function(data: any) {
res.set('X-Cache', 'MISS');
// Cache successful responses
if (res.statusCode === 200) {
cache.set(cacheKey, data, options.ttl || 300);
}
return originalSend(data);
};
next();
};
}
function generateCacheKey(req: Request): string {
  const { path, query } = req;
  const queryString = JSON.stringify(sortObject(query));
  // Include user or tenant context here if responses are personalized
  return `api:${path}:${queryString}`;
}
function sortObject(obj: any): any {
if (!obj || typeof obj !== 'object') return obj;
return Object.keys(obj)
.sort()
.reduce((result: any, key) => {
result[key] = obj[key];
return result;
}, {});
}
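Mounting the middleware on read-heavy routes might look like this (route paths, TTLs, and the data-access helpers are assumptions):
// server.ts (illustrative)
import express from 'express';
import { cacheMiddleware } from './api-cache-middleware';
const app = express();
// Product catalog changes slowly: cache for 10 minutes
app.get('/api/products', cacheMiddleware({ ttl: 600 }), async (req, res) => {
  res.json(await loadProducts()); // hypothetical data-access helper
});
// Inventory changes quickly: cache for 1 minute
app.get('/api/inventory/:id', cacheMiddleware({ ttl: 60 }), async (req, res) => {
  res.json(await loadInventory(req.params.id)); // hypothetical helper
});
app.listen(3000);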
Database Query Caching
// database-cache.ts
export class DatabaseCache {
private cache: RedisCache;
private queryCache: Map<string, any> = new Map();
constructor() {
this.cache = new RedisCache();
}
async query<T>(
sql: string,
params: any[],
options: QueryCacheOptions = {}
): Promise<T> {
const cacheKey = this.generateQueryKey(sql, params);
// Check memory cache first
if (this.queryCache.has(cacheKey)) {
const cached = this.queryCache.get(cacheKey);
if (Date.now() - cached.timestamp < (options.ttl || 60) * 1000) {
return cached.data;
}
}
// Check Redis cache
const redisCached = await this.cache.get<T>(cacheKey);
if (redisCached) {
// Populate memory cache
this.queryCache.set(cacheKey, {
data: redisCached,
timestamp: Date.now(),
});
return redisCached;
}
// Execute query
const result = await this.executeQuery<T>(sql, params);
// Cache result
if (options.cache !== false) {
await this.cache.set(cacheKey, result, options.ttl || 300);
this.queryCache.set(cacheKey, {
data: result,
timestamp: Date.now(),
});
}
return result;
}
async invalidateQuery(sql: string, params?: any[]) {
if (params) {
const key = this.generateQueryKey(sql, params);
this.queryCache.delete(key);
await this.cache.delete(key);
} else {
// Invalidate all queries matching the SQL pattern
const pattern = `query:${this.hashString(sql)}:*`;
await this.cache.invalidatePattern(pattern);
// Clear memory cache entries
for (const key of this.queryCache.keys()) {
if (key.startsWith(`query:${this.hashString(sql)}`)) {
this.queryCache.delete(key);
}
}
}
}
private generateQueryKey(sql: string, params: any[]): string {
const sqlHash = this.hashString(sql);
const paramsHash = this.hashString(JSON.stringify(params));
return `query:${sqlHash}:${paramsHash}`;
}
private hashString(str: string): string {
let hash = 0;
for (let i = 0; i < str.length; i++) {
const char = str.charCodeAt(i);
hash = ((hash << 5) - hash) + char;
hash = hash & hash; // Convert to 32bit integer
}
return hash.toString(36);
}
private async executeQuery<T>(sql: string, params: any[]): Promise<T> {
// Execute actual database query
// Implementation depends on your database driver
throw new Error('Not implemented');
}
}
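Usage follows a read-through pattern: reads go through query, and writes invalidate the statements they affect (the SQL and the `db` write client are assumptions):
// product-repository.ts (illustrative)
import { DatabaseCache } from './database-cache';
const dbCache = new DatabaseCache();
const PRODUCT_BY_ID = 'SELECT * FROM products WHERE id = $1';
export async function getProduct(id: string) {
  return dbCache.query(PRODUCT_BY_ID, [id], { ttl: 300 }); // cache for 5 minutes
}
export async function updateProductPrice(id: string, price: number) {
  await db.query('UPDATE products SET price = $1 WHERE id = $2', [price, id]); // your DB client (assumed)
  // Drop every cached result of the SELECT above, regardless of parameters
  await dbCache.invalidateQuery(PRODUCT_BY_ID);
}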
CDN Caching
Cloudflare Configuration
// cloudflare-cache.ts
export class CloudflareCache {
async purge(urls: string[]): Promise<void> {
const response = await fetch(
`https://api.cloudflare.com/client/v4/zones/${process.env.CF_ZONE_ID}/purge_cache`,
{
method: 'POST',
headers: {
'Authorization': `Bearer ${process.env.CF_API_TOKEN}`,
'Content-Type': 'application/json',
},
body: JSON.stringify({ files: urls }),
}
);
if (!response.ok) {
throw new Error('Failed to purge Cloudflare cache');
}
}
async purgeEverything(): Promise<void> {
const response = await fetch(
`https://api.cloudflare.com/client/v4/zones/${process.env.CF_ZONE_ID}/purge_cache`,
{
method: 'POST',
headers: {
'Authorization': `Bearer ${process.env.CF_API_TOKEN}`,
'Content-Type': 'application/json',
},
body: JSON.stringify({ purge_everything: true }),
}
);
if (!response.ok) {
throw new Error('Failed to purge Cloudflare cache');
}
}
  async purgeByTag(tags: string[]): Promise<void> {
    // Tag purging matches Cache-Tag response headers set by the origin and is
    // limited to Cloudflare Enterprise plans
    const response = await fetch(
`https://api.cloudflare.com/client/v4/zones/${process.env.CF_ZONE_ID}/purge_cache`,
{
method: 'POST',
headers: {
'Authorization': `Bearer ${process.env.CF_API_TOKEN}`,
'Content-Type': 'application/json',
},
body: JSON.stringify({ tags }),
}
);
if (!response.ok) {
throw new Error('Failed to purge Cloudflare cache by tags');
}
}
}
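For purgeByTag to have anything to match, origin responses must carry a Cache-Tag header listing the tags Cloudflare should index. A sketch of tagging responses from an Express handler (the route, loader, and tag names are assumptions):
// cdn-tagging.ts (illustrative)
app.get('/api/products/:id', async (req, res) => {
  const product = await loadProduct(req.params.id); // hypothetical loader
  // Tag the response so purgeByTag(['products', `product-${req.params.id}`]) can target it
  res.set('Cache-Tag', `product-${req.params.id},products`);
  res.set('Cache-Control', 'public, max-age=3600');
  res.json(product);
});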
// Edge Workers for dynamic caching
export const edgeWorker = `
addEventListener('fetch', event => {
  event.respondWith(handleRequest(event))
})
async function handleRequest(event) {
  const request = event.request
  const cache = caches.default
  const cacheKey = new Request(request.url, request)
// Check cache
let response = await cache.match(cacheKey)
if (!response) {
// Fetch from origin
response = await fetch(request)
// Cache successful responses
if (response.status === 200) {
const headers = new Headers(response.headers)
headers.set('Cache-Control', 'public, max-age=300')
response = new Response(response.body, {
status: response.status,
statusText: response.statusText,
headers: headers
})
event.waitUntil(cache.put(cacheKey, response.clone()))
}
}
return response
}
`;
GraphQL-Specific Caching
Query Result Caching
// graphql-cache.ts
import DataLoader from 'dataloader';
import { GraphQLResolveInfo } from 'graphql';
// Assumes a shared `cache` instance (e.g. the RedisCache above) and a
// `generateCacheKey(info)` helper are in scope for this module
export class GraphQLCache {
private loaders: Map<string, DataLoader<any, any>> = new Map();
// Create DataLoader for batching and caching
createLoader<K, V>(
batchFn: (keys: readonly K[]) => Promise<V[]>,
options?: DataLoader.Options<K, V>
): DataLoader<K, V> {
return new DataLoader(batchFn, {
cache: true,
maxBatchSize: 100,
batchScheduleFn: (callback) => setTimeout(callback, 10),
...options,
});
}
// Cache directive implementation
cacheDirective(ttl: number = 300) {
return (
target: any,
propertyName: string,
descriptor: PropertyDescriptor
) => {
const originalMethod = descriptor.value;
descriptor.value = async function(...args: any[]) {
const info: GraphQLResolveInfo = args[3];
const cacheKey = generateCacheKey(info);
// Check cache
const cached = await cache.get(cacheKey);
if (cached) {
return cached;
}
// Execute resolver
const result = await originalMethod.apply(this, args);
// Cache result
await cache.set(cacheKey, result, ttl);
return result;
};
return descriptor;
};
}
// Automatic Persisted Queries
async handlePersistedQuery(
queryHash: string,
query?: string
): Promise<string | null> {
const cacheKey = `apq:${queryHash}`;
if (query) {
// Store query
await cache.set(cacheKey, query, 86400); // 24 hours
return query;
} else {
// Retrieve query
return await cache.get(cacheKey);
}
}
}
// Usage in resolvers
const graphQLCache = new GraphQLCache();
// Product loader with caching
const productLoader = graphQLCache.createLoader(
async (ids: readonly string[]) => {
const products = await fetchProductsByIds(ids);
return ids.map(id => products.find(p => p.id === id));
}
);
// Note: TypeScript decorators only attach to class members, so cacheDirective
// works on class-based resolvers. For a plain resolver map, call the cache
// explicitly inside the resolver instead:
export const resolvers = {
  Query: {
    // Cached query (10 minutes)
    async products(_, { filters }, { dataSources }) {
      const cacheKey = `products:${JSON.stringify(filters)}`;
      const cached = await cache.get(cacheKey);
      if (cached) return cached;
      const result = await dataSources.productAPI.getProducts(filters);
      await cache.set(cacheKey, result, 600);
      return result;
    },
    // Using DataLoader for per-request batching and caching
    async product(_, { id }) {
      return productLoader.load(id);
    },
  },
  Product: {
    // Cached field resolver (5 minutes)
    async inventory(product) {
      const cacheKey = `product:${product.id}:inventory`;
      const cached = await cache.get(cacheKey);
      if (cached) return cached;
      const result = await fetchInventory(product.id);
      await cache.set(cacheKey, result, 300);
      return result;
    },
  },
};
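How handlePersistedQuery fits the request path: clients send the query's SHA-256 hash in extensions.persistedQuery, and the server resolves it to the stored text (or stores it when the full query accompanies the hash). A hedged sketch of that flow as Express-style middleware (the route and error shape are assumptions; Apollo Server ships this behavior built in):
// apq-middleware.ts (illustrative)
app.post('/graphql', async (req, res, next) => {
  const { query, extensions } = req.body;
  const hash = extensions?.persistedQuery?.sha256Hash;
  if (hash) {
    const resolvedQuery = await graphQLCache.handlePersistedQuery(hash, query);
    if (!resolvedQuery) {
      // The client must retry with the full query text
      return res.json({ errors: [{ message: 'PersistedQueryNotFound' }] });
    }
    req.body.query = resolvedQuery;
  }
  next(); // hand off to the GraphQL execution middleware
});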
Cache Invalidation
Event-Based Invalidation
// cache-invalidator.ts
import { EventEmitter } from 'events';
// Assumes shared singletons for the layers defined earlier, e.g.
// import { redisCache, memoryCache, cloudflareCache } from './cache-instances';
export class CacheInvalidator extends EventEmitter {
  private strategies: Map<string, InvalidationStrategy> = new Map();
constructor() {
super();
this.setupEventListeners();
}
private setupEventListeners() {
// Product events
this.on('product:created', (product) => {
this.invalidateProductCaches(product);
});
this.on('product:updated', (product) => {
this.invalidateProductCaches(product);
});
this.on('product:deleted', (productId) => {
this.invalidateProductCaches({ id: productId });
});
// Order events
this.on('order:created', (order) => {
this.invalidateOrderCaches(order);
this.invalidateInventoryCaches(order.items);
});
// Inventory events
this.on('inventory:updated', (inventory) => {
this.invalidateInventoryCaches([inventory]);
});
}
private async invalidateProductCaches(product: any) {
const patterns = [
`product:${product.id}`,
`products:*`,
`category:${product.categoryId}:*`,
];
await this.invalidatePatterns(patterns);
// Invalidate CDN
await this.invalidateCDN([
`/api/products/${product.id}`,
`/api/products`,
]);
}
private async invalidateOrderCaches(order: any) {
const patterns = [
`order:${order.id}`,
`orders:${order.customerId}:*`,
`analytics:*`,
];
await this.invalidatePatterns(patterns);
}
private async invalidateInventoryCaches(items: any[]) {
const patterns = items.flatMap(item => [
`inventory:${item.productId}`,
`product:${item.productId}:inventory`,
]);
await this.invalidatePatterns(patterns);
}
  private async invalidatePatterns(patterns: string[]) {
    for (const pattern of patterns) {
      // Invalidate the shared Redis layer
      await redisCache.invalidatePattern(pattern);
      // Invalidate the in-process layer (assumes a pattern helper; the MemoryCache
      // above only ships invalidateByTag, so add one or tag entries accordingly)
      memoryCache.invalidatePattern(pattern);
    }
    // Notify other services once per batch
    this.emit('cache:invalidated', { patterns });
  }
private async invalidateCDN(paths: string[]) {
await cloudflareCache.purge(paths);
}
// Manual invalidation
async invalidate(keys: string | string[]) {
const keysArray = Array.isArray(keys) ? keys : [keys];
for (const key of keysArray) {
await redisCache.delete(key);
memoryCache.delete(key);
}
this.emit('cache:invalidated', { keys: keysArray });
}
// Smart invalidation based on dependencies
async invalidateWithDependencies(key: string) {
const dependencies = await this.getDependencies(key);
const allKeys = [key, ...dependencies];
await this.invalidate(allKeys);
}
private async getDependencies(key: string): Promise<string[]> {
// Implement dependency tracking
// This could be stored in Redis or a graph database
return [];
}
}
export const cacheInvalidator = new CacheInvalidator();
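Services emit domain events on the invalidator after successful writes; a short usage sketch (the `productService` call is an assumption):
// product-write-path.ts (illustrative)
import { cacheInvalidator } from './cache-invalidator';
export async function saveProduct(input: any) {
  const product = await productService.update(input); // hypothetical service call
  // Fan the change out to every cache layer
  cacheInvalidator.emit('product:updated', product);
  return product;
}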
Performance Monitoring
Cache Metrics
// cache-metrics.ts
export class CacheMetrics {
private metrics: Map<string, Metric> = new Map();
private interval: NodeJS.Timeout | null = null;
startMonitoring() {
this.interval = setInterval(() => {
this.collectMetrics();
this.reportMetrics();
}, 60000); // Every minute
}
stopMonitoring() {
if (this.interval) {
clearInterval(this.interval);
this.interval = null;
}
}
private collectMetrics() {
// Memory cache metrics
const memoryStats = memoryCache.getStats();
this.recordMetric('memory_cache_size', memoryStats.entries);
this.recordMetric('memory_cache_memory', memoryStats.memoryUsed);
this.recordMetric('memory_cache_hit_rate', memoryStats.hitRate);
// Redis metrics
this.collectRedisMetrics();
    // API cache metrics (collectAPICacheMetrics implementation omitted here)
    this.collectAPICacheMetrics();
}
  private async collectRedisMetrics() {
    // `redisClient` is the underlying ioredis client; `parseRedisInfo` converts the
    // INFO text into a key/value object (implementation omitted here)
    const info = await redisClient.info('stats');
    const stats = this.parseRedisInfo(info);
    this.recordMetric('redis_hits', stats.keyspace_hits);
    this.recordMetric('redis_misses', stats.keyspace_misses);
    this.recordMetric('redis_hit_rate',
      stats.keyspace_hits / (stats.keyspace_hits + stats.keyspace_misses)
    );
  }
private recordMetric(name: string, value: number) {
if (!this.metrics.has(name)) {
this.metrics.set(name, {
name,
values: [],
total: 0,
count: 0,
});
}
const metric = this.metrics.get(name)!;
metric.values.push({ value, timestamp: Date.now() });
metric.total += value;
metric.count++;
// Keep only last hour of data
const oneHourAgo = Date.now() - 3600000;
metric.values = metric.values.filter(v => v.timestamp > oneHourAgo);
}
getMetrics(): MetricsSummary {
const summary: MetricsSummary = {};
this.metrics.forEach((metric, name) => {
summary[name] = {
current: metric.values[metric.values.length - 1]?.value || 0,
average: metric.total / metric.count,
min: Math.min(...metric.values.map(v => v.value)),
max: Math.max(...metric.values.map(v => v.value)),
};
});
return summary;
}
private reportMetrics() {
const metrics = this.getMetrics();
// Log to console
console.log('Cache Metrics:', metrics);
// Send to monitoring service
this.sendToMonitoring(metrics);
}
private sendToMonitoring(metrics: MetricsSummary) {
// Send to Datadog, CloudWatch, etc.
fetch('/api/metrics', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
service: 'cache',
metrics,
timestamp: Date.now(),
}),
});
}
}
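Start the collector once at application startup and expose the summary for dashboards or health checks (the route path and `app` instance are assumptions):
// metrics-bootstrap.ts (illustrative)
import { CacheMetrics } from './cache-metrics';
const cacheMetrics = new CacheMetrics();
cacheMetrics.startMonitoring();
// Snapshot endpoint for dashboards and health checks
app.get('/internal/cache-metrics', (req, res) => {
  res.json(cacheMetrics.getMetrics());
});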
Best Practices
1. Cache Key Strategy
// cache-keys.ts
export class CacheKeyGenerator {
// Consistent key generation
static generate(
type: string,
id: string,
params?: Record<string, any>
): string {
const sortedParams = params ? this.sortParams(params) : '';
return `${type}:${id}${sortedParams ? ':' + sortedParams : ''}`;
}
private static sortParams(params: Record<string, any>): string {
return Object.keys(params)
.sort()
.map(key => `${key}=${params[key]}`)
.join(':');
}
// Versioned keys for breaking changes
static versioned(key: string, version: number = 1): string {
return `v${version}:${key}`;
}
// User-specific keys
static userScoped(userId: string, key: string): string {
return `user:${userId}:${key}`;
}
// Time-based keys for rotating caches
static timeScoped(key: string, window: number = 3600): string {
const bucket = Math.floor(Date.now() / 1000 / window);
return `${key}:${bucket}`;
}
}
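Example keys the generator produces:
CacheKeyGenerator.generate('product', 'prod_123');
// -> 'product:prod_123'
CacheKeyGenerator.generate('products', 'list', { category: 'shoes', page: 2 });
// -> 'products:list:category=shoes:page=2'
CacheKeyGenerator.versioned(CacheKeyGenerator.generate('product', 'prod_123'), 2);
// -> 'v2:product:prod_123'
CacheKeyGenerator.timeScoped('analytics:top-sellers', 3600);
// -> 'analytics:top-sellers:<hourly bucket number>'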
2. Cache Warming
// cache-warmer.ts
export class CacheWarmer {
async warmCache() {
console.log('Starting cache warming...');
// Warm product cache
await this.warmProductCache();
// Warm category cache
await this.warmCategoryCache();
// Warm popular queries
await this.warmPopularQueries();
console.log('Cache warming complete');
}
private async warmProductCache() {
const products = await fetchTopProducts(100);
for (const product of products) {
const key = CacheKeyGenerator.generate('product', product.id);
await cache.set(key, product, 3600);
}
}
private async warmCategoryCache() {
const categories = await fetchAllCategories();
for (const category of categories) {
const key = CacheKeyGenerator.generate('category', category.id);
await cache.set(key, category, 7200);
}
}
private async warmPopularQueries() {
const queries = [
{ type: 'products', filters: { featured: true } },
{ type: 'products', filters: { onSale: true } },
{ type: 'categories', filters: {} },
];
for (const query of queries) {
const result = await executeQuery(query);
const key = CacheKeyGenerator.generate(query.type, 'list', query.filters);
await cache.set(key, result, 1800);
}
}
}
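Warming is typically run once at startup and then on a schedule shorter than the TTLs it populates, so hot keys never expire cold; a minimal sketch (the 30-minute interval is an assumption):
// warm-schedule.ts (illustrative)
import { CacheWarmer } from './cache-warmer';
const warmer = new CacheWarmer();
export async function scheduleCacheWarming() {
  await warmer.warmCache(); // warm on boot
  setInterval(() => {
    warmer.warmCache().catch((err) => console.error('Cache warming failed:', err));
  }, 30 * 60 * 1000); // refresh every 30 minutes
}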
Troubleshooting
Common Issues
1. Cache Stampede
Problem: When a hot cache entry expires, many concurrent requests all hit the origin at once.
Solution:
// Implement lock-based cache refresh
async function getWithLock(key: string, fetcher: () => Promise<any>) {
  const lockKey = `lock:${key}`;
  const lock = await acquireLock(lockKey, 5000);
  if (!lock) {
    // Another request is already refreshing this key: wait briefly, then retry
    await sleep(100);
    const cached = await cache.get(key);
    return cached ?? getWithLock(key, fetcher);
  }
  try {
    // Re-check the cache in case another worker refreshed it before we got the lock
    const cached = await cache.get(key);
    if (cached !== null) return cached;
    const value = await fetcher();
    await cache.set(key, value);
    return value;
  } finally {
    await releaseLock(lockKey);
  }
}
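The acquireLock, releaseLock, and sleep helpers above are assumed; a minimal Redis-based sketch using the SET NX PX pattern (a single-instance lock, not a full Redlock implementation):
import Redis from 'ioredis';
const redis = new Redis(); // reuse your existing connection in practice
async function acquireLock(lockKey: string, ttlMs: number): Promise<boolean> {
  // NX: only set if the key does not exist; PX: auto-expire after ttlMs
  const result = await redis.set(lockKey, '1', 'PX', ttlMs, 'NX');
  return result === 'OK';
}
async function releaseLock(lockKey: string): Promise<void> {
  // Deleting unconditionally can release a lock another worker now holds once the
  // TTL has lapsed; store a token and check it in a Lua script for stricter safety
  await redis.del(lockKey);
}
function sleep(ms: number): Promise<void> {
  return new Promise((resolve) => setTimeout(resolve, ms));
}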
2. Cache Inconsistency
Problem: Different cache layers hold different versions of the same data.
Solution:
// Implement cache versioning
const CACHE_VERSION = 2;
function versionedKey(key: string): string {
return `v${CACHE_VERSION}:${key}`;
}
// Invalidate old versions
async function invalidateOldVersions(key: string) {
for (let v = 1; v < CACHE_VERSION; v++) {
await cache.delete(`v${v}:${key}`);
}
}
3. Memory Leaks
Problem: The cache grows without bound and eventually exhausts memory.
Solution:
// Implement TTL and size limits
class BoundedCache {
  private entries = new Map<string, { value: any; expiresAt: number }>();
  private maxSize = 1000;
  private maxAge = 3600000; // 1 hour
  set(key: string, value: any) {
    // Enforce the size limit: evict the oldest entry (Map preserves insertion order)
    if (this.entries.size >= this.maxSize) {
      const oldestKey = this.entries.keys().next().value;
      if (oldestKey !== undefined) this.entries.delete(oldestKey);
    }
    // Always store with an expiry so entries cannot live forever
    this.entries.set(key, { value, expiresAt: Date.now() + this.maxAge });
  }
}