💾
Intermediate
11 min read — Memoization & Caching Strategies
Caching computed values and API responses for better performance
Understanding Memoization
Memoization is an optimization technique that stores the results of expensive function calls and returns the cached result when the same inputs occur again, avoiding redundant computations.
Basic Memoization
// ❌ Bad: Recomputing expensive operations.
// Naive double recursion recomputes the same subproblems over and over —
// roughly O(2^n) calls for input n.
function fibonacci(n) {
if (n <= 1) return n;
return fibonacci(n - 1) + fibonacci(n - 2);
}
fibonacci(40); // Takes several seconds (kept intentionally as the "bad" demo)
// ✅ Good: Memoize the function
/**
 * Wrap `fn` so repeated calls with the same arguments return a cached result.
 * The cache key is JSON.stringify(args), so only JSON-representable
 * arguments are distinguished reliably. `this` is forwarded to `fn`.
 */
function memoize(fn) {
  const cache = new Map();
  return function (...args) {
    const key = JSON.stringify(args);
    if (!cache.has(key)) {
      cache.set(key, fn.apply(this, args));
    }
    return cache.get(key);
  };
}
// Memoized Fibonacci. Note: the recursive calls go through `memoizedFib`
// (not the inner `fibonacci` name), so every sub-result lands in the shared
// cache and the O(2^n) recursion collapses to O(n) cached lookups.
const memoizedFib = memoize(function fibonacci(n) {
if (n <= 1) return n;
return memoizedFib(n - 1) + memoizedFib(n - 2);
});
memoizedFib(40); // Returns instantly after first call
Advanced Memoization with Options
/**
 * Memoize `fn` with a bounded, optionally time-limited cache.
 *
 * @param {Function} fn - function to memoize (`this` is forwarded).
 * @param {object} [options]
 * @param {number} [options.maxSize=100] - maximum number of cached entries.
 * @param {number} [options.ttl=Infinity] - per-entry time to live, in ms.
 * @param {Function} [options.keyGenerator=JSON.stringify] - maps the args
 *   array to a cache key.
 * @returns {Function} memoized wrapper.
 *
 * Fix: the original comment promised LRU eviction but never refreshed a
 * key's recency on a cache hit, so eviction was actually FIFO. Hits now
 * re-insert the key, making size-based eviction genuinely least-recently-used.
 */
function memoizeAdvanced(fn, options = {}) {
  const {
    maxSize = 100,
    ttl = Infinity, // Time to live in ms
    keyGenerator = JSON.stringify
  } = options;
  const cache = new Map();      // key -> result (Map preserves insertion order)
  const timestamps = new Map(); // key -> time the value was computed, for TTL

  return function (...args) {
    const key = keyGenerator(args);
    const now = Date.now();

    if (cache.has(key)) {
      if (now - timestamps.get(key) < ttl) {
        // Fresh hit: re-insert so the key moves to the most-recently-used
        // end of the Map, keeping the eviction below truly LRU.
        const value = cache.get(key);
        cache.delete(key);
        cache.set(key, value);
        return value;
      }
      // Expired — drop the stale entry and fall through to recompute.
      cache.delete(key);
      timestamps.delete(key);
    }

    const result = fn.apply(this, args);
    cache.set(key, result);
    timestamps.set(key, now);

    // Enforce max size: evict the least recently used (first) key.
    if (cache.size > maxSize) {
      const lruKey = cache.keys().next().value;
      cache.delete(lruKey);
      timestamps.delete(lruKey);
    }
    return result;
  };
}
// Usage with TTL
// Cache fetchData results for 5 minutes, holding at most 50 entries.
// NOTE(review): if fetchData is async, the *Promise* (not the resolved value)
// is what gets cached — a rejected Promise would then be served from cache
// until it expires. Confirm that is acceptable for this call site.
const expensiveAPI = memoizeAdvanced(fetchData, {
ttl: 5 * 60 * 1000, // 5 minutes
maxSize: 50
});
LRU Cache Implementation
/**
 * Fixed-capacity least-recently-used cache backed by a Map, which iterates
 * in insertion order: the first key is always the least recently used.
 * get() returns -1 (not undefined) on a miss, LeetCode-style.
 */
class LRUCache {
  constructor(capacity) {
    this.capacity = capacity;
    this.cache = new Map();
  }

  /** Return the cached value and mark the key most-recently-used, or -1. */
  get(key) {
    if (!this.cache.has(key)) return -1;
    const value = this.cache.get(key);
    // Re-insert so the key moves to the most-recently-used end.
    this.cache.delete(key);
    this.cache.set(key, value);
    return value;
  }

  /** Insert or update a key, evicting the LRU entry when over capacity. */
  put(key, value) {
    if (this.cache.has(key)) {
      this.cache.delete(key); // remove first so the re-insert updates position
    }
    this.cache.set(key, value);
    if (this.cache.size > this.capacity) {
      const lruKey = this.cache.keys().next().value;
      this.cache.delete(lruKey);
    }
  }

  /** Drop every entry. */
  clear() {
    this.cache.clear();
  }
}
// Usage
const cache = new LRUCache(100);
// Look up `id` in the LRU cache, fetching and caching on a miss.
// NOTE(review): if fetchFromAPI is async, this caches (and returns) a
// Promise rather than the resolved data, and a rejected Promise would stay
// cached until evicted — confirm fetchFromAPI is synchronous, or cache the
// awaited value instead.
function getCachedData(id) {
let data = cache.get(id);
if (data === -1) {
data = fetchFromAPI(id);
cache.put(id, data);
}
return data;
}
React Memoization
import React, { useMemo, useCallback, memo } from 'react';
// 1. useMemo - Memoize expensive computations
function ExpensiveComponent({ data }) {
// ā Bad: Recomputes on every render
const processedData = processLargeDataset(data);
// ā
Good: Only recomputes when data changes
const processedData = useMemo(() => {
return processLargeDataset(data);
}, [data]);
return {processedData};
}
// 2. useCallback - Memoize function references
function ParentComponent() {
// ā Bad: New function created on every render
const handleClick = () => {
doSomething();
};
// ā
Good: Function reference stays the same
const handleClick = useCallback(() => {
doSomething();
}, []); // Empty deps = never changes
return ;
}
// 3. React.memo - Memoize entire component
// ā Bad: Re-renders even when props haven't changed
function ChildComponent({ name, age }) {
return {name} is {age};
}
// ā
Good: Only re-renders when props change
const ChildComponent = memo(function ChildComponent({ name, age }) {
return {name} is {age};
});
// With custom comparison
const ChildComponent = memo(
function ChildComponent({ user }) {
return {user.name};
},
(prevProps, nextProps) => {
// Return true if props are equal (skip render)
return prevProps.user.id === nextProps.user.id;
}
);
HTTP Caching Strategies
/**
 * In-memory HTTP response cache with per-entry TTL and deduplication of
 * concurrent in-flight requests. Cached entries store the parsed JSON body.
 */
class HTTPCache {
  constructor() {
    this.cache = new Map();           // cacheKey -> { data, timestamp, maxAge }
    this.pendingRequests = new Map(); // cacheKey -> in-flight Promise
  }

  /**
   * Fetch `url`, serving a still-fresh cached body when available and
   * collapsing concurrent requests for the same key into one network call.
   * @param {string} url
   * @param {object} [options] - fetch options, plus optional `maxAge` (ms).
   * @returns {Promise<any>} parsed JSON body.
   * @throws on network failure or a non-2xx response (never cached).
   */
  async fetch(url, options = {}) {
    const cacheKey = this.getCacheKey(url, options);

    // Serve from cache while the entry is still fresh.
    if (this.cache.has(cacheKey)) {
      const cached = this.cache.get(cacheKey);
      if (Date.now() - cached.timestamp < cached.maxAge) {
        return cached.data;
      }
      this.cache.delete(cacheKey); // expired
    }

    // Deduplicate concurrent requests for the same key.
    if (this.pendingRequests.has(cacheKey)) {
      return this.pendingRequests.get(cacheKey);
    }

    const requestPromise = fetch(url, options)
      .then((response) => {
        // Fix: the original parsed and cached ANY response, including
        // 4xx/5xx error bodies. Surface errors instead of caching them.
        if (!response.ok) {
          throw new Error(`HTTP ${response.status} for ${url}`);
        }
        return response.json();
      })
      .then((data) => {
        this.cache.set(cacheKey, {
          data,
          timestamp: Date.now(),
          maxAge: this.getMaxAge(options)
        });
        this.pendingRequests.delete(cacheKey);
        return data;
      })
      .catch((error) => {
        // Always clear the pending slot so later calls can retry.
        this.pendingRequests.delete(cacheKey);
        throw error;
      });

    this.pendingRequests.set(cacheKey, requestPromise);
    return requestPromise;
  }

  /** Build a cache key from the URL plus the serialized options object. */
  getCacheKey(url, options) {
    return `${url}:${JSON.stringify(options)}`;
  }

  /** Per-request freshness window in ms; defaults to 5 minutes. */
  getMaxAge(options) {
    return options.maxAge || 5 * 60 * 1000; // Default 5 minutes
  }

  /** Drop every cached entry whose key contains `pattern`. */
  invalidate(pattern) {
    for (const key of this.cache.keys()) {
      if (key.includes(pattern)) {
        this.cache.delete(key);
      }
    }
  }
}
// Usage
const httpCache = new HTTPCache();
// Fetch one user through the shared HTTP cache; entries stay fresh 10 minutes.
// NOTE(review): `maxAge` is also forwarded to fetch() as a request option
// here — harmless to fetch, but it becomes part of the cache key; confirm
// that's intended.
async function getUserData(userId) {
return httpCache.fetch(`/api/users/${userId}`, {
maxAge: 10 * 60 * 1000 // 10 minutes
});
}
// Invalidate cache when user updates
/**
 * Persist a user update, then drop any cached responses for that user so
 * the next read refetches fresh data.
 * Fix: the original used `await` inside a non-async function, which is a
 * SyntaxError — the function is now declared `async`.
 * @param {string|number} userId
 * @param {object} data - JSON-serializable update payload.
 */
async function updateUser(userId, data) {
  await fetch(`/api/users/${userId}`, {
    method: 'PUT',
    body: JSON.stringify(data)
  });
  httpCache.invalidate(`/api/users/${userId}`);
}
Service Worker Caching
// service-worker.js
// Bump CACHE_NAME to invalidate previously precached assets on the next
// service-worker install.
const CACHE_NAME = 'v1';
// Static assets precached during the install phase.
const CACHE_URLS = [
'/',
'/styles.css',
'/app.js',
'/logo.png'
];
// Install - cache static assets
// waitUntil keeps the worker in the "installing" state until every URL in
// CACHE_URLS has been fetched and stored.
self.addEventListener('install', (event) => {
  const precache = async () => {
    const cache = await caches.open(CACHE_NAME);
    await cache.addAll(CACHE_URLS);
  };
  event.waitUntil(precache());
});
// Fetch - serve from cache, fallback to network
// Cache-first strategy: a hit is returned immediately; a miss goes to the
// network and, when the response is a plain 200, a clone is stored for
// next time (the cache write is fire-and-forget, matching the original).
self.addEventListener('fetch', (event) => {
  const respond = async () => {
    const cached = await caches.match(event.request);
    if (cached) {
      return cached; // cache hit
    }
    const response = await fetch(event.request.clone());
    // Don't cache missing or non-200 responses.
    if (!response || response.status !== 200) {
      return response;
    }
    // Clone before returning — a Response body can only be read once.
    const responseToCache = response.clone();
    caches.open(CACHE_NAME).then((cache) => {
      cache.put(event.request, responseToCache);
    });
    return response;
  };
  event.respondWith(respond());
});
Cache Invalidation Strategies
// 1. Time-based invalidation (TTL)
/**
 * Map-backed cache whose entries expire `ttl` milliseconds after being set.
 * Expired entries are evicted lazily, on the next get() that touches them.
 */
class TTLCache {
  constructor(ttl = 60000) {
    this.cache = new Map();
    this.ttl = ttl;
  }

  /** Store `value` under `key`, stamped with an absolute expiry time. */
  set(key, value) {
    this.cache.set(key, { value, expiry: Date.now() + this.ttl });
  }

  /** Return the live value for `key`, or null when absent or expired. */
  get(key) {
    const entry = this.cache.get(key);
    if (!entry) {
      return null;
    }
    if (Date.now() > entry.expiry) {
      this.cache.delete(key); // lazy eviction of the stale entry
      return null;
    }
    return entry.value;
  }
}
// 2. Event-based invalidation
/**
 * Cache whose entries are tagged with event names. Calling invalidate(event)
 * evicts every entry that listed that event as a dependency.
 */
class EventCache {
  constructor() {
    this.cache = new Map();
    this.dependencies = new Map(); // key -> array of event names
  }

  /** Cache `value` under `key`, tied to the given dependency events. */
  set(key, value, deps = []) {
    this.cache.set(key, value);
    this.dependencies.set(key, deps);
  }

  /** Evict every cached entry that depends on `event`. */
  invalidate(event) {
    // Deleting the current key while iterating a Map is safe in JS.
    for (const [key, deps] of this.dependencies) {
      if (!deps.includes(event)) continue;
      this.cache.delete(key);
      this.dependencies.delete(key);
    }
  }
}
// Example: tag the entry with the events that should invalidate it.
// Fix: renamed from `cache` — the earlier LRU example already declares
// `const cache`, so reusing the name is a redeclaration SyntaxError in a
// single script.
const eventCache = new EventCache();
eventCache.set('user:123', userData, ['user-update', 'user-delete']);
// When user updates, invalidate related caches
eventCache.invalidate('user-update');
Best Practices
- Use memoization for pure functions with expensive computations
- Implement LRU cache to limit memory usage
- Set appropriate TTL values - balance freshness vs performance
- Use `useMemo` and `useCallback` wisely in React - don't over-optimize
- Invalidate cache when underlying data changes
- Consider using libraries like `lru-cache` or `react-query`
- Use Service Workers for offline-first experiences
- Deduplicate concurrent API requests to same endpoint
- Monitor cache hit rates to validate effectiveness
- Use appropriate cache keys - consider all relevant parameters