March 12, 2026 · 6 min · 1173 words · Rob Washington
Table of Contents
The fastest database query is the one you don’t make. Caching is how you turn expensive operations into cheap lookups. Here’s how to do it without shooting yourself in the foot.
# Set cache headers for CDN
@app.route('/api/products/<id>')
def get_product(id):
    """Serve a product as JSON with CDN-friendly cache headers."""
    response = jsonify(fetch_product(id))
    # Surrogate key for targeted invalidation
    response.headers['Surrogate-Key'] = f'product-{id}'
    # Cache publicly for 5 minutes
    response.headers['Cache-Control'] = 'public, max-age=300'
    return response
CDNs (Cloudflare, Fastly, CloudFront) cache based on these headers.
import json  # FIX: snippet used json.loads/json.dumps without importing json
import redis

cache = redis.Redis(host='localhost', port=6379, db=0)


def get_user(user_id: str) -> dict:
    """Cache-aside lookup of a user row.

    Tries Redis first; on a miss, loads from the database and stores the
    result with a 1-hour TTL so subsequent calls are cheap.
    """
    # Try cache first
    cached = cache.get(f"user:{user_id}")
    if cached:
        return json.loads(cached)
    # Cache miss: fetch from database
    user = db.query("SELECT * FROM users WHERE id = %s", user_id)
    # Store in cache with 1 hour TTL
    cache.setex(f"user:{user_id}", 3600, json.dumps(user))
    return user
def get_product(product_id):
    """Cache-aside read: serve from Redis when possible, else hydrate it."""
    key = f"product:{product_id}"
    # 1. Check cache
    hit = cache.get(key)
    if hit:
        return json.loads(hit)
    # 2. Load from database
    product = db.get_product(product_id)
    # 3. Store in cache (1 hour TTL)
    cache.setex(key, 3600, json.dumps(product))
    return product
Pros: Only caches what’s needed
Cons: Cache miss penalty, potential inconsistency
def get_product(product_id):
    """Stale-while-revalidate: serve cached data, refreshing in background
    once the remaining TTL drops below 5 minutes."""
    cached = cache.get(f"product:{product_id}")
    ttl = cache.ttl(f"product:{product_id}")
    # Fresh enough: more than 5 min of TTL left
    if cached and ttl > 300:
        return json.loads(cached)
    if cached:
        # Nearly expired: serve the stale copy and refresh in background
        queue.enqueue('refresh_product', product_id)
        return json.loads(cached)
    # Cache miss: fetch synchronously
    return fetch_and_cache_product(product_id)
# On product update: invalidate directly...
def update_product(product_id, data):
    """Write-through update: persist, then drop the cached copy."""
    db.update_product(product_id, data)
    cache.delete(f"product:{product_id}")


# ...or publish event
def update_product(product_id, data):
    """Decoupled update: persist, then announce the change."""
    db.update_product(product_id, data)
    events.publish("product:updated", {"id": product_id})


# Subscriber invalidates cache
@events.subscribe("product:updated")
def invalidate_product_cache(event):
    """Drop the cache entry for the product named in the event payload."""
    cache.delete(f"product:{event['id']}")
def get_cache_version():
    """Current global cache generation; defaults to "1" when unset."""
    return cache.get("cache:version") or "1"


def get_product(product_id):
    """Look up a product under the current cache generation's key."""
    version = get_cache_version()
    return cache.get(f"product:{product_id}:v{version}")


def invalidate_all():
    """Bump the generation; every key under the old version is orphaned."""
    cache.incr("cache:version")  # All old keys now orphaned
Nuclear option: increment version to invalidate everything.
def warm_cache():
    """Pre-populate cache with hot data."""
    # The 1000 most-viewed products are the likeliest cache hits
    popular_products = db.query("""
SELECT id FROM products
ORDER BY views DESC
LIMIT 1000
""")
    for product_id in popular_products:
        product = db.get_product(product_id)
        # Same key format and 1-hour TTL as the read path
        cache.setex(f"product:{product_id}", 3600, json.dumps(product))
# Store a user as a Redis hash so individual fields can be read cheaply
cache.hset("user:123", mapping={
    "name": "Alice",
    "email": "alice@example.com",
    "role": "admin",
})
cache.hget("user:123", "name")  # Get single field
cache.hgetall("user:123")       # Get all fields
More memory efficient than JSON strings for partial reads.
def get_product_safe(product_id):
    """Cache-aside read guarded by a distributed lock.

    Only one caller rebuilds a missing entry; everyone else either waits
    briefly for the lock or falls back to whatever is cached.
    """
    key = f"product:{product_id}"
    cached = cache.get(key)
    if cached:
        return json.loads(cached)

    # Lock to prevent thundering herd
    lock = cache.lock(f"lock:product:{product_id}", timeout=10)
    if not lock.acquire(blocking=True, blocking_timeout=5):
        # Couldn't get lock: try cache again or return stale/empty
        return json.loads(cache.get(key) or '{}')

    try:
        # Double-check cache after acquiring lock — another worker may
        # have repopulated it while we waited
        cached = cache.get(key)
        if cached:
            return json.loads(cached)
        product = db.get_product(product_id)
        cache.setex(key, 3600, json.dumps(product))
        return product
    finally:
        lock.release()