# redis-cache-aside-pattern
#
# code_execution · unverified · null · json · download .py
#
# Requires the REDIS_URL env var: a Redis connection URL, e.g. "redis://localhost:6379"

import sys
import os
import subprocess
import time
import urllib.request
import json

# ─────────────────────────────────────────
# PRE_EXECUTION
# ─────────────────────────────────────────

# Fetch the checklist registry entry for redis, retrying once on failure.
MAX_ATTEMPTS = 2
attempt = 0
while True:
    try:
        request = urllib.request.Request(
            "https://checklist.day/api/registry/redis",
            headers={"User-Agent": "checklist-agent/1.0"},
        )
        with urllib.request.urlopen(request, timeout=10) as response:
            registry = json.loads(response.read())
        break
    except Exception as exc:
        attempt += 1
        if attempt >= MAX_ATTEMPTS:
            print(f"ABORT: registry unreachable — {exc}")
            sys.exit(1)
        time.sleep(2)  # brief back-off before the final retry

# Surface any registry warnings; a single warning may arrive as a bare value.
warnings = registry.get("warnings", [])
if warnings:
    if not isinstance(warnings, list):
        warnings = [warnings]
    print("[redis] WARNINGS:")
    for warning in warnings:
        print(f"  ⚠ {warning}")

# ─────────────────────────────────────────
# EXECUTION
# ─────────────────────────────────────────

# Install the redis client at runtime so the script is self-contained.
subprocess.check_call(
    [sys.executable, "-m", "pip", "install", "-q", "redis>=5.0.0"]
)

import redis as redis_lib  # deferred import: available only after pip install

# Connection target comes from the environment; abort early if missing.
REDIS_URL = os.environ.get("REDIS_URL")
if not REDIS_URL:
    print("ABORT: REDIS_URL env var not set")
    sys.exit(1)

# decode_responses=True makes GET return str instead of bytes.
client = redis_lib.Redis.from_url(
    REDIS_URL,
    decode_responses=True,
    socket_connect_timeout=10,
)
CACHE_KEY = "checklist:cache:user:42"
CACHE_TTL = 60  # seconds

# Simulated data source (database/API)
# Simulated data source (database/API)
def fetch_from_source(user_id: int) -> dict:
    """Simulate a slow backing-store (database/API) lookup for *user_id*."""
    time.sleep(0.05)  # pretend this is DB round-trip latency
    record = {"id": user_id, "name": "Alice", "email": "alice@example.com"}
    return record

def get_user(user_id: int) -> tuple[dict, str]:
    """Cache-aside lookup for a user record.

    Check Redis first; on a miss, load from the backing source and
    populate the cache with a TTL.

    Args:
        user_id: numeric user id to look up.

    Returns:
        (record, outcome) where outcome is "hit" or "miss".
    """
    # BUG FIX: the key is derived from user_id instead of the single
    # hard-coded CACHE_KEY, so different users no longer collide on one
    # cache entry. For user_id=42 this string equals CACHE_KEY exactly,
    # so the surrounding setup/cleanup deletes still target the same key.
    key = f"checklist:cache:user:{user_id}"

    # decode_responses=True means GET returns str, or None on a miss;
    # an explicit None check avoids misclassifying a falsy payload.
    cached = client.get(key)
    if cached is not None:
        # Cache stores a JSON string — deserialize back to a dict.
        return json.loads(cached), "hit"

    # Cache miss — fetch from the source of truth.
    data = fetch_from_source(user_id)

    # Serialize before storing (Redis can't store dicts natively);
    # ex= sets the TTL so stale entries expire on their own.
    client.set(key, json.dumps(data), ex=CACHE_TTL)
    return data, "miss"

try:
    # Start from a clean slate so the first request is a guaranteed miss.
    client.delete(CACHE_KEY)

    # 1. Cache miss — should fall through to the backing source.
    t0 = time.perf_counter()
    result1, outcome1 = get_user(42)
    miss_latency_ms = round((time.perf_counter() - t0) * 1000, 1)
    cache_miss_ok = outcome1 == "miss" and result1["name"] == "Alice"
    print(f"  1st request: {outcome1} ({miss_latency_ms}ms) → {result1['name']}")

    # 2. Cache hit — should be served from Redis.
    t0 = time.perf_counter()
    result2, outcome2 = get_user(42)
    hit_latency_ms = round((time.perf_counter() - t0) * 1000, 1)
    cache_hit_ok = outcome2 == "hit" and result2["name"] == "Alice"
    cache_hit_faster = hit_latency_ms < miss_latency_ms
    print(f"  2nd request: {outcome2} ({hit_latency_ms}ms) → {result2['name']}")
    # BUG FIX: hit_latency_ms can round to 0.0 against a fast local Redis,
    # which made the original division raise ZeroDivisionError. Guard it.
    if hit_latency_ms > 0:
        print(f"  cache speedup: {round(miss_latency_ms / hit_latency_ms, 1)}x faster")
    else:
        print("  cache speedup: hit below timer resolution (< 0.1ms)")

    # 3. Cache invalidation — deleting the key must force a fresh miss.
    client.delete(CACHE_KEY)
    _, outcome3 = get_user(42)
    invalidation_ok = outcome3 == "miss"
    print(f"  after invalidation: {outcome3} (expected miss = {invalidation_ok})")

    # Leave no test keys behind.
    client.delete(CACHE_KEY)

finally:
    # Always release the connection, even if a step above raised.
    client.close()

# ─────────────────────────────────────────
# POST_EXECUTION
# ─────────────────────────────────────────

# BUG FIX: plain `assert` is stripped under `python -O`, which would let
# the script print PASS even when a check failed. Explicit checks keep
# the verification active in every interpreter mode.
checks = [
    (cache_miss_ok, "FAIL: first request should be a cache miss"),
    (cache_hit_ok, "FAIL: second request should be a cache hit"),
    (invalidation_ok, "FAIL: after delete, request should be a cache miss again"),
]
for ok, message in checks:
    if not ok:
        print(message)
        sys.exit(1)

# Machine-readable summary of the run.
result = {
    "cache_miss_ok":    cache_miss_ok,
    "cache_hit_ok":     cache_hit_ok,
    "cache_hit_faster": cache_hit_faster,
    "invalidation_ok":  invalidation_ok,
    "miss_latency_ms":  miss_latency_ms,
    "hit_latency_ms":   hit_latency_ms,
}
print(json.dumps(result, indent=2))
print("PASS")