mono/packages/ui/docs/db-caching.md
2026-02-08 15:09:32 +01:00

5.2 KiB

Short Term DB Caching Proposal

Objective

Reduce database load and improve response times for high-traffic, read-heavy routes by implementing a short-term caching layer using a Generically Safe Decorator Pattern.

Proposed Solution

Implement a Generic CachedHandler Utility (server/src/commons/decorators.ts) that:

  1. Auto-Generates Keys: Defaults to URL + Query.
  2. Auth Protection: Skips caching for authenticated requests by default.
  3. Size Protection: Skips caching for responses larger than a threshold (e.g. 1MB).
  4. Memory Protection: Enforces LRU/Limits in MemoryCache.

1. Functional Decorator

import { Context } from 'hono';
import { getCache } from '../commons/cache/index.js';

type KeyGenerator = (c: Context) => string;

// Default cache-key builder: method + path + (sorted) query string.
// Sorting the params makes the key deterministic, so `?a=1&b=2` and
// `?b=2&a=1` resolve to the same cache entry.
const defaultKeyInfo = (c: Context) => {
    const parsed = new URL(c.req.url);
    parsed.searchParams.sort();
    return ['auto-cache', c.req.method, `${parsed.pathname}${parsed.search}`].join(':');
};

/**
 * Decorator that adds short-term response caching to a Hono handler.
 *
 * Safety rails:
 *  - skips caching entirely for requests carrying an Authorization header
 *    (unless `skipAuth: false`), so private data never enters the shared cache;
 *  - skips caching for bodies larger than `maxSizeBytes` (default 1 MB);
 *  - `?cache=false` bypasses the read path but still refreshes the entry.
 *
 * @param handler  the underlying route handler to wrap
 * @param options  ttl (seconds), optional key generator, auth/size overrides
 */
export const CachedHandler = (
    handler: (c: Context) => Promise<Response>, 
    options: {
        ttl: number,
        keyGenerator?: KeyGenerator,
        skipAuth?: boolean, // Default true
        maxSizeBytes?: number // Default: 1MB
    }
) => async (c: Context): Promise<Response> => {
    // defaults — `??` (not `||`) so an explicit maxSizeBytes of 0 is honored
    const ttl = options.ttl;
    const skipAuth = options.skipAuth !== false;
    const maxSizeBytes = options.maxSizeBytes ?? 1024 * 1024; // 1MB
    const keyGen = options.keyGenerator ?? defaultKeyInfo;

    // 1. Auth Bypass: authenticated traffic goes straight to the handler.
    if (skipAuth && c.req.header('Authorization')) {
        return handler(c);
    }

    const cache = getCache();
    const key = keyGen(c);
    const bypass = c.req.query('cache') === 'false';

    // 2. Hit
    if (!bypass) {
        const cached = await cache.get(key);
        if (cached) {
            c.header('X-Cache', 'HIT');
            if (cached.contentType) c.header('Content-Type', cached.contentType);
            return c.body(cached.data);
        }
    }

    // 3. Miss
    const response = await handler(c);

    // 4. Save — only successful responses are cacheable.
    if (response instanceof Response && response.ok) {
        try {
            const contentType = response.headers.get('Content-Type') || 'application/json';

            // Cheap pre-check: trust Content-Length when the handler set it.
            const contentLength = response.headers.get('Content-Length');
            if (contentLength && parseInt(contentLength, 10) > maxSizeBytes) {
                // Too big, skip cache
                return response;
            }

            // Read from a clone so the original body stream stays consumable.
            // A plain text() read avoids the json()/JSON.stringify round-trip
            // of the earlier draft — the cached bytes are identical either way.
            const data = await response.clone().text();

            // Authoritative size check in BYTES: String.length counts UTF-16
            // code units and under-counts multi-byte characters.
            if (new TextEncoder().encode(data).byteLength > maxSizeBytes) {
                // Too big, skip cache
                return response;
            }

            await cache.set(key, { data, contentType }, ttl);
            // FIX: stamp the header on the Response itself — c.header() may not
            // be merged into a Response the handler constructed and returned.
            response.headers.set('X-Cache', bypass ? 'BYPASS' : 'MISS');
        } catch (e) {
            // Caching is best-effort: never fail the request because of it.
            console.error('Cache interception failed', e);
        }
    }
    
    return response;
}

2. Usage Implementation

In server/src/products/serving/index.ts:

// 5 minute cache, auto-key, skip if auth, max 500kb
this.routes.push({ 
    definition: getApiUserPageRoute, 
    handler: CachedHandler(handleGetApiUserPage, { ttl: 300, maxSizeBytes: 500 * 1024 }) 
});

3. MemoryCache Protection (Limit)

Update server/src/commons/cache/MemoryCache.ts:

// Add limit: hard cap on the number of entries the in-memory cache may
// hold; together with per-entry TTLs this bounds worst-case memory use.
const MAX_KEYS = 1000;

// Stores `value` under `key` with a per-entry TTL (seconds).
// Eviction: prune expired entries first, then — if still at capacity —
// drop the first key. NOTE(review): Map iterates in insertion order, so
// this evicts the OLDEST entry (FIFO), not a least-recently-USED one;
// confirm whether true LRU (re-insert on get) is required.
async set(key: string, value: any, ttlSeconds: number): Promise<void> {
    this.prune();
    if (this.cache.size >= MAX_KEYS) {
        // Oldest entry = first key in insertion order.
        const first = this.cache.keys().next().value;
        this.cache.delete(first);
    }
    // ... set logic
}

4. Summary of Protections

| Protection | Mechanism | Benefit |
| --- | --- | --- |
| Data Leak | `skipAuth: true` | Prevents private data being cached/served to public. |
| Stale Data | `ttl` | Ensures updates propagate eventually. |
| OOM (Large Item) | `maxSizeBytes` | Prevents caching huge responses (e.g. giant JSONs). |
| OOM (Many Items) | `MAX_KEYS` | Prevents unlimited growth of the cache map. |
| Performance | `X-Cache` | Visibility into hit rates. |

5. Sequence Diagram (Final)

sequenceDiagram
    participant Client
    participant Dec as CachedHandler
    participant Cache as MemoryCache
    participant H as Handler

    Client->>Dec: GET /api/data
    Dec->>Dec: Check Auth Header?
    opt Authenticated
        Dec->>H: Invoke Handler Directly
        H-->>Client: Returns Private Data
    end

    Dec->>Cache: get(key)
    alt Hit
        Cache-->>Client: Returns Data (HIT)
    else Miss
        Dec->>H: Invoke Handler
        H-->>Dec: Returns Response
        Dec->>Dec: Check Size < 1MB?
        alt Small Enough
            Dec->>Cache: set(key, data)
            Dec-->>Client: Returns (MISS)
        else Too Big
            Dec-->>Client: Returns (MISS - No Cache)
        end
    end