import type {
  FastifyInstance,
  FastifyPluginAsync,
  FastifyReply,
  FastifyRequest,
} from 'fastify';
import fp from 'fastify-plugin';
import { ENV } from '../../env';
import { encode } from '../../libs/encode';
import { createRedisConnection } from '../../libs/utils/redis';

const cacheRedisConnection = createRedisConnection(
  ENV.REDIS_URL,
  ENV.REDIS_CLUSTER_MODE,
  {
    keyPrefix: 'indexer-cache:',
  },
);

type RedisClient = ReturnType<typeof createRedisConnection>;

export type RouteCacheOptions = {
  /** Enable/disable cache for this route (default: true if cache is set) */
  enabled?: boolean;
  /** TTL for this route in seconds (default: plugin defaultTtlSeconds or 60) */
  ttlSeconds?: number;
  /**
   * Serve stale responses for this many extra seconds while
   * a background refresh runs (stale-while-revalidate window).
   */
  staleTtlSeconds?: number;
  /**
   * Enable/disable background revalidation (default: true if staleTtlSeconds set).
   */
  backgroundRevalidate?: boolean;
  /** Build a custom cache key based on request */
  key?: (req: FastifyRequest) => string;
};
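
// Example (illustrative sketch, not part of this module): enabling the cache on a
// route via its `config`. The route path, handler, and custom `key` builder below
// are assumptions for demonstration only.
//
//   app.get(
//     '/tokens/:address',
//     {
//       config: {
//         cache: {
//           ttlSeconds: 30,
//           staleTtlSeconds: 300,
//           key: (req) => `token:${(req.params as { address: string }).address}`,
//         },
//       },
//     },
//     async (req) => loadToken((req.params as { address: string }).address),
//   );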

export type RedisCachePluginOptions = {
  /** Existing Redis client instance (if you already manage it elsewhere) */
  redisClient?: RedisClient;
  /** Or pass a Redis URL, e.g. redis://localhost:6379 */
  redisUrl?: string;
  /** Default TTL in seconds for all cached routes */
  defaultTtlSeconds?: number;
  /** Default stale TTL in seconds for all cached routes */
  defaultStaleTtlSeconds?: number;
  /** Prefix for all cache keys */
  keyPrefix?: string;
};
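
// Example registration (sketch): `app` and the import path are assumptions for
// illustration; the plugin itself is exported below via fastify-plugin as 'cache-plugin'.
//
//   import cachePlugin from './plugins/cache';
//
//   await app.register(cachePlugin, {
//     defaultTtlSeconds: 60,
//     defaultStaleTtlSeconds: 600,
//     keyPrefix: 'route-cache',
//   });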

type CacheEntry = {
  payload: unknown;
  headers?: Record<string, unknown>;
  statusCode: number;
  storedAt: number; // ms since epoch
  ttlSeconds: number;
};
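
// A cache entry is stored as a single JSON string under `<keyPrefix>:<hash>`, with a
// Redis TTL of ttlSeconds + staleTtlSeconds so stale entries survive long enough to be
// revalidated. Illustrative value only (for JSON routes, `payload` is typically the
// already-serialized body as seen by the onSend hook):
//
//   {
//     "payload": "{\"ok\":true}",
//     "headers": { "content-type": "application/json; charset=utf-8" },
//     "statusCode": 200,
//     "storedAt": 1718000000000,
//     "ttlSeconds": 60
//   }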

declare module 'fastify' {
  interface FastifyInstance {
    cacheRedis: RedisClient;
  }

  interface FastifyRequest {
    __cacheKey?: string;
    __cacheHit?: boolean;
  }

  // interface RouteShorthandOptions {
  //   /**
  //    * If present, enables response caching for this route (default false).
  //    * - `true` -> use defaults
  //    * - `false` -> no cache
  //    * - object -> fine-grained control
  //    */
  //   cache?: RouteCacheOptions | boolean;
  // }

  interface FastifyContextConfig {
    /**
     * If present, enables response caching for this route (default false).
     * - `true` -> use defaults
     * - `false` -> no cache
     * - object -> fine-grained control
     */
    cache?: RouteCacheOptions | boolean;
  }
}

const redisCachePlugin: FastifyPluginAsync<RedisCachePluginOptions> = async (
  fastify: FastifyInstance,
  opts: RedisCachePluginOptions,
) => {
  // Prefer an injected client; otherwise honour `redisUrl` if provided (cluster mode
  // and key prefix mirror the module-level default connection and are assumptions),
  // falling back to the shared module-level connection.
  const redis =
    opts.redisClient ??
    (opts.redisUrl
      ? createRedisConnection(opts.redisUrl, ENV.REDIS_CLUSTER_MODE, {
          keyPrefix: 'indexer-cache:',
        })
      : cacheRedisConnection);

  const defaultTtl = opts.defaultTtlSeconds ?? 60;
  const defaultStaleTtl = opts.defaultStaleTtlSeconds ?? 600; // 10 minutes
  const keyPrefix = opts.keyPrefix ?? 'route-cache';

  // @ts-expect-error declare decorator
  fastify.decorate('cacheRedis', redis);

  const getRouteCacheConfig = (
    req: FastifyRequest,
  ): RouteCacheOptions | null => {
    const rawCfg = req.routeOptions.config.cache;
    if (!rawCfg) return null;

    if (typeof rawCfg === 'boolean') {
      if (!rawCfg) return null;
      return { enabled: true };
    }

    if (rawCfg.enabled === false) return null;

    return { enabled: true, ...rawCfg };
  };
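
  // How a route's `config.cache` value maps to the effective config (illustrative):
  //   cache: true               -> { enabled: true }               (all defaults)
  //   cache: false              -> null                            (no caching)
  //   cache: { enabled: false } -> null                            (no caching)
  //   cache: { ttlSeconds: 30 } -> { enabled: true, ttlSeconds: 30 }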

  // 1) Try to serve from cache
  fastify.addHook(
    'preHandler',
    async (req: FastifyRequest, reply: FastifyReply) => {
      const cfg = getRouteCacheConfig(req);

      if (!cfg) return;

      // Internal revalidation request: do not serve from cache
      if (req.headers['x-cache-revalidate'] === '1') {
        return;
      }

      const ttl = cfg.ttlSeconds ?? defaultTtl;
      const staleTtl = cfg.staleTtlSeconds ?? defaultStaleTtl;
      const routeUrl =
        req.routeOptions.url ?? req.raw.url?.split('?')[0] ?? 'unknown';

      const key =
        keyPrefix +
        ':' +
        (cfg.key?.(req) ??
          encode.sha256(
            `${routeUrl}:${req.raw.method}:` +
              `${JSON.stringify(req.query ?? {})}:${JSON.stringify(req.body ?? {})}`,
          ));

      req.__cacheKey = key;

      const cached = await redis.get(key);
      if (!cached) return;

      const entry: CacheEntry = JSON.parse(cached);
      const ageSec = (Date.now() - entry.storedAt) / 1000;

      const isFresh = ageSec <= entry.ttlSeconds;
      const isWithinStale =
        !isFresh && staleTtl > 0 && ageSec <= entry.ttlSeconds + staleTtl;

      if (!isFresh && !isWithinStale) {
        // Hard expired: ignore cache
        return;
      }

      req.__cacheHit = true;

      if (entry.headers) {
        for (const [hKey, hVal] of Object.entries(entry.headers)) {
          // Skip headers that should not be replayed from the cached entry
          if (hKey.toLowerCase() === 'content-length') continue;
          if (hKey.toLowerCase() === 'x-cache') continue;
          reply.header(hKey, hVal);
        }
      }

      reply.header('x-cache', isFresh ? 'HIT' : 'HIT-STALE');
      reply.code(entry.statusCode);
      reply.send(entry.payload);

      // Background revalidation for stale entries
      if (isWithinStale && (cfg.backgroundRevalidate ?? true)) {
        const lockKey = `${key}:revalidate-lock`;
        const lockTtl = Math.max(5, Math.floor(ttl / 2)); // seconds

        // Try to acquire revalidation lock
        try {
          // SET with NX + EX acquires the lock and its TTL atomically, so a crash
          // between acquiring and expiring cannot leave a lock that never expires.
          const lockResult = await redis.set(lockKey, '1', 'EX', lockTtl, 'NX');
          if (lockResult === 'OK') {
            // Fire-and-forget background refresh
            (async () => {
              try {
                await fastify.inject({
                  // @ts-expect-error bad type
                  method: req.raw.method,
                  url: req.raw.url ?? routeUrl,
                  // @ts-expect-error bad type
                  payload: req.body,
                  // @ts-expect-error bad type
                  query: req.query,
                  headers: {
                    ...req.headers,
                    'x-cache-revalidate': '1',
                  },
                });
              } finally {
                // Let the lock expire naturally; optional explicit delete:
                // await redis.del(lockKey);
              }
            })().catch((err) => {
              fastify.log.error({ err }, 'cache revalidation failed');
            });
          }
        } catch (err) {
          fastify.log.error({ err }, 'failed to acquire revalidate lock');
        }
      }

      // The response has already been sent from cache; return the reply so the
      // route handler is skipped.
      return reply;
    },
  );
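
  // Worked example of the stale-while-revalidate timeline with the defaults
  // (ttl = 60s, staleTtl = 600s), measured from the entry's storedAt timestamp:
  //   age <= 60s        -> fresh, served with `x-cache: HIT`
  //   60s < age <= 660s -> served with `x-cache: HIT-STALE`, refreshed in background
  //   age > 660s        -> treated as a miss; the route handler runs and re-caches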

  // 2) Store response into cache
  fastify.addHook(
    'onSend',
    async (req: FastifyRequest, reply: FastifyReply, payload) => {
      const rawCfg = req.routeOptions.config.cache;
      if (!rawCfg) return payload;

      const cfg: RouteCacheOptions =
        typeof rawCfg === 'boolean' ? { enabled: rawCfg } : rawCfg;

      if (cfg.enabled === false) return payload;

      if (req.__cacheHit) {
        return payload;
      }

      const key = req.__cacheKey;

      if (!key) return payload;

      // By default, don't cache server error responses (5xx)
      if (reply.statusCode >= 500) return payload;

      const ttl = cfg.ttlSeconds ?? defaultTtl;

      const headers = reply.getHeaders() as Record<string, unknown>;
      for (const h of Object.keys(headers)) {
        if (h.toLowerCase() === 'x-cache') {
          delete headers[h];
        }
      }

      const entry: CacheEntry = {
        payload,
        headers,
        statusCode: reply.statusCode,
        storedAt: Date.now(),
        ttlSeconds: ttl,
      };

      const expireSeconds = ttl + (cfg.staleTtlSeconds ?? defaultStaleTtl);

      await redis.setex(key, expireSeconds, JSON.stringify(entry));

      // For "real" client requests (not internal revalidation), set MISS header
      if (req.headers['x-cache-revalidate'] !== '1') {
        reply.header('x-cache', 'MISS');
      }

      return payload;
    },
  );
};

export default fp(redisCachePlugin, {
  name: 'cache-plugin',
});