@matters/apollo-response-cache

Caching and invalidation mechanisms (plugins and directives) for Apollo GraphQL

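The rest of this page is the compiled plugin source. As a minimal registration sketch (assumptions: an ioredis client, Apollo Server 4, and that this factory is what the package entry point exports, possibly under a different name), it can be wired up roughly like this:

// Usage sketch only, not part of this file. The import name/path of the
// plugin factory is an assumption; check the package's own README/exports.
import { ApolloServer } from '@apollo/server';
import { startStandaloneServer } from '@apollo/server/standalone';
import Redis from 'ioredis';
import responseCachePlugin from '@matters/apollo-response-cache'; // assumed export

const typeDefs = `#graphql
  type Query { hello: String }
`;
const resolvers = { Query: { hello: () => 'world' } };

const redis = new Redis(); // defaults to localhost:6379

const server = new ApolloServer({
    typeDefs,
    resolvers,
    plugins: [
        responseCachePlugin({
            redis,
            // Return a stable id for logged-in users, or null for visitors; the
            // plugin uses it to pick NoSession / Private / AuthenticatedPublic keys.
            sessionId: async (requestContext) =>
                requestContext.request.http?.headers.get('authorization') ?? null,
            nodeFQCTTL: 86400, // seconds to keep node -> full-query-cache key mappings
        }),
    ],
});

await startStandaloneServer(server, { listen: { port: 4000 } });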
import { HeaderMap, } from '@apollo/server';
import { createHash } from '@apollo/utils.createhash';
import { CACHE_KEY_PREFIX_FQC } from '../enums.js';
import { recordNodeFQCMapping } from '../utils.js';
var SessionMode;
(function (SessionMode) {
    SessionMode[SessionMode["NoSession"] = 0] = "NoSession";
    SessionMode[SessionMode["Private"] = 1] = "Private";
    SessionMode[SessionMode["AuthenticatedPublic"] = 2] = "AuthenticatedPublic";
})(SessionMode || (SessionMode = {}));
function sha(s) {
    return createHash('sha256').update(s).digest('hex');
}
function isGraphQLQuery(requestContext) {
    return requestContext.operation?.operation === 'query';
}
export default function plugin(options = Object.create(null)) {
    return {
        async requestDidStart() {
            const redis = options.redis;
            const generateCacheKey = options.generateCacheKey ?? ((_, key) => sha(JSON.stringify(key)));
            let sessionId = null;
            let baseCacheKey = null;
            let age = null;
            return {
                async responseForOperation(requestContext) {
                    requestContext.metrics.responseCacheHit = false;
                    /**
                     * Inject redis instance `__redis` and `__nodeFQCKeySet` to context,
                     * used by `@logCache`, `@purgeCache`,
                     * and `willSendResponse` below.
                     */
                    requestContext.contextValue.__redis = options.redis;
                    requestContext.contextValue.__nodeFQCKeySet = new Set();
                    if (!isGraphQLQuery(requestContext)) {
                        return null;
                    }
                    async function cacheGet(contextualCacheKeyFields) {
                        const cacheKeyData = {
                            ...baseCacheKey,
                            ...contextualCacheKeyFields,
                        };
                        const key = generateCacheKey(requestContext, cacheKeyData);
                        const serializedValue = await redis.get(CACHE_KEY_PREFIX_FQC + key);
                        if (serializedValue === null) {
                            return null;
                        }
                        const value = JSON.parse(serializedValue);
                        // Use cache policy from the cache (eg, to calculate HTTP response
                        // headers).
                        requestContext.overallCachePolicy.replace(value.cachePolicy);
                        requestContext.metrics.responseCacheHit = true;
                        age = Math.round((+new Date() - value.cacheTime) / 1000);
                        return {
                            body: { kind: 'single', singleResult: { data: value.data } },
                            http: {
                                status: undefined,
                                headers: new HeaderMap(),
                            },
                        };
                    }
                    // Call hooks. Save values which will be used in willSendResponse as well.
                    let extraCacheKeyData = null;
                    if (options.sessionId) {
                        sessionId = await options.sessionId(requestContext);
                    }
                    if (options.extraCacheKeyData) {
                        extraCacheKeyData = await options.extraCacheKeyData(requestContext);
                    }
                    baseCacheKey = {
                        source: requestContext.source,
                        operationName: requestContext.operationName,
                        // Defensive copy just in case it somehow gets mutated.
                        variables: { ...(requestContext.request.variables || {}) },
                        extra: extraCacheKeyData,
                    };
                    // Note that we set up sessionId and baseCacheKey before doing this
                    // check, so that we can still write the result to the cache even if
                    // we are told not to read from the cache.
                    if (options.shouldReadFromCache) {
                        const shouldReadFromCache = await options.shouldReadFromCache(requestContext);
                        if (!shouldReadFromCache)
                            return null;
                    }
                    if (sessionId === null) {
                        return cacheGet({ sessionMode: SessionMode.NoSession });
                    }
                    else {
                        const privateResponse = await cacheGet({
                            sessionId,
                            sessionMode: SessionMode.Private,
                        });
                        if (privateResponse !== null) {
                            return privateResponse;
                        }
                        return cacheGet({ sessionMode: SessionMode.AuthenticatedPublic });
                    }
                },
                async willSendResponse(requestContext) {
                    const logger = requestContext.logger || console;
                    // We don't support caching incremental delivery responses (ie,
                    // responses that use @defer or @stream) now. (It might be useful to
                    // do so: after all, deferred responses might benefit the most from
                    // caching! But we don't right now.)
                    if (requestContext.response.body.kind !== 'single') {
                        return;
                    }
                    if (!isGraphQLQuery(requestContext)) {
                        return;
                    }
                    if (requestContext.metrics.responseCacheHit) {
                        // Never write back to the cache what we just read from it. But do set the Age header!
                        const http = requestContext.response.http;
                        if (http && age !== null) {
                            http.headers.set('age', age.toString());
                        }
                        return;
                    }
                    if (options.shouldWriteToCache) {
                        const shouldWriteToCache = await options.shouldWriteToCache(requestContext);
                        if (!shouldWriteToCache)
                            return;
                    }
                    const { data, errors } = requestContext.response.body.singleResult;
                    const policyIfCacheable = requestContext.overallCachePolicy.policyIfCacheable();
                    if (errors || !data || !policyIfCacheable) {
                        // This plugin never caches errors or anything without a cache policy.
                        //
                        // There are two reasons we don't cache errors. The user-level
                        // reason is that we think that in general errors are less cacheable
                        // than real results, since they might indicate something transient
                        // like a failure to talk to a backend. (If you need errors to be
                        // cacheable, represent the erroneous condition explicitly in data
                        // instead of out-of-band as an error.) The implementation reason is
                        // that this lets us avoid complexities around serialization and
                        // deserialization of GraphQL errors, and the distinction between
                        // formatted and unformatted errors, etc.
                        return;
                    }
                    // We're pretty sure that any path that calls willSendResponse with a
                    // non-error response will have already called our execute hook above,
                    // but let's just double-check that, since accidentally ignoring
                    // sessionId could be a big security hole.
                    if (!baseCacheKey) {
                        throw new Error('willSendResponse called without error, but execute not called?');
                    }
                    const cacheSetInBackground = (contextualCacheKeyFields) => {
                        const cacheKeyData = {
                            ...baseCacheKey,
                            ...contextualCacheKeyFields,
                        };
                        const key = generateCacheKey(requestContext, cacheKeyData);
                        const value = {
                            data,
                            cachePolicy: policyIfCacheable,
                            cacheTime: +new Date(),
                        };
                        const serializedValue = JSON.stringify(value);
                        // Note that this function converts key and response to strings before
                        // doing anything asynchronous, so it can run in parallel with user code
                        // without worrying about anything being mutated out from under it.
                        //
                        // Also note that the test suite assumes that this asynchronous function
                        // still calls `cache.set` synchronously (ie, that it writes to
                        // InMemoryLRUCache synchronously).
                        redis
                            .set(CACHE_KEY_PREFIX_FQC + key, serializedValue, 'EX', policyIfCacheable.maxAge)
                            .catch(logger.warn);
                        const { __nodeFQCKeySet, __redis } = requestContext.contextValue;
                        if (__nodeFQCKeySet && __redis) {
                            recordNodeFQCMapping({
                                nodeFQCKeys: __nodeFQCKeySet,
                                fqcKey: key,
                                ttl: options.nodeFQCTTL,
                                redis: __redis,
                            });
                        }
                    };
                    const isPrivate = policyIfCacheable.scope === 'PRIVATE';
                    if (isPrivate) {
                        if (!options.sessionId) {
                            logger.warn('A GraphQL response used @cacheControl or setCacheHint to set cache hints with scope ' +
                                "Private, but you didn't define the sessionId hook for " +
                                '@thematters/apollo-response-cache. Not caching.');
                            return;
                        }
                        if (sessionId === null) {
                            // Private data shouldn't be cached for logged-out users.
                            return;
                        }
                        cacheSetInBackground({
                            sessionId,
                            sessionMode: SessionMode.Private,
                        });
                    }
                    else {
                        cacheSetInBackground({
                            sessionMode: sessionId === null
                                ? SessionMode.NoSession
                                : SessionMode.AuthenticatedPublic,
                        });
                    }
                },
            };
        },
    };
}
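For reference, the sketch below reproduces the default key derivation above in isolation. It uses node:crypto instead of @apollo/utils.createhash, and the 'fqc:' prefix is a placeholder, since the real CACHE_KEY_PREFIX_FQC value lives in ../enums.js and is not visible in this file.

// Standalone illustration of the default generateCacheKey path above.
import { createHash } from 'node:crypto';

const baseCacheKey = {
    source: '{ hello }',   // requestContext.source: the raw query string
    operationName: null,   // requestContext.operationName
    variables: {},         // defensive copy of request variables
    extra: null,           // result of the extraCacheKeyData hook, if any
};

// A logged-out request reads/writes the NoSession variant (SessionMode.NoSession === 0).
const cacheKeyData = { ...baseCacheKey, sessionMode: 0 };
const key = createHash('sha256').update(JSON.stringify(cacheKeyData)).digest('hex');

// Placeholder prefix; the real value comes from CACHE_KEY_PREFIX_FQC in ../enums.js.
const redisKey = 'fqc:' + key;
console.log(redisKey);

For logged-in users the plugin first looks up the Private variant (with sessionId included in the key data) and falls back to AuthenticatedPublic, which is what the branching on sessionId in responseForOperation above implements.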