UNPKG

@apollo/client

Version:

A fully-featured caching GraphQL client.

472 lines 24.2 kB
import { __assign, __extends } from "tslib";
import { invariant } from "../../utilities/globals/index.js";
// Make builtins like Map and Set safe to use with non-extensible objects.
import "./fixPolyfills.js";
import { wrap } from "optimism";
import { equal } from "@wry/equality";
import { ApolloCache } from "../core/cache.js";
import { MissingFieldError } from "../core/types/common.js";
import { addTypenameToDocument, isReference, DocumentTransform, canonicalStringify, print, cacheSizes, } from "../../utilities/index.js";
import { StoreReader } from "./readFromStore.js";
import { StoreWriter } from "./writeToStore.js";
import { EntityStore, supportsResultCaching } from "./entityStore.js";
import { makeVar, forgetCache, recallCache } from "./reactiveVars.js";
import { Policies } from "./policies.js";
import { hasOwn, normalizeConfig, shouldCanonizeResults } from "./helpers.js";
import { getInMemoryCacheMemoryInternals } from "../../utilities/caching/getMemoryInternals.js";
// Normalized, in-memory implementation of ApolloCache. Compiled (ES5/tslib)
// output; the ES2015+ class lives in the package's TypeScript sources.
var InMemoryCache = /** @class */ (function (_super) {
    __extends(InMemoryCache, _super);
    function InMemoryCache(config) {
        if (config === void 0) { config = {}; }
        var _this = _super.call(this) || this;
        _this.watches = new Set();
        _this.addTypenameTransform = new DocumentTransform(addTypenameToDocument);
        // Override the default value, since InMemoryCache result objects are frozen
        // in development and expected to remain logically immutable in production.
        _this.assumeImmutableResults = true;
        _this.makeVar = makeVar;
        _this.txCount = 0;
        _this.config = normalizeConfig(config);
        _this.addTypename = !!_this.config.addTypename;
        _this.policies = new Policies({
            cache: _this,
            dataIdFromObject: _this.config.dataIdFromObject,
            possibleTypes: _this.config.possibleTypes,
            typePolicies: _this.config.typePolicies,
        });
        _this.init();
        return _this;
    }
    InMemoryCache.prototype.init = function () {
        // Passing { resultCaching: false } in the InMemoryCache constructor options
        // will completely disable dependency tracking, which will improve memory
        // usage but worsen the performance of repeated reads.
        var rootStore = (this.data = new EntityStore.Root({
            policies: this.policies,
            resultCaching: this.config.resultCaching,
        }));
        // When no optimistic writes are currently active, cache.optimisticData ===
        // cache.data, so there are no additional layers on top of the actual data.
        // When an optimistic update happens, this.optimisticData will become a
        // linked list of EntityStore Layer objects that terminates with the
        // original this.data cache object.
        this.optimisticData = rootStore.stump;
        this.resetResultCache();
    };
    InMemoryCache.prototype.resetResultCache = function (resetResultIdentities) {
        var _this = this;
        var previousReader = this.storeReader;
        var fragments = this.config.fragments;
        // The StoreWriter is mostly stateless and so doesn't really need to be
        // reset, but it does need to have its writer.storeReader reference updated,
        // so it's simpler to update this.storeWriter as well.
        this.storeWriter = new StoreWriter(this, (this.storeReader = new StoreReader({
            cache: this,
            addTypename: this.addTypename,
            resultCacheMaxSize: this.config.resultCacheMaxSize,
            canonizeResults: shouldCanonizeResults(this.config),
            canon: resetResultIdentities ?
                void 0
                : (previousReader && previousReader.canon),
            fragments: fragments,
        })), fragments);
        this.maybeBroadcastWatch = wrap(function (c, options) {
            return _this.broadcastWatch(c, options);
        }, {
            max: this.config.resultCacheMaxSize ||
                cacheSizes["inMemoryCache.maybeBroadcastWatch"] ||
                5000 /* defaultCacheSizes["inMemoryCache.maybeBroadcastWatch"] */,
            makeCacheKey: function (c) {
                // Return a cache key (thus enabling result caching) only if we're
                // currently using a data store that can track cache dependencies.
                var store = c.optimistic ? _this.optimisticData : _this.data;
                if (supportsResultCaching(store)) {
                    var optimistic = c.optimistic, id = c.id, variables = c.variables;
                    return store.makeCacheKey(c.query, 
                    // Different watches can have the same query, optimistic
                    // status, rootId, and variables, but if their callbacks are
                    // different, the (identical) result needs to be delivered to
                    // each distinct callback. The easiest way to achieve that
                    // separation is to include c.callback in the cache key for
                    // maybeBroadcastWatch calls. See issue #5733.
                    c.callback, canonicalStringify({ optimistic: optimistic, id: id, variables: variables }));
                }
            },
        });
        // Since we have thrown away all the cached functions that depend on the
        // CacheGroup dependencies maintained by EntityStore, we should also reset
        // all CacheGroup dependency information.
        new Set([this.data.group, this.optimisticData.group]).forEach(function (group) {
            return group.resetCaching();
        });
    };
    InMemoryCache.prototype.restore = function (data) {
        this.init();
        // Since calling this.init() discards/replaces the entire StoreReader, along
        // with the result caches it maintains, this.data.replace(data) won't have
        // to bother deleting the old data.
        if (data)
            this.data.replace(data);
        return this;
    };
    InMemoryCache.prototype.extract = function (optimistic) {
        if (optimistic === void 0) { optimistic = false; }
        return (optimistic ? this.optimisticData : this.data).extract();
    };
    InMemoryCache.prototype.read = function (options) {
        var 
        // Since read returns data or null, without any additional metadata
        // about whether/where there might have been missing fields, the
        // default behavior cannot be returnPartialData = true (like it is
        // for the diff method), since defaulting to true would violate the
        // integrity of the T in the return type. However, partial data may
        // be useful in some cases, so returnPartialData:true may be
        // specified explicitly.
        _a = options.returnPartialData, returnPartialData = _a === void 0 ? false : _a;
        try {
            return (this.storeReader.diffQueryAgainstStore(__assign(__assign({}, options), { store: options.optimistic ? this.optimisticData : this.data, config: this.config, returnPartialData: returnPartialData })).result || null);
        }
        catch (e) {
            if (e instanceof MissingFieldError) {
                // Swallow MissingFieldError and return null, so callers do not need to
                // worry about catching "normal" exceptions resulting from incomplete
                // cache data. Unexpected errors will be re-thrown. If you need more
                // information about which fields were missing, use cache.diff instead,
                // and examine diffResult.missing.
                return null;
            }
            throw e;
        }
    };
    InMemoryCache.prototype.write = function (options) {
        try {
            ++this.txCount;
            return this.storeWriter.writeToStore(this.data, options);
        }
        finally {
            if (!--this.txCount && options.broadcast !== false) {
                this.broadcastWatches();
            }
        }
    };
    InMemoryCache.prototype.modify = function (options) {
        if (hasOwn.call(options, "id") && !options.id) {
            // To my knowledge, TypeScript does not currently provide a way to
            // enforce that an optional property?:type must *not* be undefined
            // when present. That ability would be useful here, because we want
            // options.id to default to ROOT_QUERY only when no options.id was
            // provided. If the caller attempts to pass options.id with a
            // falsy/undefined value (perhaps because cache.identify failed), we
            // should not assume the goal was to modify the ROOT_QUERY object.
            // We could throw, but it seems natural to return false to indicate
            // that nothing was modified.
            return false;
        }
        var store = ((options.optimistic) // Defaults to false.
        ) ?
            this.optimisticData
            : this.data;
        try {
            ++this.txCount;
            return store.modify(options.id || "ROOT_QUERY", options.fields);
        }
        finally {
            if (!--this.txCount && options.broadcast !== false) {
                this.broadcastWatches();
            }
        }
    };
    InMemoryCache.prototype.diff = function (options) {
        return this.storeReader.diffQueryAgainstStore(__assign(__assign({}, options), { store: options.optimistic ? this.optimisticData : this.data, rootId: options.id || "ROOT_QUERY", config: this.config }));
    };
    InMemoryCache.prototype.watch = function (watch) {
        var _this = this;
        if (!this.watches.size) {
            // In case we previously called forgetCache(this) because
            // this.watches became empty (see below), reattach this cache to any
            // reactive variables on which it previously depended. It might seem
            // paradoxical that we're able to recall something we supposedly
            // forgot, but the point of calling forgetCache(this) is to silence
            // useless broadcasts while this.watches is empty, and to allow the
            // cache to be garbage collected. If, however, we manage to call
            // recallCache(this) here, this cache object must not have been
            // garbage collected yet, and should resume receiving updates from
            // reactive variables, now that it has a watcher to notify.
            recallCache(this);
        }
        this.watches.add(watch);
        if (watch.immediate) {
            this.maybeBroadcastWatch(watch);
        }
        return function () {
            // Once we remove the last watch from this.watches, cache.broadcastWatches
            // no longer does anything, so we preemptively tell the reactive variable
            // system to exclude this cache from future broadcasts.
            if (_this.watches.delete(watch) && !_this.watches.size) {
                forgetCache(_this);
            }
            // Remove this watch from the LRU cache managed by the
            // maybeBroadcastWatch OptimisticWrapperFunction, to prevent memory
            // leaks involving the closure of watch.callback.
            _this.maybeBroadcastWatch.forget(watch);
        };
    };
    InMemoryCache.prototype.gc = function (options) {
        var _a;
        canonicalStringify.reset();
        print.reset();
        this.addTypenameTransform.resetCache();
        (_a = this.config.fragments) === null || _a === void 0 ? void 0 : _a.resetCaches();
        var ids = this.optimisticData.gc();
        if (options && !this.txCount) {
            if (options.resetResultCache) {
                this.resetResultCache(options.resetResultIdentities);
            }
            else if (options.resetResultIdentities) {
                this.storeReader.resetCanon();
            }
        }
        return ids;
    };
    // Call this method to ensure the given root ID remains in the cache after
    // garbage collection, along with its transitive child entities. Note that
    // the cache automatically retains all directly written entities. By default,
    // the retainment persists after optimistic updates are removed. Pass true
    // for the optimistic argument if you would prefer for the retainment to be
    // discarded when the top-most optimistic layer is removed. Returns the
    // resulting (non-negative) retainment count.
    InMemoryCache.prototype.retain = function (rootId, optimistic) {
        return (optimistic ? this.optimisticData : this.data).retain(rootId);
    };
    // Call this method to undo the effect of the retain method, above. Once the
    // retainment count falls to zero, the given ID will no longer be preserved
    // during garbage collection, though it may still be preserved by other safe
    // entities that refer to it. Returns the resulting (non-negative) retainment
    // count, in case that's useful.
    InMemoryCache.prototype.release = function (rootId, optimistic) {
        return (optimistic ? this.optimisticData : this.data).release(rootId);
    };
    // Returns the canonical ID for a given StoreObject, obeying typePolicies
    // and keyFields (and dataIdFromObject, if you still use that). At minimum,
    // the object must contain a __typename and any primary key fields required
    // to identify entities of that type. If you pass a query result object, be
    // sure that none of the primary key fields have been renamed by aliasing.
    // If you pass a Reference object, its __ref ID string will be returned.
    InMemoryCache.prototype.identify = function (object) {
        if (isReference(object))
            return object.__ref;
        try {
            return this.policies.identify(object)[0];
        }
        catch (e) {
            globalThis.__DEV__ !== false && invariant.warn(e);
        }
    };
    InMemoryCache.prototype.evict = function (options) {
        if (!options.id) {
            if (hasOwn.call(options, "id")) {
                // See comment in modify method about why we return false when
                // options.id exists but is falsy/undefined.
                return false;
            }
            options = __assign(__assign({}, options), { id: "ROOT_QUERY" });
        }
        try {
            // It's unlikely that the eviction will end up invoking any other
            // cache update operations while it's running, but {in,de}crementing
            // this.txCount still seems like a good idea, for uniformity with
            // the other update methods.
            ++this.txCount;
            // Pass this.data as a limit on the depth of the eviction, so evictions
            // during optimistic updates (when this.data is temporarily set equal to
            // this.optimisticData) do not escape their optimistic Layer.
            return this.optimisticData.evict(options, this.data);
        }
        finally {
            if (!--this.txCount && options.broadcast !== false) {
                this.broadcastWatches();
            }
        }
    };
    InMemoryCache.prototype.reset = function (options) {
        var _this = this;
        this.init();
        canonicalStringify.reset();
        if (options && options.discardWatches) {
            // Similar to what happens in the unsubscribe function returned by
            // cache.watch, applied to all current watches.
            this.watches.forEach(function (watch) { return _this.maybeBroadcastWatch.forget(watch); });
            this.watches.clear();
            forgetCache(this);
        }
        else {
            // Calling this.init() above unblocks all maybeBroadcastWatch caching, so
            // this.broadcastWatches() triggers a broadcast to every current watcher
            // (letting them know their data is now missing). This default behavior is
            // convenient because it means the watches do not have to be manually
            // reestablished after resetting the cache. To prevent this broadcast and
            // cancel all watches, pass true for options.discardWatches.
            this.broadcastWatches();
        }
        return Promise.resolve();
    };
    InMemoryCache.prototype.removeOptimistic = function (idToRemove) {
        var newOptimisticData = this.optimisticData.removeLayer(idToRemove);
        if (newOptimisticData !== this.optimisticData) {
            this.optimisticData = newOptimisticData;
            this.broadcastWatches();
        }
    };
    InMemoryCache.prototype.batch = function (options) {
        var _this = this;
        var update = options.update, _a = options.optimistic, optimistic = _a === void 0 ? true : _a, removeOptimistic = options.removeOptimistic, onWatchUpdated = options.onWatchUpdated;
        var updateResult;
        var perform = function (layer) {
            var _a = _this, data = _a.data, optimisticData = _a.optimisticData;
            ++_this.txCount;
            if (layer) {
                _this.data = _this.optimisticData = layer;
            }
            try {
                return (updateResult = update(_this));
            }
            finally {
                --_this.txCount;
                _this.data = data;
                _this.optimisticData = optimisticData;
            }
        };
        var alreadyDirty = new Set();
        if (onWatchUpdated && !this.txCount) {
            // If an options.onWatchUpdated callback is provided, we want to call it
            // with only the Cache.WatchOptions objects affected by options.update,
            // but there might be dirty watchers already waiting to be broadcast that
            // have nothing to do with the update. To prevent including those watchers
            // in the post-update broadcast, we perform this initial broadcast to
            // collect the dirty watchers, so we can re-dirty them later, after the
            // post-update broadcast, allowing them to receive their pending
            // broadcasts the next time broadcastWatches is called, just as they would
            // if we never called cache.batch.
            this.broadcastWatches(__assign(__assign({}, options), { onWatchUpdated: function (watch) {
                    alreadyDirty.add(watch);
                    return false;
                } }));
        }
        if (typeof optimistic === "string") {
            // Note that there can be multiple layers with the same optimistic ID.
            // When removeOptimistic(id) is called for that id, all matching layers
            // will be removed, and the remaining layers will be reapplied.
            this.optimisticData = this.optimisticData.addLayer(optimistic, perform);
        }
        else if (optimistic === false) {
            // Ensure both this.data and this.optimisticData refer to the root
            // (non-optimistic) layer of the cache during the update. Note that
            // this.data could be a Layer if we are currently executing an optimistic
            // update function, but otherwise will always be an EntityStore.Root
            // instance.
            perform(this.data);
        }
        else {
            // Otherwise, leave this.data and this.optimisticData unchanged and run
            // the update with broadcast batching.
            perform();
        }
        if (typeof removeOptimistic === "string") {
            this.optimisticData = this.optimisticData.removeLayer(removeOptimistic);
        }
        // Note: if this.txCount > 0, then alreadyDirty.size === 0, so this code
        // takes the else branch and calls this.broadcastWatches(options), which
        // does nothing when this.txCount > 0.
        if (onWatchUpdated && alreadyDirty.size) {
            this.broadcastWatches(__assign(__assign({}, options), { onWatchUpdated: function (watch, diff) {
                    var result = onWatchUpdated.call(this, watch, diff);
                    if (result !== false) {
                        // Since onWatchUpdated did not return false, this diff is
                        // about to be broadcast to watch.callback, so we don't need
                        // to re-dirty it with the other alreadyDirty watches below.
                        alreadyDirty.delete(watch);
                    }
                    return result;
                } }));
            // Silently re-dirty any watches that were already dirty before the update
            // was performed, and were not broadcast just now.
            if (alreadyDirty.size) {
                alreadyDirty.forEach(function (watch) { return _this.maybeBroadcastWatch.dirty(watch); });
            }
        }
        else {
            // If alreadyDirty is empty or we don't have an onWatchUpdated
            // function, we don't need to go to the trouble of wrapping
            // options.onWatchUpdated.
            this.broadcastWatches(options);
        }
        return updateResult;
    };
    InMemoryCache.prototype.performTransaction = function (update, optimisticId) {
        return this.batch({
            update: update,
            optimistic: optimisticId || optimisticId !== null,
        });
    };
    InMemoryCache.prototype.transformDocument = function (document) {
        return this.addTypenameToDocument(this.addFragmentsToDocument(document));
    };
    InMemoryCache.prototype.broadcastWatches = function (options) {
        var _this = this;
        if (!this.txCount) {
            this.watches.forEach(function (c) { return _this.maybeBroadcastWatch(c, options); });
        }
    };
    InMemoryCache.prototype.addFragmentsToDocument = function (document) {
        var fragments = this.config.fragments;
        return fragments ? fragments.transform(document) : document;
    };
    InMemoryCache.prototype.addTypenameToDocument = function (document) {
        if (this.addTypename) {
            return this.addTypenameTransform.transformDocument(document);
        }
        return document;
    };
    // This method is wrapped by maybeBroadcastWatch, which is called by
    // broadcastWatches, so that we compute and broadcast results only when
    // the data that would be broadcast might have changed. It would be
    // simpler to check for changes after recomputing a result but before
    // broadcasting it, but this wrapping approach allows us to skip both
    // the recomputation and the broadcast, in most cases.
    InMemoryCache.prototype.broadcastWatch = function (c, options) {
        var lastDiff = c.lastDiff;
        // Both WatchOptions and DiffOptions extend ReadOptions, and DiffOptions
        // currently requires no additional properties, so we can use c (a
        // WatchOptions object) as DiffOptions, without having to allocate a new
        // object, and without having to enumerate the relevant properties (query,
        // variables, etc.) explicitly. There will be some additional properties
        // (lastDiff, callback, etc.), but cache.diff ignores them.
        var diff = this.diff(c);
        if (options) {
            if (c.optimistic && typeof options.optimistic === "string") {
                diff.fromOptimisticTransaction = true;
            }
            if (options.onWatchUpdated &&
                options.onWatchUpdated.call(this, c, diff, lastDiff) === false) {
                // Returning false from the onWatchUpdated callback will prevent
                // calling c.callback(diff) for this watcher.
                return;
            }
        }
        if (!lastDiff || !equal(lastDiff.result, diff.result)) {
            c.callback((c.lastDiff = diff), lastDiff);
        }
    };
    return InMemoryCache;
}(ApolloCache));
export { InMemoryCache };
if (globalThis.__DEV__ !== false) {
    InMemoryCache.prototype.getMemoryInternals = getInMemoryCacheMemoryInternals;
}
//# sourceMappingURL=inMemoryCache.js.map