@apollo/client
A fully-featured caching GraphQL client.
"use strict";
const {
__DEV__
} = require("@apollo/client/utilities/environment");
Object.defineProperty(exports, "__esModule", { value: true });
exports.StoreWriter = void 0;
const equality_1 = require("@wry/equality");
const trie_1 = require("@wry/trie");
const graphql_1 = require("graphql");
const utilities_1 = require("@apollo/client/utilities");
const environment_1 = require("@apollo/client/utilities/environment");
const internal_1 = require("@apollo/client/utilities/internal");
const invariant_1 = require("@apollo/client/utilities/invariant");
const helpers_js_1 = require("./helpers.cjs");
const policies_js_1 = require("./policies.cjs");
// Since there are only four possible combinations of context.clientOnly and
// context.deferred values, we should need at most four "flavors" of any given
// WriteContext. To avoid creating multiple copies of the same context, we cache
// the contexts in the context.flavors Map (shared by all flavors) according to
// their clientOnly and deferred values (always in that order).
function getContextFlavor(context, clientOnly, deferred) {
const key = `${clientOnly}${deferred}`;
let flavored = context.flavors.get(key);
if (!flavored) {
context.flavors.set(key, (flavored =
context.clientOnly === clientOnly && context.deferred === deferred ?
context
: {
...context,
clientOnly,
deferred,
}));
}
return flavored;
}
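// Illustrative sketch (not part of the module): how the flavor cache behaves
// for a hand-built context containing only the fields getContextFlavor reads.
// Note that the object spread copies the same flavors Map reference, so the
// cache is shared by every flavor of the same underlying context.
//
//   const base = { clientOnly: false, deferred: false, flavors: new Map() };
//   const a = getContextFlavor(base, true, false);  // new flavored copy
//   const b = getContextFlavor(base, true, false);  // cache hit, so a === b
//   const c = getContextFlavor(base, false, false); // matches base, so c === base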
class StoreWriter {
cache;
reader;
fragments;
constructor(cache, reader, fragments) {
this.cache = cache;
this.reader = reader;
this.fragments = fragments;
}
writeToStore(store, { query, result, dataId, variables, overwrite, }) {
const operationDefinition = (0, internal_1.getOperationDefinition)(query);
const merger = (0, helpers_js_1.makeProcessedFieldsMerger)();
variables = {
...(0, internal_1.getDefaultValues)(operationDefinition),
...variables,
};
const context = {
store,
written: {},
merge(existing, incoming) {
return merger.merge(existing, incoming);
},
variables: variables,
varString: (0, utilities_1.canonicalStringify)(variables),
...(0, helpers_js_1.extractFragmentContext)(query, this.fragments),
overwrite: !!overwrite,
incomingById: new Map(),
clientOnly: false,
deferred: false,
flavors: new Map(),
};
const ref = this.processSelectionSet({
result: result || {},
dataId,
selectionSet: operationDefinition.selectionSet,
mergeTree: { map: new Map() },
context,
});
if (!(0, utilities_1.isReference)(ref)) {
throw (0, invariant_1.newInvariantError)(106, result);
}
// So far, the store has not been modified, so now it's time to process
// context.incomingById and merge those incoming fields into context.store.
context.incomingById.forEach(({ storeObject, mergeTree, fieldNodeSet }, dataId) => {
const entityRef = (0, internal_1.makeReference)(dataId);
if (mergeTree && mergeTree.map.size) {
const applied = this.applyMerges(mergeTree, entityRef, storeObject, context);
if ((0, utilities_1.isReference)(applied)) {
// Assume References returned by applyMerges have already been merged
// into the store. See makeMergeObjectsFunction in policies.ts for an
// example of how this can happen.
return;
}
// Otherwise, applyMerges returned a StoreObject, whose fields we should
// merge into the store (see store.merge statement below).
storeObject = applied;
}
if (environment_1.__DEV__ && !context.overwrite) {
const fieldsWithSelectionSets = {};
fieldNodeSet.forEach((field) => {
if (field.selectionSet) {
fieldsWithSelectionSets[field.name.value] = true;
}
});
const hasSelectionSet = (storeFieldName) => fieldsWithSelectionSets[(0, helpers_js_1.fieldNameFromStoreName)(storeFieldName)] ===
true;
const hasMergeFunction = (storeFieldName) => {
const childTree = mergeTree && mergeTree.map.get(storeFieldName);
return Boolean(childTree && childTree.info && childTree.info.merge);
};
Object.keys(storeObject).forEach((storeFieldName) => {
// If a merge function was defined for this field, trust that it
// did the right thing about (not) clobbering data. If the field
// has no selection set, it's a scalar field, so it doesn't need
// a merge function (even if it's an object, like JSON data).
if (hasSelectionSet(storeFieldName) &&
!hasMergeFunction(storeFieldName)) {
warnAboutDataLoss(entityRef, storeObject, storeFieldName, context.store);
}
});
}
store.merge(dataId, storeObject);
});
// Any IDs written explicitly to the cache will be retained as
// reachable root IDs for garbage collection purposes. Although this
// logic includes root IDs like ROOT_QUERY and ROOT_MUTATION, their
// retainment counts are effectively ignored because cache.gc() always
// includes them in its root ID set.
store.retain(ref.__ref);
return ref;
}
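    // Illustrative sketch (hypothetical wiring; InMemoryCache normally calls
    // this method internally, and the `writer` and `entityStore` names below
    // are assumptions): a single writeToStore call accumulates all incoming
    // fields per entity in context.incomingById, so store.merge runs at most
    // once per entity ID even when the same entity appears several times in
    // the result.
    //
    //   writer.writeToStore(entityStore, {
    //     query,                                   // parsed DocumentNode
    //     result: { currentUser: { __typename: "User", id: 1, name: "A" } },
    //     dataId: "ROOT_QUERY",
    //     variables: {},
    //     overwrite: false,
    //   });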
processSelectionSet({ dataId, result, selectionSet, context,
// This object allows processSelectionSet to report useful information
// to its callers without explicitly returning that information.
mergeTree, }) {
const { policies } = this.cache;
// This variable will be repeatedly updated using context.merge to
// accumulate all fields that need to be written into the store.
let incoming = {};
// If typename was not passed in, infer it. Note that typename is
// always passed in for tricky-to-infer cases such as "Query" for
// ROOT_QUERY.
const typename = (dataId && policies.rootTypenamesById[dataId]) ||
getTypenameFromResult(result, selectionSet, context.fragmentMap) ||
(dataId && context.store.get(dataId, "__typename"));
if ("string" === typeof typename) {
incoming.__typename = typename;
}
// This readField function will be passed as context.readField in the
// KeyFieldsContext object created within policies.identify (called below).
// In addition to reading from the existing context.store (thanks to the
// policies.readField(options, context) line at the very bottom), this
// version of readField can read from Reference objects that are currently
// pending in context.incomingById, which is important whenever keyFields
// need to be extracted from a child object that processSelectionSet has
// turned into a Reference.
const readField = (...args) => {
const options = (0, policies_js_1.normalizeReadFieldOptions)(args, incoming, context.variables);
if ((0, utilities_1.isReference)(options.from)) {
const info = context.incomingById.get(options.from.__ref);
if (info) {
const result = policies.readField({
...options,
from: info.storeObject,
}, context);
if (result !== void 0) {
return result;
}
}
}
return policies.readField(options, context);
};
const fieldNodeSet = new Set();
this.flattenFields(selectionSet, result,
// This WriteContext will be the default context value for fields returned
// by the flattenFields method, but some fields may be assigned a modified
// context, depending on the presence of @client and other directives.
context, typename).forEach((context, field) => {
const resultFieldKey = (0, internal_1.resultKeyNameFromField)(field);
const value = result[resultFieldKey];
fieldNodeSet.add(field);
if (value !== void 0) {
const storeFieldName = policies.getStoreFieldName({
typename,
fieldName: field.name.value,
field,
variables: context.variables,
});
const childTree = getChildMergeTree(mergeTree, storeFieldName);
let incomingValue = this.processFieldValue(value, field,
// Reset context.clientOnly and context.deferred to their default
// values before processing nested selection sets.
field.selectionSet ?
getContextFlavor(context, false, false)
: context, childTree);
// To determine if this field holds a child object with a merge function
// defined in its type policy (see PR #7070), we need to figure out the
// child object's __typename.
let childTypename;
// The field's value can be an object that has a __typename only if the
// field has a selection set. Otherwise incomingValue is scalar.
if (field.selectionSet &&
((0, utilities_1.isReference)(incomingValue) || (0, helpers_js_1.storeValueIsStoreObject)(incomingValue))) {
childTypename = readField("__typename", incomingValue);
}
const merge = policies.getMergeFunction(typename, field.name.value, childTypename);
if (merge) {
childTree.info = {
// TODO Check compatibility against any existing childTree.field?
field,
typename,
merge,
};
}
else {
maybeRecycleChildMergeTree(mergeTree, storeFieldName);
}
incoming = context.merge(incoming, {
[storeFieldName]: incomingValue,
});
}
else if (environment_1.__DEV__ &&
!context.clientOnly &&
!context.deferred &&
!utilities_1.addTypenameToDocument.added(field) &&
// If the field has a read function, it may be a synthetic field or
// provide a default value, so its absence from the written data should
// not be cause for alarm.
!policies.getReadFunction(typename, field.name.value)) {
invariant_1.invariant.error(107, (0, internal_1.resultKeyNameFromField)(field), result);
}
});
// Identify the result object, even if dataId was already provided,
// since we always need keyObject below.
try {
const [id, keyObject] = policies.identify(result, {
typename,
selectionSet,
fragmentMap: context.fragmentMap,
storeObject: incoming,
readField,
});
// If dataId was not provided, fall back to the id just generated by
// policies.identify.
dataId = dataId || id;
// Write any key fields that were used during identification, even if
// they were not mentioned in the original query.
if (keyObject) {
// TODO Reverse the order of the arguments?
incoming = context.merge(incoming, keyObject);
}
}
catch (e) {
// If dataId was provided, tolerate failure of policies.identify.
if (!dataId)
throw e;
}
if ("string" === typeof dataId) {
const dataRef = (0, internal_1.makeReference)(dataId);
// Avoid processing the same entity object using the same selection
// set more than once. We use an array instead of a Set since most
// entity IDs will be written using only one selection set, so the
// size of this array is likely to be very small, meaning indexOf is
// likely to be faster than Set.prototype.has.
const sets = context.written[dataId] || (context.written[dataId] = []);
if (sets.indexOf(selectionSet) >= 0)
return dataRef;
sets.push(selectionSet);
// If we're about to write a result object into the store, but we
// happen to know that the exact same (===) result object would be
// returned if we were to reread the result with the same inputs,
// then we can skip the rest of the processSelectionSet work for
// this object, and immediately return a Reference to it.
if (this.reader &&
this.reader.isFresh(result, dataRef, selectionSet, context)) {
return dataRef;
}
const previous = context.incomingById.get(dataId);
if (previous) {
previous.storeObject = context.merge(previous.storeObject, incoming);
previous.mergeTree = mergeMergeTrees(previous.mergeTree, mergeTree);
fieldNodeSet.forEach((field) => previous.fieldNodeSet.add(field));
}
else {
context.incomingById.set(dataId, {
storeObject: incoming,
// Save a reference to mergeTree only if it is not empty, because
// empty MergeTrees may be recycled by maybeRecycleChildMergeTree and
// reused for entirely different parts of the result tree.
mergeTree: mergeTreeIsEmpty(mergeTree) ? void 0 : mergeTree,
fieldNodeSet,
});
}
return dataRef;
}
return incoming;
}
processFieldValue(value, field, context, mergeTree) {
if (!field.selectionSet || value === null) {
// In development, we need to clone scalar values so that they can be
// safely frozen with maybeDeepFreeze in readFromStore.ts. In production,
// it's cheaper to store the scalar values directly in the cache.
return environment_1.__DEV__ ? (0, internal_1.cloneDeep)(value) : value;
}
if ((0, internal_1.isArray)(value)) {
return value.map((item, i) => {
const value = this.processFieldValue(item, field, context, getChildMergeTree(mergeTree, i));
maybeRecycleChildMergeTree(mergeTree, i);
return value;
});
}
return this.processSelectionSet({
result: value,
selectionSet: field.selectionSet,
context,
mergeTree,
});
}
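    // Illustrative sketch of the three branches above (assume field, context,
    // and tree are already in scope, and field has a selection set):
    //
    //   writer.processFieldValue(null, field, context, tree);      // null kept as-is
    //   writer.processFieldValue([a, b], field, context, tree);    // recurse per index
    //   writer.processFieldValue({ x: 1 }, field, context, tree);  // processSelectionSet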
// Implements https://spec.graphql.org/draft/#sec-Field-Collection, but with
// some additions for tracking @client and @defer directives.
flattenFields(selectionSet, result, context, typename = getTypenameFromResult(result, selectionSet, context.fragmentMap)) {
const fieldMap = new Map();
const { policies } = this.cache;
const limitingTrie = new trie_1.Trie(false); // No need for WeakMap, since limitingTrie does not escape.
(function flatten(selectionSet, inheritedContext) {
const visitedNode = limitingTrie.lookup(selectionSet,
// Because we take inheritedClientOnly and inheritedDeferred into
// consideration here (in addition to selectionSet), it's possible for
// the same selection set to be flattened more than once, if it appears
// in the query with different @client and/or @defer directive configurations.
inheritedContext.clientOnly, inheritedContext.deferred);
if (visitedNode.visited)
return;
visitedNode.visited = true;
selectionSet.selections.forEach((selection) => {
if (!(0, internal_1.shouldInclude)(selection, context.variables))
return;
let { clientOnly, deferred } = inheritedContext;
if (
// Since the presence of @client or @defer on this field can only
// cause clientOnly or deferred to become true, we can skip the
// forEach loop if both clientOnly and deferred are already true.
!(clientOnly && deferred) &&
(0, internal_1.isNonEmptyArray)(selection.directives)) {
selection.directives.forEach((dir) => {
const name = dir.name.value;
if (name === "client")
clientOnly = true;
if (name === "defer") {
const args = (0, internal_1.argumentsObjectFromField)(dir, context.variables);
// The @defer directive takes an optional args.if boolean
// argument, similar to @include(if: boolean). Note that
// @defer(if: false) does not make context.deferred false, but
// instead behaves as if there was no @defer directive.
if (!args || args.if !== false) {
deferred = true;
}
// TODO In the future, we may want to record args.label using
// context.deferred, if a label is specified.
}
});
}
if ((0, internal_1.isField)(selection)) {
const existing = fieldMap.get(selection);
if (existing) {
// If this field has been visited along another recursive path
// before, the final context should have clientOnly or deferred set
// to true only if *all* paths have the directive (hence the &&).
clientOnly = clientOnly && existing.clientOnly;
deferred = deferred && existing.deferred;
}
fieldMap.set(selection, getContextFlavor(context, clientOnly, deferred));
}
else {
const fragment = (0, internal_1.getFragmentFromSelection)(selection, context.lookupFragment);
if (!fragment && selection.kind === graphql_1.Kind.FRAGMENT_SPREAD) {
throw (0, invariant_1.newInvariantError)(108, selection.name.value);
}
if (fragment &&
policies.fragmentMatches(fragment, typename, result, context.variables)) {
flatten(fragment.selectionSet, getContextFlavor(context, clientOnly, deferred));
}
}
});
})(selectionSet, context);
return fieldMap;
}
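    // Illustrative sketch (hypothetical query): the same FieldNode reached
    // along two paths keeps a directive only if *all* paths apply it. Here
    // `name` is spread once with @client and once without, so its final
    // context has clientOnly === false and the field is written normally.
    // Similarly, @defer(if: false) leaves deferred === false, behaving as if
    // @defer were absent.
    //
    //   const query = gql`
    //     fragment Name on User { name }
    //     query {
    //       user {
    //         ...Name @client
    //         ...Name
    //       }
    //     }
    //   `;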
applyMerges(mergeTree, existing, incoming, context, getStorageArgs) {
if (mergeTree.map.size && !(0, utilities_1.isReference)(incoming)) {
const e =
// Items in the same position in different arrays are not
// necessarily related to each other, so when incoming is an array
// we process its elements as if there was no existing data.
(!(0, internal_1.isArray)(incoming) &&
// Likewise, existing must be either a Reference or a StoreObject
// in order for its fields to be safe to merge with the fields of
// the incoming object.
((0, utilities_1.isReference)(existing) || (0, helpers_js_1.storeValueIsStoreObject)(existing))) ?
existing
: void 0;
// This narrowing is implied by mergeTree.map.size > 0 and
// !isReference(incoming), though TypeScript understandably cannot
// hope to infer this type.
const i = incoming;
// The options.storage objects provided to read and merge functions
// are derived from the identity of the parent object plus a
// sequence of storeFieldName strings/numbers identifying the nested
// field name path of each field value to be merged.
if (e && !getStorageArgs) {
getStorageArgs = [(0, utilities_1.isReference)(e) ? e.__ref : e];
}
// It's possible that applying merge functions to this subtree will
// not change the incoming data, so this variable tracks the fields
// that did change, so we can create a new incoming object when (and
// only when) at least one incoming field has changed. We use a Map
// to preserve the type of numeric keys.
let changedFields;
const getValue = (from, name) => {
return ((0, internal_1.isArray)(from) ?
typeof name === "number" ?
from[name]
: void 0
: context.store.getFieldValue(from, String(name)));
};
mergeTree.map.forEach((childTree, storeFieldName) => {
const eVal = getValue(e, storeFieldName);
const iVal = getValue(i, storeFieldName);
// If we have no incoming data, leave any existing data untouched.
if (void 0 === iVal)
return;
if (getStorageArgs) {
getStorageArgs.push(storeFieldName);
}
const aVal = this.applyMerges(childTree, eVal, iVal, context, getStorageArgs);
if (aVal !== iVal) {
changedFields = changedFields || new Map();
changedFields.set(storeFieldName, aVal);
}
if (getStorageArgs) {
(0, invariant_1.invariant)(getStorageArgs.pop() === storeFieldName);
}
});
if (changedFields) {
// Shallow clone i so we can add changed fields to it.
incoming = ((0, internal_1.isArray)(i) ? i.slice(0) : { ...i });
changedFields.forEach((value, name) => {
incoming[name] = value;
});
}
}
if (mergeTree.info) {
return this.cache.policies.runMergeFunction(existing, incoming, mergeTree.info, context, getStorageArgs && context.store.getStorage(...getStorageArgs));
}
return incoming;
}
}
exports.StoreWriter = StoreWriter;
const emptyMergeTreePool = [];
function getChildMergeTree({ map }, name) {
if (!map.has(name)) {
map.set(name, emptyMergeTreePool.pop() || { map: new Map() });
}
return map.get(name);
}
function mergeMergeTrees(left, right) {
if (left === right || !right || mergeTreeIsEmpty(right))
return left;
if (!left || mergeTreeIsEmpty(left))
return right;
const info = left.info && right.info ?
{
...left.info,
...right.info,
}
: left.info || right.info;
const needToMergeMaps = left.map.size && right.map.size;
const map = needToMergeMaps ? new Map()
: left.map.size ? left.map
: right.map;
const merged = { info, map };
if (needToMergeMaps) {
const remainingRightKeys = new Set(right.map.keys());
left.map.forEach((leftTree, key) => {
merged.map.set(key, mergeMergeTrees(leftTree, right.map.get(key)));
remainingRightKeys.delete(key);
});
remainingRightKeys.forEach((key) => {
merged.map.set(key, mergeMergeTrees(right.map.get(key), left.map.get(key)));
});
}
return merged;
}
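// Illustrative sketch (hand-built trees): child maps are unioned recursively,
// and info objects are shallow-merged with right.info winning on conflicts.
//
//   const left  = { map: new Map([["a", { map: new Map() }]]) };
//   const right = { map: new Map([["b", { map: new Map() }]]) };
//   const out = mergeMergeTrees(left, right);
//   out.map.has("a") && out.map.has("b");      // => true
//   mergeMergeTrees(left, { map: new Map() }); // => left (empty right is a no-op)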
function mergeTreeIsEmpty(tree) {
return !tree || !(tree.info || tree.map.size);
}
function maybeRecycleChildMergeTree({ map }, name) {
const childTree = map.get(name);
if (childTree && mergeTreeIsEmpty(childTree)) {
emptyMergeTreePool.push(childTree);
map.delete(name);
}
}
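// Illustrative sketch of the pooling round trip implemented by the two
// helpers above:
//
//   const parent = { map: new Map() };
//   const child = getChildMergeTree(parent, "author"); // fresh or recycled
//   // ...child.info is never assigned and child.map stays empty...
//   maybeRecycleChildMergeTree(parent, "author");
//   // child is detached from parent and returned to emptyMergeTreePool,
//   // so an unrelated getChildMergeTree call may reuse the same object.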
const warnings = new Set();
// Note that this function is unused in production, and thus should be
// pruned by any well-configured minifier.
function warnAboutDataLoss(existingRef, incomingObj, storeFieldName, store) {
const getChild = (objOrRef) => {
const child = store.getFieldValue(objOrRef, storeFieldName);
return typeof child === "object" && child;
};
const existing = getChild(existingRef);
if (!existing)
return;
const incoming = getChild(incomingObj);
if (!incoming)
return;
// It's always safe to replace a reference, since it refers to data
// safely stored elsewhere.
if ((0, utilities_1.isReference)(existing))
return;
// If the values are structurally equivalent, we do not need to worry
// about incoming replacing existing.
if ((0, equality_1.equal)(existing, incoming))
return;
// If we're replacing every key of the existing object, then the
// existing data would be overwritten even if the objects were
// normalized, so warning would not be helpful here.
if (Object.keys(existing).every((key) => store.getFieldValue(incoming, key) !== void 0)) {
return;
}
const parentType = store.getFieldValue(existingRef, "__typename") ||
store.getFieldValue(incomingObj, "__typename");
const fieldName = (0, helpers_js_1.fieldNameFromStoreName)(storeFieldName);
const typeDotName = `${parentType}.${fieldName}`;
// Avoid warning more than once for the same type and field name.
if (warnings.has(typeDotName))
return;
warnings.add(typeDotName);
const childTypenames = [];
// Arrays do not have __typename fields, and always need a custom merge
// function, even if their elements are normalized entities.
if (!(0, internal_1.isArray)(existing) && !(0, internal_1.isArray)(incoming)) {
[existing, incoming].forEach((child) => {
const typename = store.getFieldValue(child, "__typename");
if (typeof typename === "string" && !childTypenames.includes(typename)) {
childTypenames.push(typename);
}
});
}
__DEV__ && invariant_1.invariant.warn(109, fieldName, parentType, childTypenames.length ?
"either ensure all objects of type " +
childTypenames.join(" and ") +
" have an ID or a custom merge function, or "
: "", typeDotName, { ...existing }, { ...incoming });
}
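// Illustrative sketch (hypothetical schema and queries, mirroring the
// scenario this warning describes): Author has no key fields and Book.author
// has no custom merge function, so the second write would silently replace
// { name: "A" }.
//
//   cache.writeQuery({ query: q1, data: { book: { __typename: "Book", id: 1,
//     author: { __typename: "Author", name: "A" } } } });
//   cache.writeQuery({ query: q2, data: { book: { __typename: "Book", id: 1,
//     author: { __typename: "Author", dateOfBirth: "1960-01-01" } } } });
//   // warnAboutDataLoss fires once per "Book.author" in development.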
function getTypenameFromResult(result, selectionSet, fragmentMap) {
let fragments;
for (const selection of selectionSet.selections) {
if ((0, internal_1.isField)(selection)) {
if (selection.name.value === "__typename") {
return result[(0, internal_1.resultKeyNameFromField)(selection)];
}
}
else if (fragments) {
fragments.push(selection);
}
else {
fragments = [selection];
}
}
if (typeof result.__typename === "string") {
return result.__typename;
}
if (fragments) {
for (const selection of fragments) {
const typename = getTypenameFromResult(result, (0, internal_1.getFragmentFromSelection)(selection, fragmentMap).selectionSet, fragmentMap);
if (typeof typename === "string") {
return typename;
}
}
}
}
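// Illustrative sketch of the precedence implemented above: an explicit (even
// aliased) __typename field in the selection set wins, then a string
// result.__typename, then the first fragment whose selection set yields one.
// The `userSelectionSet` name below is an assumption, standing for the
// selection set of `user { tn: __typename }`.
//
//   getTypenameFromResult({ tn: "User" }, userSelectionSet, fragmentMap); // "User"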
//# sourceMappingURL=writeToStore.cjs.map