// @apollo/client — a fully-featured caching GraphQL client.
// Compiled CommonJS build of the InMemoryCache policies module
// (616 lines, 614 loc, 31.2 kB, JavaScript).
"use strict";;
const {
__DEV__
} = require("@apollo/client/utilities/environment");
Object.defineProperty(exports, "__esModule", { value: true });
exports.Policies = void 0;
exports.normalizeReadFieldOptions = normalizeReadFieldOptions;
const masking_1 = require("@apollo/client/masking");
const utilities_1 = require("@apollo/client/utilities");
const environment_1 = require("@apollo/client/utilities/environment");
const internal_1 = require("@apollo/client/utilities/internal");
const invariant_1 = require("@apollo/client/utilities/invariant");
const helpers_js_1 = require("./helpers.cjs");
const key_extractor_js_1 = require("./key-extractor.cjs");
const reactiveVars_js_1 = require("./reactiveVars.cjs");
// Resolve the arguments for a field specifier: prefer an explicitly
// provided `args` object; otherwise derive the arguments from the field
// AST node (when present); fall back to null.
function argsFromFieldSpecifier(spec) {
  if (spec.args !== void 0) {
    return spec.args;
  }
  if (spec.field) {
    return (0, internal_1.argumentsObjectFromField)(spec.field, spec.variables);
  }
  return null;
}
// keyFields: false — never normalize objects of this type (no ID).
const nullKeyFieldsFn = () => void 0;
// keyArgs: false — identify the field by its name alone, ignoring args.
const simpleKeyArgsFn = (_args, context) => context.fieldName;
// merge: true — shorthand for options.mergeObjects(existing, incoming).
const mergeTrueFn = (existing, incoming, options) => options.mergeObjects(existing, incoming);
// merge: false — incoming data always replaces existing data.
const mergeFalseFn = (_existing, incoming) => incoming;
/**
 * Policies manages the type policies and field policies configured for an
 * InMemoryCache: computing cache IDs (identify), matching fragment type
 * conditions against __typename via possibleTypes (fragmentMatches),
 * computing store field names from keyArgs (getStoreFieldName), and
 * running custom read/merge field functions (readField, runMergeFunction).
 */
class Policies {
config;
// Lazily populated map from __typename to its (inheritance-resolved)
// TypePolicy; see getTypePolicy.
typePolicies = {};
// Pending policies registered via addTypePolicies, keyed by __typename,
// drained the first time getTypePolicy runs for that typename.
toBeAdded = {};
// Map from subtype names to sets of supertype names. Note that this
// representation inverts the structure of possibleTypes (whose keys are
// supertypes and whose values are arrays of subtypes) because it tends
// to be much more efficient to search upwards than downwards.
supertypeMap = new Map();
// Any fuzzy subtypes specified by possibleTypes will be converted to
// RegExp objects and recorded here. Every key of this map can also be
// found in supertypeMap. In many cases this Map will be empty, which
// means no fuzzy subtype checking will happen in fragmentMatches.
fuzzySubtypes = new Map();
cache;
// Two-way association between root operation typenames (e.g. "Query")
// and their fixed store IDs (e.g. "ROOT_QUERY"); see setRootTypename.
rootIdsByTypename = {};
rootTypenamesById = {};
usingPossibleTypes = false;
constructor(config) {
this.config = config;
// Re-assign with defaults applied; dataIdFromObject falls back to
// defaultDataIdFromObject unless the caller supplied one.
this.config = {
dataIdFromObject: helpers_js_1.defaultDataIdFromObject,
...config,
};
this.cache = this.config.cache;
this.setRootTypename("Query");
this.setRootTypename("Mutation");
this.setRootTypename("Subscription");
if (config.possibleTypes) {
this.addPossibleTypes(config.possibleTypes);
}
if (config.typePolicies) {
this.addTypePolicies(config.typePolicies);
}
}
// Compute the cache ID for object, returning [id] or [id, keyObject].
// The keyFn chain starts with the type policy's keyFn (falling back to
// config.dataIdFromObject); a keyFn may return a key-field specifier
// array, which is converted into a new keyFn and retried until a
// string (or undefined) is produced.
identify(object, partialContext) {
const policies = this;
const typename = (partialContext &&
(partialContext.typename || partialContext.storeObject?.__typename)) ||
object.__typename;
// It should be possible to write root Query fields with writeFragment,
// using { __typename: "Query", ... } as the data, but it does not make
// sense to allow the same identification behavior for the Mutation and
// Subscription types, since application code should never be writing
// directly to (or reading directly from) those root objects.
if (typename === this.rootTypenamesById.ROOT_QUERY) {
return ["ROOT_QUERY"];
}
// Default context.storeObject to object if not otherwise provided.
const storeObject = (partialContext && partialContext.storeObject) || object;
const context = {
...partialContext,
typename,
storeObject,
readField: (partialContext && partialContext.readField) ||
((...args) => {
const options = normalizeReadFieldOptions(args, storeObject);
return policies.readField(options, {
store: policies.cache["data"],
variables: options.variables,
});
}),
};
let id;
const policy = typename && this.getTypePolicy(typename);
let keyFn = (policy && policy.keyFn) || this.config.dataIdFromObject;
// Suppress masking warnings while key functions read fields.
masking_1.disableWarningsSlot.withValue(true, () => {
while (keyFn) {
const specifierOrId = keyFn({ ...object, ...storeObject }, context);
if ((0, internal_1.isArray)(specifierOrId)) {
keyFn = (0, key_extractor_js_1.keyFieldsFnFromSpecifier)(specifierOrId);
}
else {
id = specifierOrId;
break;
}
}
});
id = id ? String(id) : void 0;
return context.keyObject ? [id, context.keyObject] : [id];
}
// Queue TypePolicy objects for later processing by getTypePolicy,
// applying any {query,mutation,subscription}Type root designations
// immediately.
addTypePolicies(typePolicies) {
Object.keys(typePolicies).forEach((typename) => {
const { queryType, mutationType, subscriptionType, ...incoming } = typePolicies[typename];
// Though {query,mutation,subscription}Type configurations are rare,
// it's important to call setRootTypename as early as possible,
// since these configurations should apply consistently for the
// entire lifetime of the cache. Also, since only one __typename can
// qualify as one of these root types, these three properties cannot
// be inherited, unlike the rest of the incoming properties. That
// restriction is convenient, because the purpose of this.toBeAdded
// is to delay the processing of type/field policies until the first
// time they're used, allowing policies to be added in any order as
// long as all relevant policies (including policies for supertypes)
// have been added by the time a given policy is used for the first
// time. In other words, since inheritance doesn't matter for these
// properties, there's also no need to delay their processing using
// the this.toBeAdded queue.
if (queryType)
this.setRootTypename("Query", typename);
if (mutationType)
this.setRootTypename("Mutation", typename);
if (subscriptionType)
this.setRootTypename("Subscription", typename);
if (helpers_js_1.hasOwn.call(this.toBeAdded, typename)) {
this.toBeAdded[typename].push(incoming);
}
else {
this.toBeAdded[typename] = [incoming];
}
});
}
// Merge one queued TypePolicy (incoming) into the resolved policy for
// typename, translating the keyFields/keyArgs/merge shorthands
// (false, array, function) into concrete keyFn/merge functions.
updateTypePolicy(typename, incoming, existingFieldPolicies) {
const existing = this.getTypePolicy(typename);
const { keyFields, fields } = incoming;
function setMerge(existing, merge) {
existing.merge =
typeof merge === "function" ? merge
// Pass merge:true as a shorthand for a merge implementation
// that returns options.mergeObjects(existing, incoming).
: merge === true ? mergeTrueFn
// Pass merge:false to make incoming always replace existing
// without any warnings about data clobbering.
: merge === false ? mergeFalseFn
: existing.merge;
}
// Type policies can define merge functions, as an alternative to
// using field policies to merge child objects.
setMerge(existing, incoming.merge);
existing.keyFn =
// Pass false to disable normalization for this typename.
keyFields === false ? nullKeyFieldsFn
// Pass an array of strings to use those fields to compute a
// composite ID for objects of this typename.
: (0, internal_1.isArray)(keyFields) ? (0, key_extractor_js_1.keyFieldsFnFromSpecifier)(keyFields)
// Pass a function to take full control over identification.
: typeof keyFields === "function" ? keyFields
// Leave existing.keyFn unchanged if above cases fail.
: existing.keyFn;
if (fields) {
Object.keys(fields).forEach((fieldName) => {
let existing = existingFieldPolicies[fieldName];
// Field policy inheritance is atomic/shallow: you can't inherit a
// field policy and then override just its read function, since read
// and merge functions often need to cooperate, so changing only one
// of them would be a recipe for inconsistency.
// So here we avoid merging an inherited field policy with an updated one.
if (!existing || existing?.typename !== typename) {
existing = existingFieldPolicies[fieldName] = { typename };
}
const incoming = fields[fieldName];
// A bare function is shorthand for { read: fn }.
if (typeof incoming === "function") {
existing.read = incoming;
}
else {
const { keyArgs, read, merge } = incoming;
existing.keyFn =
// Pass false to disable argument-based differentiation of
// field identities.
keyArgs === false ? simpleKeyArgsFn
// Pass an array of strings to use named arguments to
// compute a composite identity for the field.
: (0, internal_1.isArray)(keyArgs) ? (0, key_extractor_js_1.keyArgsFnFromSpecifier)(keyArgs)
// Pass a function to take full control over field identity.
: typeof keyArgs === "function" ? keyArgs
// Leave existing.keyFn unchanged if above cases fail.
: existing.keyFn;
if (typeof read === "function") {
existing.read = read;
}
setMerge(existing, merge);
}
if (existing.read && existing.merge) {
// If we have both a read and a merge function, assume
// keyArgs:false, because read and merge together can take
// responsibility for interpreting arguments in and out. This
// default assumption can always be overridden by specifying
// keyArgs explicitly in the FieldPolicy.
existing.keyFn = existing.keyFn || simpleKeyArgsFn;
}
});
}
}
// Associate typename with the fixed store ID ROOT_<WHICH> ("Query",
// "Mutation", or "Subscription"), keeping rootIdsByTypename and
// rootTypenamesById mutually consistent. Invariant 100 fires if a
// different custom root typename was already registered.
setRootTypename(which, typename = which) {
const rootId = "ROOT_" + which.toUpperCase();
const old = this.rootTypenamesById[rootId];
if (typename !== old) {
(0, invariant_1.invariant)(!old || old === which, 100, which);
// First, delete any old __typename associated with this rootId from
// rootIdsByTypename.
if (old)
delete this.rootIdsByTypename[old];
// Now make this the only __typename that maps to this rootId.
this.rootIdsByTypename[typename] = rootId;
// Finally, update the __typename associated with this rootId.
this.rootTypenamesById[rootId] = typename;
}
}
// Record supertype relationships from a possibleTypes map (supertype ->
// array of subtypes), inverting it into supertypeMap and registering a
// RegExp in fuzzySubtypes for any subtype string that is not a plain
// type name.
addPossibleTypes(possibleTypes) {
this.usingPossibleTypes = true;
Object.keys(possibleTypes).forEach((supertype) => {
// Make sure all types have an entry in this.supertypeMap, even if
// their supertype set is empty, so we can return false immediately
// from policies.fragmentMatches for unknown supertypes.
this.getSupertypeSet(supertype, true);
possibleTypes[supertype].forEach((subtype) => {
this.getSupertypeSet(subtype, true).add(supertype);
const match = subtype.match(helpers_js_1.TypeOrFieldNameRegExp);
if (!match || match[0] !== subtype) {
// TODO Don't interpret just any invalid typename as a RegExp.
this.fuzzySubtypes.set(subtype, new RegExp(subtype));
}
});
});
}
// Return the TypePolicy for typename, creating it (with supertype
// inheritance) on first access, then applying any pending policies
// queued by addTypePolicies.
getTypePolicy(typename) {
if (!helpers_js_1.hasOwn.call(this.typePolicies, typename)) {
const policy = (this.typePolicies[typename] = {});
policy.fields = {};
// When the TypePolicy for typename is first accessed, instead of
// starting with an empty policy object, inherit any properties or
// fields from the type policies of the supertypes of typename.
//
// Any properties or fields defined explicitly within the TypePolicy
// for typename will take precedence, and if there are multiple
// supertypes, the properties of policies whose types were added
// later via addPossibleTypes will take precedence over those of
// earlier supertypes. TODO Perhaps we should warn about these
// conflicts in development, and recommend defining the property
// explicitly in the subtype policy?
//
// Field policy inheritance is atomic/shallow: you can't inherit a
// field policy and then override just its read function, since read
// and merge functions often need to cooperate, so changing only one
// of them would be a recipe for inconsistency.
//
// Once the TypePolicy for typename has been accessed, its properties can
// still be updated directly using addTypePolicies, but future changes to
// inherited supertype policies will not be reflected in this subtype
// policy, because this code runs at most once per typename.
let supertypes = this.supertypeMap.get(typename);
if (!supertypes && this.fuzzySubtypes.size) {
// To make the inheritance logic work for unknown typename strings that
// may have fuzzy supertypes, we give this typename an empty supertype
// set and then populate it with any fuzzy supertypes that match.
supertypes = this.getSupertypeSet(typename, true);
// This only works for typenames that are directly matched by a fuzzy
// supertype. What if there is an intermediate chain of supertypes?
// While possible, that situation can only be solved effectively by
// specifying the intermediate relationships via possibleTypes, manually
// and in a non-fuzzy way.
this.fuzzySubtypes.forEach((regExp, fuzzy) => {
if (regExp.test(typename)) {
// The fuzzy parameter is just the original string version of regExp
// (not a valid __typename string), but we can look up the
// associated supertype(s) in this.supertypeMap.
const fuzzySupertypes = this.supertypeMap.get(fuzzy);
if (fuzzySupertypes) {
fuzzySupertypes.forEach((supertype) => supertypes.add(supertype));
}
}
});
}
if (supertypes && supertypes.size) {
supertypes.forEach((supertype) => {
const { fields, ...rest } = this.getTypePolicy(supertype);
Object.assign(policy, rest);
Object.assign(policy.fields, fields);
});
}
}
const inbox = this.toBeAdded[typename];
if (inbox && inbox.length) {
// Merge the pending policies into this.typePolicies, in the order they
// were originally passed to addTypePolicy.
inbox.splice(0).forEach((policy) => {
this.updateTypePolicy(typename, policy, this.typePolicies[typename].fields);
});
}
return this.typePolicies[typename];
}
// Look up the (possibly inherited) field policy for
// typename.fieldName; returns undefined when typename is falsy or no
// policy exists.
getFieldPolicy(typename, fieldName) {
if (typename) {
return this.getTypePolicy(typename).fields[fieldName];
}
}
// Return the Set of supertype names recorded for subtype, creating an
// empty Set in supertypeMap first when createIfMissing is true.
getSupertypeSet(subtype, createIfMissing) {
let supertypeSet = this.supertypeMap.get(subtype);
if (!supertypeSet && createIfMissing) {
this.supertypeMap.set(subtype, (supertypeSet = new Set()));
}
return supertypeSet;
}
// Decide whether a fragment's type condition matches typename, walking
// the possibleTypes supertype graph breadth-first and, when a result
// object is provided (i.e. while writing), also consulting fuzzy
// subtypes as a last resort.
fragmentMatches(fragment, typename, result, variables) {
if (!fragment.typeCondition)
return true;
// If the fragment has a type condition but the object we're matching
// against does not have a __typename, the fragment cannot match.
if (!typename)
return false;
const supertype = fragment.typeCondition.name.value;
// Common case: fragment type condition and __typename are the same.
if (typename === supertype)
return true;
if (this.usingPossibleTypes && this.supertypeMap.has(supertype)) {
const typenameSupertypeSet = this.getSupertypeSet(typename, true);
const workQueue = [typenameSupertypeSet];
const maybeEnqueue = (subtype) => {
const supertypeSet = this.getSupertypeSet(subtype, false);
if (supertypeSet &&
supertypeSet.size &&
workQueue.indexOf(supertypeSet) < 0) {
workQueue.push(supertypeSet);
}
};
// We need to check fuzzy subtypes only if we encountered fuzzy
// subtype strings in addPossibleTypes, and only while writing to
// the cache, since that's when selectionSetMatchesResult gives a
// strong signal of fragment matching. The StoreReader class calls
// policies.fragmentMatches without passing a result object, so
// needToCheckFuzzySubtypes is always false while reading.
let needToCheckFuzzySubtypes = !!(result && this.fuzzySubtypes.size);
let checkingFuzzySubtypes = false;
// It's important to keep evaluating workQueue.length each time through
// the loop, because the queue can grow while we're iterating over it.
for (let i = 0; i < workQueue.length; ++i) {
const supertypeSet = workQueue[i];
if (supertypeSet.has(supertype)) {
if (!typenameSupertypeSet.has(supertype)) {
if (checkingFuzzySubtypes) {
__DEV__ && invariant_1.invariant.warn(101, typename, supertype);
}
// Record positive results for faster future lookup.
// Unfortunately, we cannot safely cache negative results,
// because new possibleTypes data could always be added to the
// Policies class.
typenameSupertypeSet.add(supertype);
}
return true;
}
supertypeSet.forEach(maybeEnqueue);
if (needToCheckFuzzySubtypes &&
// Start checking fuzzy subtypes only after exhausting all
// non-fuzzy subtypes (after the final iteration of the loop).
i === workQueue.length - 1 &&
// We could wait to compare fragment.selectionSet to result
// after we verify the supertype, but this check is often less
// expensive than that search, and we will have to do the
// comparison anyway whenever we find a potential match.
(0, helpers_js_1.selectionSetMatchesResult)(fragment.selectionSet, result, variables)) {
// We don't always need to check fuzzy subtypes (if no result
// was provided, or !this.fuzzySubtypes.size), but, when we do,
// we only want to check them once.
needToCheckFuzzySubtypes = false;
checkingFuzzySubtypes = true;
// If we find any fuzzy subtypes that match typename, extend the
// workQueue to search through the supertypes of those fuzzy
// subtypes. Otherwise the for-loop will terminate and we'll
// return false below.
this.fuzzySubtypes.forEach((regExp, fuzzyString) => {
const match = typename.match(regExp);
if (match && match[0] === typename) {
maybeEnqueue(fuzzyString);
}
});
}
}
}
return false;
}
// True when the field policy for typename.fieldName defines a keyFn
// (i.e. keyArgs was configured in any form).
hasKeyArgs(typename, fieldName) {
const policy = this.getFieldPolicy(typename, fieldName);
return !!(policy && policy.keyFn);
}
// Compute the store field name for a field specifier, running the
// field policy's keyFn chain (a keyFn may return a key-args specifier
// array, which becomes a new keyFn) and falling back to the default
// name+args serialization.
getStoreFieldName(fieldSpec) {
const { typename, fieldName } = fieldSpec;
const policy = this.getFieldPolicy(typename, fieldName);
let storeFieldName;
let keyFn = policy && policy.keyFn;
if (keyFn && typename) {
const context = {
typename,
fieldName,
field: fieldSpec.field || null,
variables: fieldSpec.variables,
};
const args = argsFromFieldSpecifier(fieldSpec);
while (keyFn) {
const specifierOrString = keyFn(args, context);
if ((0, internal_1.isArray)(specifierOrString)) {
keyFn = (0, key_extractor_js_1.keyArgsFnFromSpecifier)(specifierOrString);
}
else {
// If the custom keyFn returns a falsy value, fall back to
// fieldName instead.
storeFieldName = specifierOrString || fieldName;
break;
}
}
}
if (storeFieldName === void 0) {
storeFieldName =
fieldSpec.field ?
(0, internal_1.storeKeyNameFromField)(fieldSpec.field, fieldSpec.variables)
: (0, internal_1.getStoreKeyName)(fieldName, argsFromFieldSpecifier(fieldSpec));
}
// Returning false from a keyArgs function is like configuring
// keyArgs: false, but more dynamic.
if (storeFieldName === false) {
return fieldName;
}
// Make sure custom field names start with the actual field.name.value
// of the field, so we can always figure out which properties of a
// StoreObject correspond to which original field names.
return fieldName === (0, helpers_js_1.fieldNameFromStoreName)(storeFieldName) ? storeFieldName
: fieldName + ":" + storeFieldName;
}
// Read a field from objectOrReference, applying the field policy's
// custom read function (if any) on top of the raw store value.
readField(options, context) {
const objectOrReference = options.from;
if (!objectOrReference)
return;
const nameOrField = options.field || options.fieldName;
if (!nameOrField)
return;
// Infer the typename from the store when the caller didn't supply one.
if (options.typename === void 0) {
const typename = context.store.getFieldValue(objectOrReference, "__typename");
if (typename)
options.typename = typename;
}
const storeFieldName = this.getStoreFieldName(options);
const fieldName = (0, helpers_js_1.fieldNameFromStoreName)(storeFieldName);
const existing = context.store.getFieldValue(objectOrReference, storeFieldName);
const policy = this.getFieldPolicy(options.typename, fieldName);
const read = policy && policy.read;
if (read) {
const readOptions = makeFieldFunctionOptions(this, objectOrReference, options, context, context.store.getStorage((0, utilities_1.isReference)(objectOrReference) ?
objectOrReference.__ref
: objectOrReference, storeFieldName));
// Call read(existing, readOptions) with cacheSlot holding this.cache.
return reactiveVars_js_1.cacheSlot.withValue(this.cache, read, [
existing,
readOptions,
]);
}
return existing;
}
// Return the custom read function for typename.fieldName, if any.
getReadFunction(typename, fieldName) {
const policy = this.getFieldPolicy(typename, fieldName);
return policy && policy.read;
}
// Return the merge function for a field, preferring the parent field
// policy's merge and falling back to the child type policy's merge.
getMergeFunction(parentTypename, fieldName, childTypename) {
let policy = this.getFieldPolicy(parentTypename, fieldName);
let merge = policy && policy.merge;
if (!merge && childTypename) {
policy = this.getTypePolicy(childTypename);
merge = policy && policy.merge;
}
return merge;
}
// Invoke a field merge function, short-circuiting the merge:true and
// merge:false shorthands without building full FieldFunctionOptions.
runMergeFunction(existing, incoming, { field, typename, merge }, context, storage) {
if (merge === mergeTrueFn) {
// Instead of going to the trouble of creating a full
// FieldFunctionOptions object and calling mergeTrueFn, we can
// simply call mergeObjects, as mergeTrueFn would.
return makeMergeObjectsFunction(context.store)(existing, incoming);
}
if (merge === mergeFalseFn) {
// Likewise for mergeFalseFn, whose implementation is even simpler.
return incoming;
}
// If cache.writeQuery or cache.writeFragment was called with
// options.overwrite set to true, we still call merge functions, but
// the existing data is always undefined, so the merge function will
// not attempt to combine the incoming data with the existing data.
if (context.overwrite) {
existing = void 0;
}
return merge(existing, incoming, makeFieldFunctionOptions(this,
// Unlike options.readField for read functions, we do not fall
// back to the current object if no foreignObjOrRef is provided,
// because it's not clear what the current object should be for
// merge functions: the (possibly undefined) existing object, or
// the incoming object? If you think your merge function needs
// to read sibling fields in order to produce a new value for
// the current field, you might want to rethink your strategy,
// because that's a recipe for making merge behavior sensitive
// to the order in which fields are written into the cache.
// However, readField(name, ref) is useful for merge functions
// that need to deduplicate child objects and references.
void 0, {
typename,
fieldName: field.name.value,
field,
variables: context.variables,
}, context, storage || {}));
}
}
exports.Policies = Policies;
/**
 * Build the FieldFunctionOptions object handed to custom read/merge
 * functions, wiring in helpers (readField, mergeObjects, toReference,
 * canRead) bound to the given policies instance and store context.
 */
function makeFieldFunctionOptions(policies, objectOrReference, fieldSpec, context, storage) {
  const storeFieldName = policies.getStoreFieldName(fieldSpec);
  const store = context.store;
  // Field-specific variables take precedence over the context's.
  const variables = fieldSpec.variables || context.variables;
  return {
    args: argsFromFieldSpecifier(fieldSpec),
    field: fieldSpec.field || null,
    fieldName: (0, helpers_js_1.fieldNameFromStoreName)(storeFieldName),
    storeFieldName,
    variables,
    isReference: utilities_1.isReference,
    toReference: store.toReference,
    storage,
    cache: policies.cache,
    canRead: store.canRead,
    readField(...args) {
      const options = normalizeReadFieldOptions(args, objectOrReference, variables);
      return policies.readField(options, context);
    },
    mergeObjects: makeMergeObjectsFunction(store),
  };
}
/**
 * Normalize the two readField calling conventions —
 * readField(fieldName, from?) and readField(options) — into a single
 * options object, defaulting `from` and `variables` when omitted.
 */
function normalizeReadFieldOptions(readFieldArgs, objectOrReference, variables) {
  const argc = readFieldArgs.length;
  const fieldNameOrOptions = readFieldArgs[0];
  let options;
  if (typeof fieldNameOrOptions === "string") {
    options = {
      fieldName: fieldNameOrOptions,
      // Default to objectOrReference only when no second argument was
      // passed for the from parameter, not when undefined is explicitly
      // passed as the second argument.
      from: argc > 1 ? readFieldArgs[1] : objectOrReference,
    };
  } else {
    options = { ...fieldNameOrOptions };
    // Default to objectOrReference only when fieldNameOrOptions.from is
    // actually omitted, rather than just undefined.
    if (!helpers_js_1.hasOwn.call(options, "from")) {
      options.from = objectOrReference;
    }
  }
  // Warn in development when the resolved from value is still undefined.
  if (environment_1.__DEV__ && options.from === void 0) {
    __DEV__ && invariant_1.invariant.warn(102, (0, internal_1.stringifyForDisplay)(Array.from(readFieldArgs)));
  }
  if (options.variables === void 0) {
    options.variables = variables;
  }
  return options;
}
/**
 * Create the options.mergeObjects helper bound to a store: shallow-merges
 * two StoreObjects of compatible __typename, or folds a StoreObject
 * into/out of the entity a Reference identifies via store.merge.
 * Arrays are rejected (invariant 103); mismatched types or non-object
 * inputs simply yield the incoming value.
 */
function makeMergeObjectsFunction(store) {
  return function mergeObjects(existing, incoming) {
    if ((0, internal_1.isArray)(existing) || (0, internal_1.isArray)(incoming)) {
      throw (0, invariant_1.newInvariantError)(103);
    }
    // These dynamic checks are necessary because the parameters of a
    // custom merge function can easily have the any type, so the type
    // system cannot always enforce the StoreObject | Reference parameter
    // types of options.mergeObjects.
    if ((0, internal_1.isNonNullObject)(existing) && (0, internal_1.isNonNullObject)(incoming)) {
      const existingType = store.getFieldValue(existing, "__typename");
      const incomingType = store.getFieldValue(incoming, "__typename");
      if (existingType && incomingType && existingType !== incomingType) {
        // Different __typenames cannot be meaningfully combined.
        return incoming;
      }
      const existingIsObject = (0, helpers_js_1.storeValueIsStoreObject)(existing);
      const incomingIsObject = (0, helpers_js_1.storeValueIsStoreObject)(incoming);
      if ((0, utilities_1.isReference)(existing) && incomingIsObject) {
        // Update the normalized EntityStore for the entity identified by
        // existing.__ref, preferring/overwriting any fields contributed by the
        // newer incoming StoreObject.
        store.merge(existing.__ref, incoming);
        return existing;
      }
      if (existingIsObject && (0, utilities_1.isReference)(incoming)) {
        // Update the normalized EntityStore for the entity identified by
        // incoming.__ref, taking fields from the older existing object only if
        // those fields are not already present in the newer StoreObject
        // identified by incoming.__ref.
        store.merge(existing, incoming.__ref);
        return incoming;
      }
      if (existingIsObject && incomingIsObject) {
        // Plain shallow merge: incoming fields win.
        return { ...existing, ...incoming };
      }
    }
    return incoming;
  };
}
//# sourceMappingURL=policies.cjs.map