/*
 * @urql/exchange-graphcache
 * A normalized and configurable cache exchange for urql
 * (bundled CommonJS output — 1,260 lines (1,198 loc) • 94.4 kB)
 */
Object.defineProperty(exports, '__esModule', { value: true });
var core = require('@urql/core');
var graphql_web = require('@0no-co/graphql.web');
var wonka = require('wonka');
// These are guards that are used throughout the codebase to warn or error on
// unexpected behaviour or conditions.
// Every warning and error comes with a number that uniquely identifies them.
// You can read more about the messages themselves in `docs/graphcache/errors.md`
// URL unfurls to https://formidable.com/open-source/urql/docs/graphcache/errors/
// Base URL for the expanded error/warning explanations; the numeric error
// code is appended after the trailing `#`.
var helpUrl = '\nhttps://bit.ly/2XbVrpR#';
// Deduplication set: each distinct warning message is only logged once.
var cache = new Set();
// Stack of human-readable descriptions of the GraphQL nodes currently being
// traversed; used to enrich dev-mode warnings/errors with their origin.
var currentDebugStack = [];
// Removes the most recently pushed debug identifier (mirrors pushDebugNode).
var popDebugNode = () => currentDebugStack.pop();
/** Pushes a human-readable description of `node` onto the debug stack (dev-only diagnostics). */
var pushDebugNode = (typename, node) => {
  var identifier = '';
  switch (node.kind) {
    case graphql_web.Kind.INLINE_FRAGMENT:
      identifier = typename ? `Inline Fragment on "${typename}"` : 'Inline Fragment';
      break;
    case graphql_web.Kind.OPERATION_DEFINITION: {
      var operationName = node.name ? `"${node.name.value}"` : 'Unnamed';
      identifier = `${operationName} ${node.operation}`;
      break;
    }
    case graphql_web.Kind.FRAGMENT_DEFINITION:
      identifier = `"${node.name.value}" Fragment`;
      break;
  }
  if (identifier) currentDebugStack.push(identifier);
};
/** Formats the current debug stack as a "(Caused At: …)" suffix, or '' when empty. */
var getDebugOutput = () => {
  if (!currentDebugStack.length) return '';
  return `\n(Caused At: ${currentDebugStack.join(', ')})`;
};
/**
 * Throws a named "Graphcache Error" when `condition` is falsy.
 * `message` carries the full dev-mode text; in minified (production) builds it
 * is empty and only the numeric `code` — resolvable via `helpUrl` — remains.
 * FIX: corrected the misspelled fallback message "Minfied Error" → "Minified Error".
 */
function invariant(condition, message, code) {
  if (!condition) {
    var errorMessage = message || 'Minified Error #' + code + '\n';
    if (process.env.NODE_ENV !== 'production') {
      // Append the debug traversal stack so the error points at its origin
      errorMessage += getDebugOutput();
    }
    var error = new Error(errorMessage + helpUrl + code);
    error.name = 'Graphcache Error';
    throw error;
  }
}
/** Logs a warning (at most once per unique message) via `logger` or console.warn. */
function warn(message, code, logger) {
  if (cache.has(message)) return;
  var output = message + getDebugOutput() + helpUrl + code;
  if (logger) {
    logger('warn', output);
  } else {
    console.warn(output);
  }
  cache.add(message);
}
// Shared fallback object so getDirectives never allocates for nodes
// that carry no directives.
var EMPTY_DIRECTIVES = {};
/** Returns the directives dictionary of a given node */
var getDirectives = node => node._directives || EMPTY_DIRECTIVES;
/** Returns the name of a given node */
var getName = node => node.name.value;
/** Returns the name of a fragment's type condition */
var getFragmentTypeName = node => node.typeCondition.name.value;
/** Returns either the field's alias or, failing that, its name */
var getFieldAlias = node => {
  var alias = node.alias;
  return alias ? alias.value : getName(node);
};
// Shared empty selection list to avoid allocating for leaf fields.
var emptySelectionSet = [];
/** Returns the SelectionSet for a given inline or defined fragment node */
var getSelectionSet = node => {
  var selectionSet = node.selectionSet;
  return selectionSet ? selectionSet.selections : emptySelectionSet;
};
/** Returns the type condition's name, or null when the node has none */
var getTypeCondition = node => {
  var condition = node.typeCondition;
  return condition ? condition.name.value : null;
};
/** Evaluates a field's arguments taking vars into account; returns null when no non-null args remain */
var getFieldArguments = (node, vars) => {
  var args = null;
  var argNodes = node.arguments;
  if (argNodes) {
    for (var index = 0; index < argNodes.length; index++) {
      var argNode = argNodes[index];
      var resolved = graphql_web.valueFromASTUntyped(argNode.value, vars);
      // null/undefined arguments are dropped entirely
      if (resolved !== undefined && resolved !== null) {
        args = args || {};
        args[getName(argNode)] = resolved;
      }
    }
  }
  return args;
};
/** Returns a filtered form of variables with values missing that the query doesn't require */
var filterVariables = (node, input) => {
  if (!input || !node.variableDefinitions) return undefined;
  var vars = {};
  for (var def of node.variableDefinitions) {
    var name = def.variable.name.value;
    vars[name] = input[name];
  }
  return vars;
};
/** Returns a normalized form of variables with defaulted values */
var normalizeVariables = (node, input) => {
  var vars = {};
  if (!input) return vars;
  var defs = node.variableDefinitions;
  if (defs) {
    for (var def of defs) {
      var name = def.variable.name.value;
      // Apply the AST default only when the input omits the variable
      vars[name] =
        input[name] === undefined && def.defaultValue
          ? graphql_web.valueFromASTUntyped(def.defaultValue, input)
          : input[name];
    }
  }
  // Pass through any extra input variables that aren't declared on the operation
  for (var extra in input) {
    if (!(extra in vars)) vars[extra] = input[extra];
  }
  return vars;
};
/**
 * Returns the document's main (first) operation definition.
 * Throws Graphcache Error #1 when the document contains no operation.
 * FIX: the dev-mode error text was missing a space between the concatenated
 * parts ("…OperationDefinitionnode…" → "…OperationDefinition node…").
 */
function getMainOperation(doc) {
  for (var i = 0; i < doc.definitions.length; i++) {
    if (doc.definitions[i].kind === graphql_web.Kind.OPERATION_DEFINITION) {
      return doc.definitions[i];
    }
  }
  invariant(false, process.env.NODE_ENV !== "production" ? 'Invalid GraphQL document: All GraphQL documents must contain an OperationDefinition ' + 'node for a query, subscription, or mutation.' : "", 1);
}
/** Returns a mapping from fragment names to their definitions */
var getFragments = doc => {
  var fragments = {};
  for (var definition of doc.definitions) {
    if (definition.kind === graphql_web.Kind.FRAGMENT_DEFINITION) {
      fragments[getName(definition)] = definition;
    }
  }
  return fragments;
};
/** Resolves @include and @skip directives to determine whether field is included. */
var shouldInclude = (node, vars) => {
  var directives = getDirectives(node);
  // Fast path: no conditional directives present at all
  if (!directives.include && !directives.skip) return true;
  for (var name in directives) {
    var directive = directives[name];
    var isConditional = name === 'include' || name === 'skip';
    if (
      directive &&
      isConditional &&
      directive.arguments &&
      directive.arguments[0] &&
      getName(directive.arguments[0]) === 'if'
    ) {
      // `@include(if: false)` or `@skip(if: true)` excludes the field
      var condition = graphql_web.valueFromASTUntyped(directive.arguments[0].value, vars);
      return name === 'include' ? !!condition : !condition;
    }
  }
  return true;
};
/** Resolves @defer directive to determine whether a fragment is potentially skipped. */
var isDeferred = (node, vars) => {
  var deferDirective = getDirectives(node).defer;
  if (!deferDirective) return false;
  var argNodes = deferDirective.arguments || [];
  for (var index = 0; index < argNodes.length; index++) {
    if (getName(argNodes[index]) === 'if') {
      // `@defer(if: …)` toggles the deferral explicitly
      return !!graphql_web.valueFromASTUntyped(argNodes[index].value, vars);
    }
  }
  // A bare `@defer` is always deferred
  return true;
};
/** Resolves @_optional and @_required directives to determine whether the fields in a fragment are considered optional. */
/** Returns false when @_required is set, true when @_optional is set, undefined otherwise. */
var isOptional = node => {
  var directives = node._directives;
  // @_required wins over @_optional when both are present
  if (directives && directives.required) return false;
  if (directives && directives.optional) return true;
  return undefined;
};
/**
 * Converts an introspection result's `__schema` into graphcache's compact
 * schema representation: root operation type names, a Map of OBJECT /
 * INTERFACE / UNION types (fields, interfaces and union members exposed as
 * lazily-built name → entry lookups), and an `isSubType` predicate.
 */
var buildClientSchema = ({ __schema }) => {
  var typeMap = new Map();
  // Wraps an array of named entries in a lazily-built name → entry lookup.
  var lazyNameMap = entries => {
    var lookup;
    return () => {
      if (!lookup) {
        lookup = {};
        for (var entry of entries) lookup[entry.name] = entry;
      }
      return lookup;
    };
  };
  // Maps one introspected type to its compact form; non-composite kinds yield undefined.
  var toSchemaType = type => {
    if (type.kind === 'OBJECT' || type.kind === 'INTERFACE') {
      return {
        name: type.name,
        kind: type.kind,
        interfaces: lazyNameMap(type.interfaces || []),
        fields: lazyNameMap(
          type.fields.map(field => ({
            name: field.name,
            type: field.type,
            args: lazyNameMap(field.args)
          }))
        )
      };
    } else if (type.kind === 'UNION') {
      return {
        name: type.name,
        kind: type.kind,
        types: lazyNameMap(type.possibleTypes || [])
      };
    }
  };
  var schema = {
    query: __schema.queryType ? __schema.queryType.name : null,
    mutation: __schema.mutationType ? __schema.mutationType.name : null,
    subscription: __schema.subscriptionType ? __schema.subscriptionType.name : null,
    types: undefined,
    /** Returns whether `possible` is a member of the union / implements the interface `abstract`. */
    isSubType(abstract, possible) {
      var abstractType = typeMap.get(abstract);
      var possibleType = typeMap.get(possible);
      if (!abstractType || !possibleType) {
        return false;
      } else if (abstractType.kind === 'UNION') {
        return !!abstractType.types()[possible];
      } else if (abstractType.kind !== 'OBJECT' && possibleType.kind === 'OBJECT') {
        return !!possibleType.interfaces()[abstract];
      }
      return abstract === possible;
    }
  };
  if (__schema.types) {
    schema.types = typeMap;
    for (var type of __schema.types) {
      if (type && type.name) {
        var built = toSchemaType(type);
        if (built) typeMap.set(type.name, built);
      }
    }
  }
  return schema;
};
// Prefix shared by all builtin/introspection names (e.g. __typename, __schema).
var BUILTIN_NAME = '__';
/** Returns whether the schema allows `typename.fieldName` to be null */
var isFieldNullable = (schema, typename, fieldName, logger) => {
  var field = getField(schema, typename, fieldName, logger);
  if (!field) return false;
  return field.type.kind !== 'NON_NULL';
};
/** Returns whether list items of `typename.fieldName` may be null */
var isListNullable = (schema, typename, fieldName, logger) => {
  var field = getField(schema, typename, fieldName, logger);
  if (!field) return false;
  var listType = field.type.kind === 'NON_NULL' ? field.type.ofType : field.type;
  return listType.kind === 'LIST' && listType.ofType.kind !== 'NON_NULL';
};
/** Returns whether a field exists on a type; builtin names always pass */
var isFieldAvailableOnType = (schema, typename, fieldName, logger) => {
  if (fieldName.indexOf(BUILTIN_NAME) === 0 || typename.indexOf(BUILTIN_NAME) === 0) return true;
  return !!getField(schema, typename, fieldName, logger);
};
/** Returns whether a fragment's type condition matches `typename`, per the schema. */
var isInterfaceOfType = (schema, node, typename) => {
  if (!typename) return false;
  var typeCondition = getTypeCondition(node);
  // No condition, or an exact match, always applies
  if (!typeCondition || typename === typeCondition) return true;
  var conditionType = schema.types.get(typeCondition);
  if (conditionType && conditionType.kind === 'OBJECT') {
    return typeCondition === typename;
  }
  // Otherwise the condition must be abstract and the typename concrete
  expectAbstractType(schema, typeCondition);
  expectObjectType(schema, typename);
  return schema.isSubType(typeCondition, typename);
};
/** Looks up a field definition on a schema type; warns (#4, dev-only) when missing. */
var getField = (schema, typename, fieldName, logger) => {
  // Builtin (double-underscore) fields/types are never present in the schema map
  if (fieldName.indexOf(BUILTIN_NAME) === 0 || typename.indexOf(BUILTIN_NAME) === 0) return;
  expectObjectType(schema, typename);
  var field = schema.types.get(typename).fields()[fieldName];
  if (process.env.NODE_ENV !== 'production' && !field) {
    warn('Invalid field: The field `' + fieldName + '` does not exist on `' + typename + '`, ' + 'but the GraphQL document expects it to exist.\n' + 'Traversal will continue, however this may lead to undefined behavior!', 4, logger);
  }
  return field;
};
/** Asserts that `typename` names an OBJECT type in the schema (error #3). */
function expectObjectType(schema, typename) {
  var type = schema.types.get(typename);
  invariant(!!type && type.kind === 'OBJECT', process.env.NODE_ENV !== "production" ? 'Invalid Object type: The type `' + typename + '` is not an object in the defined schema, ' + 'but the GraphQL document is traversing it.' : "", 3);
}
/** Asserts that `typename` names an INTERFACE or UNION type (error #5). */
function expectAbstractType(schema, typename) {
  var type = schema.types.get(typename);
  var isAbstract = !!type && (type.kind === 'INTERFACE' || type.kind === 'UNION');
  invariant(isAbstract, process.env.NODE_ENV !== "production" ? 'Invalid Abstract type: The type `' + typename + '` is not an Interface or Union type in the defined schema, ' + 'but a fragment in the GraphQL document is using it as a type condition.' : "", 5);
}
/** Dev-only: warns (#20) for `keys` config entries naming types missing from the schema. */
function expectValidKeyingConfig(schema, keys, logger) {
  if (process.env.NODE_ENV === 'production') return;
  for (var key in keys) {
    if (!schema.types.has(key)) {
      warn('Invalid Object type: The type `' + key + '` is not an object in the defined schema, but the `keys` option is referencing it.', 20, logger);
    }
  }
}
/** Dev-only: validates the `updates` config against the schema (warnings #21/#22). */
function expectValidUpdatesConfig(schema, updates, logger) {
  if (process.env.NODE_ENV === 'production') return;
  for (var typename in updates) {
    if (!updates[typename]) continue;
    if (!schema.types.has(typename)) {
      var hint = '';
      // Suggest the schema's actual root type name for the common renames
      if (typename === 'Mutation' && schema.mutation && schema.mutation !== 'Mutation') {
        hint += '\nMaybe your config should reference `' + schema.mutation + '`?';
      } else if (typename === 'Subscription' && schema.subscription && schema.subscription !== 'Subscription') {
        hint += '\nMaybe your config should reference `' + schema.subscription + '`?';
      }
      // NOTE(review): this returns rather than continues, so validation stops
      // at the first unknown typename — behavior preserved from the original.
      return warn('Invalid updates type: The type `' + typename + '` is not an object in the defined schema, but the `updates` config is referencing it.' + hint, 21, logger);
    }
    var fields = schema.types.get(typename).fields();
    for (var fieldName in updates[typename]) {
      if (!fields[fieldName]) {
        warn('Invalid updates field: `' + fieldName + '` on `' + typename + '` is not in the defined schema, but the `updates` config is referencing it.', 22, logger);
      }
    }
  }
}
/** Dev-only: warns (#23) that a resolver references a name missing from the schema. */
function warnAboutResolver(name, logger) {
  if (process.env.NODE_ENV !== 'production') {
    warn(`Invalid resolver: \`${name}\` is not in the defined schema, but the \`resolvers\` option is referencing it.`, 23, logger);
  }
}
/** Dev-only: warns (#26) that a resolver targets an abstract (interface/union) type. */
function warnAboutAbstractResolver(name, kind, logger) {
  if (process.env.NODE_ENV !== 'production') {
    warn(`Invalid resolver: \`${name}\` does not match to a concrete type in the schema, but the \`resolvers\` option is referencing it. Implement the resolver for the types that ${kind === 'UNION' ? 'make up the union' : 'implement the interface'} instead.`, 26, logger);
  }
}
/** Dev-only: validates the `resolvers` config against the schema (warnings #23/#26). */
function expectValidResolversConfig(schema, resolvers, logger) {
  if (process.env.NODE_ENV === 'production') return;
  for (var key in resolvers) {
    if (key === 'Query') {
      if (!schema.query) {
        warnAboutResolver('Query', logger);
        continue;
      }
      var queryFields = schema.types.get(schema.query).fields();
      for (var queryField in resolvers.Query || {}) {
        if (!queryFields[queryField]) warnAboutResolver('Query.' + queryField, logger);
      }
    } else if (!schema.types.has(key)) {
      warnAboutResolver(key, logger);
    } else if (schema.types.get(key).kind === 'INTERFACE' || schema.types.get(key).kind === 'UNION') {
      // Resolvers must be written per concrete type, not per abstract type
      warnAboutAbstractResolver(key, schema.types.get(key).kind, logger);
    } else {
      var typeFields = schema.types.get(key).fields();
      for (var property in resolvers[key] || {}) {
        if (!typeFields[property]) warnAboutResolver(key + '.' + property, logger);
      }
    }
  }
}
/** Dev-only: warns (#24) for `optimistic` config entries not matching schema mutation fields. */
function expectValidOptimisticMutationsConfig(schema, optimisticMutations, logger) {
  if (process.env.NODE_ENV === 'production') return;
  if (!schema.mutation) return;
  var mutationFields = schema.types.get(schema.mutation).fields();
  for (var mutation in optimisticMutations) {
    if (!mutationFields[mutation]) {
      warn(`Invalid optimistic mutation field: \`${mutation}\` is not a mutation field in the defined schema, but the \`optimistic\` option is referencing it.`, 24, logger);
    }
  }
}
/** Returns a field key: the field name, plus stringified arguments if any. */
var keyOfField = (fieldName, args) => {
  if (!args) return fieldName;
  return `${fieldName}(${core.stringifyVariables(args)})`;
};
/** Joins an entity key and a field key with a dot separator. */
var joinKeys = (parentKey, key) => `${parentKey}.${key}`;
/** Parses a serialized field key back into { fieldKey, fieldName, arguments }. */
var fieldInfoOfKey = fieldKey => {
  var argsStart = fieldKey.indexOf('(');
  if (argsStart === -1) {
    // No parenthesis: the key is a bare field name without arguments
    return { fieldKey, fieldName: fieldKey, arguments: null };
  }
  return {
    fieldKey,
    fieldName: fieldKey.slice(0, argsStart),
    arguments: JSON.parse(fieldKey.slice(argsStart + 1, -1))
  };
};
/** Serializes an entity+field key pair; dots in the entity key are escaped as %2e. */
var serializeKeys = (entityKey, fieldKey) => {
  var escaped = entityKey.replace(/\./g, '%2e');
  return `${escaped}.${fieldKey}`;
};
/** Inverts serializeKeys: splits on the first dot and unescapes the entity key. */
var deserializeKeyInfo = key => {
  var separator = key.indexOf('.');
  return {
    entityKey: key.slice(0, separator).replace(/%2e/g, '.'),
    fieldKey: key.slice(separator + 1)
  };
};
// Module-level state for the active read/write "data run". Set up by
// initDataState and torn down by clearDataState; access outside of a run is
// guarded by the invariant in getCurrentDependencies.
var currentOwnership = null; // WeakSet of data objects created during this run (see makeData)
var currentDataMapping = null; // WeakMap: input object -> its copy made during this run
var currentData = null; // the data store object (shape produced by `make`)
var currentOptimisticKey = null; // key of the optimistic layer being read/written, if any
var currentOperation = null; // 'read' | 'write'
var currentDependencies = null; // Set of entity/field keys touched during this run
var currentForeignData = false; // flag passed through from initDataState's isForeignData
var currentOptimistic = false; // whether this run applies to optimistic data only
/** Creates a new data object unless it's been created in this data run */
function makeData(data, isArray) {
  var copy;
  if (data) {
    // Objects created within this run are owned and may be reused as-is
    if (currentOwnership.has(data)) return data;
    copy = currentDataMapping.get(data);
  }
  if (copy == null) copy = isArray ? [] : {};
  if (data) currentDataMapping.set(data, copy);
  currentOwnership.add(copy);
  return copy;
}
/** Returns whether `data` was created within the current data run. */
var ownsData = data => !!data && currentOwnership.has(data);
/** Before reading or writing the global state needs to be initialised */
// `operationType` is 'read' | 'write'; `layerKey` (if given) selects the
// optimistic layer to read from or write to; `isOptimistic` marks optimistic
// writes; `isForeignData` is stored as a flag for later use.
var initDataState = (operationType, data, layerKey, isOptimistic, isForeignData) => {
  // Fresh per-run bookkeeping (see makeData/ownsData and dependency tracking)
  currentOwnership = new WeakSet();
  currentDataMapping = new WeakMap();
  currentOperation = operationType;
  currentData = data;
  currentDependencies = new Set();
  currentOptimistic = !!isOptimistic;
  currentForeignData = !!isForeignData;
  if (process.env.NODE_ENV !== 'production') {
    currentDebugStack.length = 0;
  }
  if (!layerKey) {
    currentOptimisticKey = null;
  } else if (currentOperation === 'read') {
    // We don't create new layers for read operations and instead simply
    // apply the currently available layer, if any
    currentOptimisticKey = layerKey;
  } else if (isOptimistic || data.hydrating || data.optimisticOrder.length > 1) {
    // If this operation isn't optimistic and we see it for the first time,
    // then it must've been optimistic in the past, so we can proactively
    // clear the optimistic data before writing
    if (!isOptimistic && !data.commutativeKeys.has(layerKey)) {
      reserveLayer(data, layerKey);
    } else if (isOptimistic) {
      // Drop a prior non-commutative position of this layer before re-adding it
      if (data.optimisticOrder.indexOf(layerKey) !== -1 && !data.commutativeKeys.has(layerKey)) {
        data.optimisticOrder.splice(data.optimisticOrder.indexOf(layerKey), 1);
      }
      // NOTE: This optimally shouldn't happen as it implies that an optimistic
      // write is being performed after a concrete write.
      data.commutativeKeys.delete(layerKey);
    }
    // An optimistic update of a mutation may force an optimistic layer,
    // or this Query update may be applied optimistically since it's part
    // of a commutative chain
    currentOptimisticKey = layerKey;
    createLayer(data, layerKey);
  } else {
    // Otherwise we don't create an optimistic layer and clear the
    // operation's one if it already exists
    // We also do this when only one layer exists to avoid having to squash
    // any layers at the end of writing this layer
    currentOptimisticKey = null;
    deleteLayer(data, layerKey);
  }
};
/** Reset the data state after read/write is complete */
var clearDataState = () => {
  // NOTE: This is only called to check for the invariant to pass
  // (throws Graphcache Error #2 when no run is active)
  if (process.env.NODE_ENV !== 'production') {
    getCurrentDependencies();
  }
  var data = currentData;
  var layerKey = currentOptimisticKey;
  currentOptimistic = false;
  currentOptimisticKey = null;
  // Determine whether the current operation has been a commutative layer
  if (!data.hydrating && layerKey && data.optimisticOrder.indexOf(layerKey) > -1) {
    // Squash all layers in reverse order (low priority upwards) that have
    // been written already
    var i = data.optimisticOrder.length;
    while (--i >= 0 && data.dirtyKeys.has(data.optimisticOrder[i]) && data.commutativeKeys.has(data.optimisticOrder[i])) squashLayer(data.optimisticOrder[i]);
  }
  // Tear down all per-run state so stray access outside a run fails loudly
  currentOwnership = null;
  currentDataMapping = null;
  currentOperation = null;
  currentData = null;
  currentDependencies = null;
  if (process.env.NODE_ENV !== 'production') {
    currentDebugStack.length = 0;
  }
  if (process.env.NODE_ENV !== 'test') {
    // Schedule deferred tasks if we haven't already, and if either a persist or GC run
    // are likely to be needed
    // (storage attached → persistData has work; no optimistic layers → gc may run)
    if (!data.defer && (data.storage || !data.optimisticOrder.length)) {
      data.defer = true;
      setTimeout(() => {
        // Deferred maintenance runs in its own 'read' data run
        initDataState('read', data, null);
        gc();
        persistData();
        clearDataState();
        data.defer = false;
      });
    }
  }
};
/** Initialises then resets the data state, which may squash this layer if necessary */
var noopDataState = (data, layerKey, isOptimistic) => {
  // A concrete (non-optimistic) write resolves any pending deferred state for the layer
  if (layerKey && !isOptimistic) {
    data.deferredKeys.delete(layerKey);
  }
  initDataState('write', data, layerKey, isOptimistic);
  clearDataState();
};
/** As we're writing, we keep around all the records and links we've read or have written to */
// Returns the dependency set of the active data run; throws Graphcache
// Error #2 when called outside of a run.
// FIX: the dev-mode error text was missing a space between the concatenated
// parts ("…mutated duringoperations…" → "…mutated during operations…").
var getCurrentDependencies = () => {
  invariant(currentDependencies !== null, process.env.NODE_ENV !== "production" ? 'Invalid Cache call: The cache may only be accessed or mutated during ' + 'operations like write or query, or as part of its resolvers, updaters, ' + 'or optimistic configs.' : "", 2);
  return currentDependencies;
};
// Shared empty set returned for missing type indexes; never mutated.
var DEFAULT_EMPTY_SET = new Set();
/** Creates a fresh in-memory data store rooted at `queryRootKey`. */
var make = queryRootKey => {
  return {
    hydrating: false,
    defer: false,
    gc: new Set(),
    types: new Map(),
    persist: new Set(),
    queryRootKey,
    refCount: new Map(),
    links: { optimistic: new Map(), base: new Map() },
    abstractToConcreteMap: new Map(),
    records: { optimistic: new Map(), base: new Map() },
    deferredKeys: new Set(),
    commutativeKeys: new Set(),
    dirtyKeys: new Set(),
    optimisticOrder: [],
    storage: null
  };
};
/** Adds a node value to a NodeMap (taking optimistic values into account) */
var setNode = (map, entityKey, fieldKey, value) => {
  if (process.env.NODE_ENV !== 'production') {
    invariant(currentOperation !== 'read', process.env.NODE_ENV !== "production" ? 'Invalid Cache write: You may not write to the cache during cache reads. ' + ' Accesses to `cache.writeFragment`, `cache.updateQuery`, and `cache.link` may ' + ' not be made inside `resolvers` for instance.' : "", 27);
  }
  // Optimistic values go into the active layer's map; everything else into base
  var target = currentOptimisticKey ? map.optimistic.get(currentOptimisticKey) : map.base;
  var entity = target.get(entityKey);
  if (entity === undefined) {
    entity = Object.create(null);
    target.set(entityKey, entity);
  }
  if (value === undefined && !currentOptimisticKey) {
    // Base layer: undefined means "remove the field entry entirely"
    delete entity[fieldKey];
  } else {
    // Optimistic layers keep an explicit undefined so it can shadow the base value
    entity[fieldKey] = value;
  }
};
/** Gets a node value from a NodeMap (taking optimistic values into account) */
var getNode = (map, entityKey, fieldKey) => {
  var node;
  // A read may be initialised to skip layers until its own, which is useful for
  // reading back written data. It won't skip over optimistic layers however
  var skip = !currentOptimistic && currentOperation === 'read' && currentOptimisticKey && currentData.commutativeKeys.has(currentOptimisticKey);
  // This first iterates over optimistic layers (in order)
  for (var i = 0, l = currentData.optimisticOrder.length; i < l; i++) {
    var layerKey = currentData.optimisticOrder[i];
    var optimistic = map.optimistic.get(layerKey);
    // If we're reading starting from a specific layer, we skip until a match
    skip = skip && layerKey !== currentOptimisticKey;
    // If the node and node value exists it is returned, including undefined.
    // Three guards apply: (1) a skipped commutative layer is ignored,
    // (2) during optimistic reads only commutative layers are applied, and
    // (3) the entity's node must exist with fieldKey present (even as undefined).
    if (optimistic && (!skip || !currentData.commutativeKeys.has(layerKey)) && (!currentOptimistic || currentOperation === 'write' || currentData.commutativeKeys.has(layerKey)) && (node = optimistic.get(entityKey)) !== undefined && fieldKey in node) {
      return node[fieldKey];
    }
  }
  // Otherwise we read the non-optimistic base value
  node = map.base.get(entityKey);
  return node !== undefined ? node[fieldKey] : undefined;
};
/** Returns the reference count of an entity (0 when untracked). */
function getRefCount(entityKey) {
  var count = currentData.refCount.get(entityKey);
  return count || 0;
}
/** Adjusts the reference count of an entity on a refCount dict by "by" and updates the gc */
var updateRCForEntity = (entityKey, by) => {
  var previous = getRefCount(entityKey);
  // Clamp at zero so counts never go negative
  var next = previous + by > 0 ? previous + by : 0;
  currentData.refCount.set(entityKey, next);
  if (!next) {
    // No references left: schedule the entity for garbage collection
    currentData.gc.add(entityKey);
  } else if (!previous && next) {
    // Freshly (re-)referenced: make sure it won't be collected
    currentData.gc.delete(entityKey);
  }
};
/** Adjusts the reference counts of all entities of a link on a refCount dict by "by" and updates the gc */
var updateRCForLink = (link, by) => {
  if (Array.isArray(link)) {
    for (var entry of link) updateRCForLink(entry, by);
  } else if (typeof link === 'string') {
    updateRCForEntity(link, by);
  }
};
/** Writes all parsed FieldInfo objects of a given node dict to a given array if it hasn't been seen */
var extractNodeFields = (fieldInfos, seenFieldKeys, node) => {
  if (node === undefined) return;
  for (var fieldKey in node) {
    if (seenFieldKeys.has(fieldKey)) continue;
    // Turn the serialized fieldKey back into a rich FieldInfo object
    // carrying the field's name and parsed arguments
    fieldInfos.push(fieldInfoOfKey(fieldKey));
    seenFieldKeys.add(fieldKey);
  }
};
/** Writes all parsed FieldInfo objects of all nodes in a NodeMap to a given array */
var extractNodeMapFields = (fieldInfos, seenFieldKeys, entityKey, map) => {
  // Base map first, then each optimistic layer in priority order
  extractNodeFields(fieldInfos, seenFieldKeys, map.base.get(entityKey));
  for (var layerKey of currentData.optimisticOrder) {
    var optimistic = map.optimistic.get(layerKey);
    if (optimistic !== undefined) {
      extractNodeFields(fieldInfos, seenFieldKeys, optimistic.get(entityKey));
    }
  }
};
/** Garbage collects all entities that have been marked as having no references */
var gc = () => {
  // If we're currently awaiting deferred results, abort GC run
  if (currentData.optimisticOrder.length) return;
  // Iterate over all entities that have been marked for deletion
  // Entities have been marked for deletion in `updateRCForEntity` if
  // their reference count dropped to 0
  for (var entityKey of currentData.gc.keys()) {
    // Remove the current key from the GC batch
    currentData.gc.delete(entityKey);
    // Check first whether the entity has any references,
    // if so, we skip it from the GC run
    var rc = getRefCount(entityKey);
    if (rc > 0) continue;
    var record = currentData.records.base.get(entityKey);
    // Delete the reference count, and delete the entity from the GC batch
    currentData.refCount.delete(entityKey);
    currentData.records.base.delete(entityKey);
    // Drop the entity from its typename index, if it was indexed
    var typename = record && record.__typename;
    if (typename) {
      var type = currentData.types.get(typename);
      if (type) type.delete(entityKey);
    }
    // Deleting the entity's links also decrements refcounts of everything it
    // pointed at, which may enqueue further entities into the GC batch
    var linkNode = currentData.links.base.get(entityKey);
    if (linkNode) {
      currentData.links.base.delete(entityKey);
      for (var fieldKey in linkNode) updateRCForLink(linkNode[fieldKey], -1);
    }
  }
};
/** Records an entity (or a root-entity field) as a dependency of the current run. */
var updateDependencies = (entityKey, fieldKey) => {
  if (entityKey !== currentData.queryRootKey) {
    currentDependencies.add(entityKey);
    return;
  }
  // For the root entity, depend on individual fields instead (except __typename)
  if (fieldKey !== undefined && fieldKey !== '__typename') {
    currentDependencies.add(joinKeys(entityKey, fieldKey));
  }
};
/** Queues a key for persistence, unless this run is optimistic or storage is absent. */
var updatePersist = (entityKey, fieldKey) => {
  if (!currentOptimistic && currentData.storage) {
    currentData.persist.add(serializeKeys(entityKey, fieldKey));
  }
};
/** Reads an entity's field (a "record") from data */
var readRecord = (entityKey, fieldKey) => {
  // Reads are tracked as dependencies of the current run
  if (currentOperation === 'read') updateDependencies(entityKey, fieldKey);
  return getNode(currentData.records, entityKey, fieldKey);
};
/** Reads an entity's link from data */
var readLink = (entityKey, fieldKey) => {
  if (currentOperation === 'read') updateDependencies(entityKey, fieldKey);
  return getNode(currentData.links, entityKey, fieldKey);
};
/** Returns all known entity keys for a typename (shared empty set when none). */
var getEntitiesForType = typename => currentData.types.get(typename) || DEFAULT_EMPTY_SET;
/** Indexes an entity key under its typename. */
var writeType = (typename, entityKey) => {
  var keys = currentData.types.get(typename);
  if (keys) {
    keys.add(entityKey);
  } else {
    currentData.types.set(typename, new Set([entityKey]));
  }
};
/** Returns the concrete typenames recorded for an abstract type (shared empty set when none). */
var getConcreteTypes = typename => currentData.abstractToConcreteMap.get(typename) || DEFAULT_EMPTY_SET;
/** Returns whether any entity of this (concrete) typename has been indexed. */
var isSeenConcreteType = typename => currentData.types.has(typename);
/** Records that `concreteType` was seen as an implementation/member of `abstractType`. */
var writeConcreteType = (abstractType, concreteType) => {
  var concrete = currentData.abstractToConcreteMap.get(abstractType);
  if (concrete) {
    concrete.add(concreteType);
  } else {
    currentData.abstractToConcreteMap.set(abstractType, new Set([concreteType]));
  }
};
/** Writes an entity's field (a "record") to data */
var writeRecord = (entityKey, fieldKey, value) => {
  var previous = getNode(currentData.records, entityKey, fieldKey);
  // Only mark dependencies/persistence when the stored value actually changes
  if (!isEqualLinkOrScalar(previous, value)) {
    updateDependencies(entityKey, fieldKey);
    updatePersist(entityKey, fieldKey);
  }
  setNode(currentData.records, entityKey, fieldKey, value);
};
/** Returns whether an entity has the field, either as a record or as a link. */
var hasField = (entityKey, fieldKey) => {
  if (readRecord(entityKey, fieldKey) !== undefined) return true;
  return readLink(entityKey, fieldKey) !== undefined;
};
/** Writes an entity's link to data */
var writeLink = (entityKey, fieldKey, link) => {
  // Reference counts are only maintained for the base (non-optimistic) layer
  if (!currentOptimisticKey) {
    var baseLinks = currentData.links.base;
    var entityLinks = baseLinks && baseLinks.get(entityKey);
    // Release the previous link's references, then retain the new one's
    updateRCForLink(entityLinks && entityLinks[fieldKey], -1);
    updateRCForLink(link, 1);
  }
  var previous = getNode(currentData.links, entityKey, fieldKey);
  // Only mark dependencies/persistence when the stored link actually changes
  if (!isEqualLinkOrScalar(previous, link)) {
    updateDependencies(entityKey, fieldKey);
    updatePersist(entityKey, fieldKey);
  }
  // Update the link
  setNode(currentData.links, entityKey, fieldKey, link);
};
/** Reserves an optimistic layer and preorders it */
// `hasNext` indicates the layer expects future (deferred/streamed) results.
var reserveLayer = (data, layerKey, hasNext) => {
  // Find the current index for the layer, and remove it from
  // the order if it exists already
  var index = data.optimisticOrder.indexOf(layerKey);
  if (index > -1) data.optimisticOrder.splice(index, 1);
  if (hasNext) {
    data.deferredKeys.add(layerKey);
    // If the layer has future results then we'll move it past any layer that's
    // still empty, so currently pending operations will take precedence over it
    // (scan stops at the first deferred layer or the first written commutative layer)
    for (index = index > -1 ? index : 0; index < data.optimisticOrder.length && !data.deferredKeys.has(data.optimisticOrder[index]) && (!data.dirtyKeys.has(data.optimisticOrder[index]) || !data.commutativeKeys.has(data.optimisticOrder[index])); index++);
  } else {
    data.deferredKeys.delete(layerKey);
    // Protect optimistic layers from being turned into non-optimistic layers
    // while preserving optimistic data
    if (index > -1 && !data.commutativeKeys.has(layerKey)) clearLayer(data, layerKey);
    index = 0;
  }
  // Register the layer with the deferred or "top" index and
  // mark it as commutative
  data.optimisticOrder.splice(index, 0, layerKey);
  data.commutativeKeys.add(layerKey);
};
/** Checks whether a given layer exists */
var hasLayer = (data, layerKey) => {
  if (data.commutativeKeys.has(layerKey)) return true;
  return data.optimisticOrder.indexOf(layerKey) > -1;
};
/** Creates an optimistic layer of links and records */
var createLayer = (data, layerKey) => {
  // New layers take the highest priority (front of the order)
  if (!data.optimisticOrder.includes(layerKey)) {
    data.optimisticOrder.unshift(layerKey);
  }
  if (!data.dirtyKeys.has(layerKey)) {
    data.dirtyKeys.add(layerKey);
    data.links.optimistic.set(layerKey, new Map());
    data.records.optimistic.set(layerKey, new Map());
  }
};
/** Clears all links and records of an optimistic layer */
var clearLayer = (data, layerKey) => {
  // Only dirty (i.e. written) layers carry any data to clear
  if (!data.dirtyKeys.has(layerKey)) return;
  data.dirtyKeys.delete(layerKey);
  data.records.optimistic.delete(layerKey);
  data.links.optimistic.delete(layerKey);
  data.deferredKeys.delete(layerKey);
};
/** Deletes links and records of an optimistic layer, and the layer itself */
var deleteLayer = (data, layerKey) => {
  var position = data.optimisticOrder.indexOf(layerKey);
  if (position > -1) {
    data.optimisticOrder.splice(position, 1);
    data.commutativeKeys.delete(layerKey);
  }
  clearLayer(data, layerKey);
};
/** Merges an optimistic layer of links and records into the base data */
var squashLayer = layerKey => {
  // Hide current dependencies from squashing operations
  // (writes below would otherwise pollute the caller's dependency set)
  var previousDependencies = currentDependencies;
  currentDependencies = new Set();
  currentOperation = 'write';
  // Replay all of the layer's links onto the base maps
  var links = currentData.links.optimistic.get(layerKey);
  if (links) {
    for (var entry of links.entries()) {
      var entityKey = entry[0];
      var keyMap = entry[1];
      for (var fieldKey in keyMap) {
        writeLink(entityKey, fieldKey, keyMap[fieldKey]);
      }
    }
  }
  // Replay all of the layer's records onto the base maps
  // (the underscore-prefixed names are transpiler output avoiding shadowing)
  var records = currentData.records.optimistic.get(layerKey);
  if (records) {
    for (var _entry of records.entries()) {
      var _entityKey = _entry[0];
      var _keyMap = _entry[1];
      for (var _fieldKey in _keyMap) {
        writeRecord(_entityKey, _fieldKey, _keyMap[_fieldKey]);
      }
    }
  }
  // Restore the caller's dependency set, then drop the now-merged layer
  currentDependencies = previousDependencies;
  deleteLayer(currentData, layerKey);
};
/** Return an array of FieldInfo (info on all the fields and their arguments) for a given entity */
var inspectFields = entityKey => {
  var fieldInfos = [];
  var seenFieldKeys = new Set();
  // Inspecting an entity's fields counts as depending on the entity itself
  updateDependencies(entityKey);
  // Collect deduplicated FieldInfo entries from both links and records;
  // seenFieldKeys guarantees each fieldKey is reported once
  extractNodeMapFields(fieldInfos, seenFieldKeys, entityKey, currentData.links);
  extractNodeMapFields(fieldInfos, seenFieldKeys, entityKey, currentData.records);
  return fieldInfos;
};
/** Serializes all fields marked for persistence and flushes them to the configured storage. */
var persistData = () => {
  if (!currentData.storage) return;
  // Read through all layers (including optimistic ones) while serializing
  currentOptimistic = true;
  currentOperation = 'read';
  var entries = {};
  for (var serializedKey of currentData.persist.keys()) {
    var { entityKey, fieldKey } = deserializeKeyInfo(serializedKey);
    var link = readLink(entityKey, fieldKey);
    if (link !== undefined) {
      // Serialized links are marked with a leading colon to tell them
      // apart from records on rehydration (see hydrateData)
      entries[serializedKey] = `:${core.stringifyVariables(link)}`;
    } else {
      var record = readRecord(entityKey, fieldKey);
      // `undefined` marks a deleted entry for the storage layer
      entries[serializedKey] = record !== undefined ? core.stringifyVariables(record) : undefined;
    }
  }
  currentOptimistic = false;
  currentData.storage.writeData(entries);
  currentData.persist.clear();
};
/** Writes persisted entries back into `data`, then attaches `storage` and ends hydration. */
var hydrateData = (data, storage, entries) => {
  initDataState('write', data, null);
  for (var serializedKey in entries) {
    var serialized = entries[serializedKey];
    // `undefined` entries were deletions; nothing to restore
    if (serialized === undefined) continue;
    var { entityKey, fieldKey } = deserializeKeyInfo(serializedKey);
    if (serialized.startsWith(':')) {
      // A leading colon marks a serialized link (see persistData); only
      // hydrate when no fresher in-memory value exists for this field
      if (readLink(entityKey, fieldKey) === undefined) {
        writeLink(entityKey, fieldKey, JSON.parse(serialized.slice(1)));
      }
    } else if (readRecord(entityKey, fieldKey) === undefined) {
      writeRecord(entityKey, fieldKey, JSON.parse(serialized));
    }
  }
  data.storage = storage;
  data.hydrating = false;
  clearDataState();
};
/**
 * Compares two link or scalar values for equality.
 * Scalars (and non-array objects) are compared with strict equality;
 * arrays are compared element-wise (shallow), so two distinct array
 * instances holding the same links/scalars count as equal.
 *
 * Fix: previously `if (a !== b) return false;` ran before the array
 * branch, which made the element-wise comparison unreachable — two
 * different array instances always compared unequal by reference.
 */
function isEqualLinkOrScalar(a, b) {
  if (typeof a !== typeof b) return false;
  if (Array.isArray(a) && Array.isArray(b)) {
    if (a.length !== b.length) return false;
    // Shallow comparison; nested arrays/objects still compare by reference
    return !a.some((el, index) => el !== b[index]);
  }
  return a === b;
}
// Module-level traversal state shared while walking a selection set:
// the context of the traversal currently in progress (set by updateContext)
var contextRef = null;
// whether the field currently being visited sits inside a deferred fragment
// (set by SelectionIterator.next)
var deferRef = false;
// optional/required override inherited from the enclosing fragment, if any
// (set by SelectionIterator.next; `undefined` means no override)
var optionalRef = undefined;
// Checks whether the current data field is a cache miss because of a GraphQLError:
// looks up the current walked path in the context's path-indexed error map.
var getFieldError = ctx => {
  var internal = ctx.__internal;
  if (internal.path.length > 0 && internal.errorMap) {
    return internal.errorMap[internal.path.join('.')];
  }
  return undefined;
};
/** Creates a fresh traversal context for a read/write starting at `entityKey`. */
var makeContext = (store, variables, fragments, typename, entityKey, error) => {
  // Index field-level GraphQL errors by their dotted path so reads can
  // detect error-caused cache misses (see getFieldError)
  var errorMap;
  if (error && error.graphQLErrors) {
    for (var graphQLError of error.graphQLErrors) {
      if (graphQLError.path && graphQLError.path.length) {
        if (!errorMap) errorMap = Object.create(null);
        errorMap[graphQLError.path.join('.')] = graphQLError;
      }
    }
  }
  return {
    store,
    variables,
    fragments,
    parent: { __typename: typename },
    parentTypeName: typename,
    parentKey: entityKey,
    parentFieldKey: '',
    fieldName: '',
    error: undefined,
    partial: false,
    hasNext: false,
    optimistic: currentOptimistic,
    __internal: {
      path: [],
      errorMap
    }
  };
};
// Mutates `ctx` in place to point at the entity/field currently being
// visited, and publishes it on the module-level `contextRef`.
var updateContext = (ctx, data, typename, entityKey, fieldKey, fieldName) => {
contextRef = ctx;
ctx.parent = data;
ctx.parentTypeName = typename;
ctx.parentKey = entityKey;
ctx.parentFieldKey = fieldKey;
ctx.fieldName = fieldName;
// Refresh the field-level error for the new position; derived from the
// error map and the walked path maintained in `ctx.__internal`
ctx.error = getFieldError(ctx);
};
/**
 * Schema-less fragment matching heuristic: a fragment is assumed to match
 * `typename` when every one of its immediate fields is already present on
 * the entity in the cache. Warns in development, since without a schema
 * this match cannot be made deterministic.
 */
var isFragmentHeuristicallyMatching = (node, typename, entityKey, vars, logger) => {
  if (!typename) return false;
  var typeCondition = getTypeCondition(node);
  // No type condition, or an exact typename match, always matches
  if (!typeCondition || typename === typeCondition) return true;
  process.env.NODE_ENV !== 'production' ? warn('Heuristic Fragment Matching: A fragment is trying to match against the `' + typename + '` type, ' + 'but the type condition is `' + typeCondition + '`. Since GraphQL allows for interfaces `' + typeCondition + '` may be an ' + 'interface.\nA schema needs to be defined for this match to be deterministic, ' + 'otherwise the fragment will be matched heuristically!', 16, logger) : void 0;
  // Any directly-selected field missing from the cache disproves the match;
  // nested fragments are ignored here
  for (var select of getSelectionSet(node)) {
    if (select.kind !== graphql_web.Kind.FIELD) continue;
    var fieldKey = keyOfField(getName(select), getFieldArguments(select, vars));
    if (!hasField(entityKey, fieldKey)) return false;
  }
  return true;
};
/**
 * Depth-first iterator over a selection set that flattens fragments inline.
 * `next()` yields included field nodes one at a time, descending into
 * matching fragment spreads / inline fragments via an explicit stack, and
 * updates the module-level `deferRef`/`optionalRef` flags for each yielded
 * field.
 */
class SelectionIterator {
// NOTE: Outside of this file, we expect `_defer` to always be reset to `false`
// NOTE: Inside this file we expect the state to be recursively passed on
constructor(typename, entityKey, _defer, _optional, selectionSet, ctx) {
this.typename = typename;
this.entityKey = entityKey;
this.ctx = ctx;
// Stack of partially-iterated selection sets; each frame carries the
// defer/optional state inherited from its enclosing fragments
this.stack = [{
selectionSet,
index: 0,
defer: _defer,
optional: _optional
}];
}
// Returns the next included field node, or `undefined` when exhausted
next() {
while (this.stack.length > 0) {
var state = this.stack[this.stack.length - 1];
while (state.index < state.selectionSet.length) {
var select = state.selectionSet[state.index++];
// Selections excluded by @include/@skip are silently dropped
if (!shouldInclude(select, this.ctx.variables)) ; else if (select.kind !== graphql_web.Kind.FIELD) {
// A fragment is either referred to by FragmentSpread or inline
var fragment = select.kind !== graphql_web.Kind.INLINE_FRAGMENT ? this.ctx.fragments[getName(select)] : select;
if (fragment) {
// With a schema the match is decided via isInterfaceOfType; without
// one, reads fall back to seen-type bookkeeping or the heuristic match
var isMatching = !fragment.typeCondition || (this.ctx.store.schema ? isInterfaceOfType(this.ctx.store.schema, fragment, this.typename) : currentOperation === 'read' && isFragmentMatching(fragment.typeCondition.name.value, this.typename) || isFragmentHeuristicallyMatching(fragment, this.typename, this.entityKey, this.ctx.variables, this.ctx.store.logger));
// Schema-less writes descend into every fragment regardless of match
if (isMatching || currentOperation === 'write' && !this.ctx.store.schema) {
if (process.env.NODE_ENV !== 'production') pushDebugNode(this.typename, fragment);
var isFragmentOptional = isOptional(select);
// Record `typename` as a concrete type of the (possibly abstract)
// type condition for future isFragmentMatching calls
if (isMatching && fragment.typeCondition && this.typename !== fragment.typeCondition.name.value) {
writeConcreteType(fragment.typeCondition.name.value, this.typename);
}
this.stack.push(state = {
selectionSet: getSelectionSet(fragment),
index: 0,
defer: state.defer || isDeferred(select, this.ctx.variables),
optional: isFragmentOptional !== undefined ? isFragmentOptional : state.optional
});
}
}
} else if (currentOperation === 'write' || !select._generated) {
// Publish the inherited defer/optional state for the yielded field
deferRef = state.defer;
optionalRef = state.optional;
return select;
}
}
this.stack.pop();
if (process.env.NODE_ENV !== 'production') popDebugNode();
}
return undefined;
}
}
// Schema-less fragment match based on previously-seen type information:
// matches when the condition equals the typename, or when the condition
// looks abstract and `typename` was recorded as one of its concrete types.
var isFragmentMatching = (typeCondition, typename) => {
  if (!typename) return false;
  if (typename === typeCondition) return true;
  // A condition we've already seen as a concrete __typename can't be abstract
  if (isSeenConcreteType(typeCondition)) return false;
  var concreteTypes = getConcreteTypes(typeCondition);
  return concreteTypes.size && concreteTypes.has(typename);
};
// Normalizes a missing value (null or undefined) to null; passes
// everything else through untouched.
var ensureData = x => {
  if (x == null) return null;
  return x;
};
// Converts an entity (or array of entities, recursively) into its cache
// key via `store.keyOfEntity`; nullish/falsy refs normalize to null.
// Warns in development when an object ref cannot produce a key.
var ensureLink = (store, ref) => {
  if (!ref) return null;
  if (Array.isArray(ref)) {
    var links = [];
    for (var item of ref) links.push(ensureLink(store, item));
    return links;
  }
  var link = store.keyOfEntity(ref);
  if (process.env.NODE_ENV !== 'production') {
    if (!link && ref && typeof ref === 'object') {
      warn("Can't generate a key for link(...) item." + '\nYou have to pass an `id` or `_id` field or create a custom `keys` config for `' + ref.__typename + '`.', 12, store.logger);
    }
  }
  return link;
};
/** Reads a GraphQL query from the cache.
 * @internal
 */
var _query = (store, request, input, error) => {
  var query = core.formatDocument(request.query);
  var operation = getMainOperation(query);
  var rootKey = store.rootFields[operation.operation];
  var rootSelect = getSelectionSet(operation);
  var ctx = makeContext(store, normalizeVariables(operation, request.variables), getFragments(query), rootKey, rootKey, error);
  if (process.env.NODE_ENV !== 'production') pushDebugNode(rootKey, operation);
  // NOTE: readRoot() may reuse "previous result data" as indicated by the
  // `originalData` argument in readRoot(). This behaviour isn't used for
  // readSelection() however, which always produces results from scratch
  var data;
  if (rootKey !== ctx.store.rootFields['query']) {
    data = readRoot(ctx, rootKey, rootSelect, input || makeData());
  } else {
    data = readSelection(ctx, rootKey, rootSelect, input || makeData());
  }
  if (process.env.NODE_ENV !== 'production') {
    popDebugNode();
    getCurrentDependencies();
  }
  return {
    dependencies: currentDependencies,
    partial: ctx.partial || !data,
    hasNext: ctx.hasNext,
    data: data || null
  };
};
// Reads a root-level (uncached root types) selection against previous
// result data, reusing `input` when nothing referentially changed.
var readRoot = (ctx, entityKey, select, input) => {
  var typename = ctx.store.rootNames[entityKey] ? entityKey : input.__typename;
  // Without a typename we can't traverse the selection; hand back the input
  if (typeof typename !== 'string') return input;
  var iterate = new SelectionIterator(entityKey, entityKey, false, undefined, select, ctx);
  var hasChanged = currentForeignData;
  var output = makeData(input);
  var node;
  while ((node = iterate.next()) !== undefined) {
    var fieldAlias = getFieldAlias(node);
    var fieldValue = input[fieldAlias];
    // Track the alias on the walked path while the field is processed
    ctx.__internal.path.push(fieldAlias);
    // `undefined` means the value is missing from the cache
    var dataFieldValue = node.selectionSet && fieldValue !== null
      ? readRootField(ctx, getSelectionSet(node), ensureData(fieldValue))
      : fieldValue;
    // Track referential changes so unchanged inputs can be returned as-is
    hasChanged = hasChanged || dataFieldValue !== fieldValue;
    if (dataFieldValue !== undefined) output[fieldAlias] = dataFieldValue;
    ctx.__internal.path.pop();
  }
  return hasChanged ? output : input;
};
// Reads a single root field's value: arrays recurse per item, keyable
// entities are read back from the cache, and keyless objects are
// traversed as embedded root data.
var readRootField = (ctx, select, originalData) => {
  if (Array.isArray(originalData)) {
    var hasChanged = currentForeignData;
    var newData = new Array(originalData.length);
    for (var index = 0; index < originalData.length; index++) {
      // Track the index on the walked path while reading each item
      ctx.__internal.path.push(index);
      newData[index] = readRootField(ctx, select, originalData[index]);
      ctx.__internal.path.pop();
      hasChanged = hasChanged || newData[index] !== originalData[index];
    }
    return hasChanged ? newData : originalData;
  }
  if (originalData === null) return null;
  // Write entity to key that falls back to the given parentFieldKey
  var entityKey = ctx.store.keyOfEntity(originalData);
  if (entityKey === null) {
    return readRoot(ctx, originalData.__typename, select, originalData);
  }
  // We assume that since this is used for result data this can never be
  // undefined, since the result data has already been written to the cache
  return readSelection(ctx, entityKey, select, originalData) || null;
};
/** Reads a fragment's selection for a given entity from the cache.
 * Returns null when the fragment can't be found or the entity can't be keyed.
 * @internal
 */
var _queryFragment = (store, query, entity, variables, fragmentName) => {
  var fragments = getFragments(query);
  // Use the named fragment if requested, otherwise the document's first one
  var fragment = fragmentName ? fragments[fragmentName] : fragments[Object.keys(fragments)[0]];
  if (!fragment) {
    if (process.env.NODE_ENV !== 'production') {
      warn(
        fragmentName
          ? 'readFragment(...) was called with a fragment name that does not exist.\n' + 'You provided ' + fragmentName + ' but could only find ' + Object.keys(fragments).join(', ') + '.'
          : 'readFragment(...) was called with an empty fragment.\n' + 'You have to call it with at least one fragment in your GraphQL document.',
        6,
        store.logger
      );
    }
    return null;
  }
  var typename = getFragmentTypeName(fragment);
  // Patch a missing __typename onto object entities so key generation works
  if (typeof entity !== 'string' && !entity.__typename) entity.__typename = typename;
  var entityKey = store.keyOfEntity(entity);
  if (!entityKey) {
    if (process.env.NODE_ENV !== 'production') {
      warn("Can't generate a key for readFragment(...).\n" + 'You have to pass an `id` or `_id` field or create a custom `keys` config for `' + typename + '`.', 7, store.logger);
    }
    return null;
  }
  if (process.env.NODE_ENV !== 'production') pushDebugNode(typename, fragment);
  var ctx = makeContext(store, variables || {}, fragments, typename, entityKey, undefined);
  var result = readSelection(ctx, entityKey, getSelectionSet(fragment), makeData()) || null;
  if (process.env.NODE_ENV !== 'production') popDebugNode();
  return result;
};
// Picks the resolver for a field: a custom directive on the field wins
// over a configured type/field resolver. Warns in development when both
// are present, since only the directive applies.
function getFieldResolver(directives, typename, fieldName, ctx) {
  var typeResolvers = ctx.store.resolvers[typename];
  var fieldResolver = typeResolvers && typeResolvers[fieldName];
  var directiveResolver;
  for (var directiveName in directives) {
    var directiveNode = directives[directiveName];
    // `include`/`skip` are built-in and handled during selection iteration
    if (!directiveNode || directiveName === 'include' || directiveName === 'skip') continue;
    var makeDirectiveResolver = ctx.store.directives[directiveName];
    if (makeDirectiveResolver) {
      directiveResolver = makeDirectiveResolver(getFieldArguments(directiveNode, ctx.variables));
      // In production the first matching directive short-circuits; in
      // development we fall through to the dual-resolver warning below
      if (process.env.NODE_ENV === 'production') return directiveResolver;
      break;
    }
  }
  if (process.env.NODE_ENV !== 'production') {
    if (fieldResolver && directiveResolver) {
      warn(`A resolver and directive is being used at "${typename}.${fieldName}" simultaneously. Only the directive will apply.`, 28, ctx.store.logger);
    }
  }
  return directiveResolver || fieldResolver;
}
var readSelection = (ctx, key, select, input, result) => {
var {
store
} = ctx;
var isQuery = key === store.rootFields.query;
var entityKey = result && store.keyOfEntity(result) || key;
if (process.env.NODE_ENV !== 'production') {
if (!isQuery && !!ctx.store.rootNames[entityKey]) {
warn('Invalid root traversal: A selection was being read on `' + entityKey + '` which is an uncached root type.\n' + 'The `' + ctx.store.rootFields.mutation + '` and `' + ctx.store.rootFields.subscription + '` types are special ' + 'Operation Root Types and cannot be read back from the cache.', 25, store.logger);
}
}
var typename = !isQuery ? readRecord(entityKey, '__typename') || result && result.__typename : key;
if (typeof typename !== 'string') {
return;
} else if (result && typename !== result.__typename) {
process.env.NODE_ENV !== 'production' ? warn('Invalid resolver data: The resolver at `' + entityKey + '` returned an ' + 'invalid typename that could not be reconciled with the cache.', 8, store.logger) : void 0;
return;
}
var selection = new SelectionIterator(typename, entityKey, false, undefined, select, ctx);
var hasFields = false;
var hasNext = false;
var hasChanged = currentForeignData;
var node;
var hasPartials = ctx.partial;
var output = makeData(input);
while ((node = selection.next()) !== undefined) {
// Derive the needed data from our node.
var fieldName = getName(node);
var fieldArgs = getFieldArguments(node, ctx.variables);
var fieldAlias = getFieldAlias(node);
var directives = getDirectives(node);
var resolver = getFieldResolver(directives, typename, fieldName, ctx);
var fieldKey = keyOfField(fieldName, fieldArgs);
var _key = joinKeys(entityKey, fieldKey);
var fieldValue = readRecord(entityKey, fieldKey);
var resultValue = result ? result[fieldN