/**
 * @data-client/normalizr
 * Normalizes and denormalizes JSON according to schema for Redux and Flux applications.
 * (Compiled CommonJS bundle — 819 lines / 27.3 kB, JavaScript.)
 */
'use strict';
// Sentinel returned by denormalization when an entity failed validation or
// was deleted; propagated up so consumers can detect invalid results.
const INVALID = Symbol('INVALID');
/**
* Helpers to enable Immutable compatibility *without* bringing in
* the 'immutable' package as a dependency.
*/
/**
 * Check if an object is an Immutable.js structure by probing for a key
 * specific to the immutable library.
 *
 * @param {any} object
 * @return {boolean}
 */
function isImmutable(object) {
  if (typeof object.hasOwnProperty !== 'function') return false;
  // Immutable.Map carries __ownerID directly; Immutable.Record nests it under _map.
  return (
    Object.hasOwnProperty.call(object, '__ownerID') ||
    !!(object._map && Object.hasOwnProperty.call(object._map, '__ownerID'))
  );
}
/**
 * Denormalize an immutable entity.
 *
 * @param {Schema} schema
 * @param {Immutable.Map|Immutable.Record} input
 * @param {any[]} args
 * @param {function} unvisit
 * @return {Immutable.Map|Immutable.Record|Symbol} INVALID when any member denormalized to a symbol
 */
function denormalizeImmutable(schema, input, args, unvisit) {
  let foundInvalid = false;
  let result = input;
  for (const rawKey of Object.keys(schema)) {
    // Immutable maps cast keys to strings on write so we need to ensure
    // we're accessing them using string keys.
    const stringKey = `${rawKey}`;
    const item = unvisit(schema[stringKey], result.get(stringKey));
    if (typeof item === 'symbol') {
      foundInvalid = true;
    }
    // Only write back keys the structure actually has.
    if (result.has(stringKey)) {
      result = result.set(stringKey, item);
    }
  }
  return foundInvalid ? INVALID : result;
}
/**
 * Build an entity-lookup function for the given store state.
 * Handles both Immutable.js state (getIn) and plain nested objects.
 */
function getEntities(state) {
  // Inline Immutable.js detection (same probe as isImmutable) so the right
  // lookup strategy is chosen once, up front.
  const stateIsImmutable = !!(
    typeof state.hasOwnProperty === 'function' &&
    (Object.hasOwnProperty.call(state, '__ownerID') ||
      (state._map && Object.hasOwnProperty.call(state._map, '__ownerID')))
  );
  if (stateIsImmutable) {
    return ({ key, pk }) => state.getIn([key, pk]);
  }
  return ({ key, pk }) => state[key]?.[pk];
}
/** Per-call entity cache used when no global memoization cache is supplied. */
class LocalCache {
  // schema key -> (pk -> denormalized entity)
  localCache = new Map();
  /** Return the cached entity for (schema.key, pk), computing it on a miss. */
  getEntity(pk, schema, entity, computeValue) {
    const { key } = schema;
    let entitiesOfKind = this.localCache.get(key);
    if (!entitiesOfKind) {
      entitiesOfKind = new Map();
      this.localCache.set(key, entitiesOfKind);
    }
    if (!entitiesOfKind.get(pk)) {
      // computeValue writes its result into the per-key map
      computeValue(entitiesOfKind);
    }
    return entitiesOfKind.get(pk);
  }
  /** No result-level caching here: always recompute, with no tracked paths. */
  getResults(input, cachable, computeValue) {
    return { data: computeValue(), paths: [] };
  }
}
// Sentinel object standing in for `undefined`, since we cannot perform
// (Weak)Map lookups with `undefined` itself as a key.
const UNDEF = {};
/** An entity schema is any non-null schema that declares a `pk` member. */
function isEntity(schema) {
  if (schema === null) return false;
  return schema.pk !== undefined;
}
/**
 * Unwrap an array-schema definition to its single member schema.
 * In development, throws when more than one schema is supplied.
 */
const validateSchema = definition => {
  /* istanbul ignore else */
  if (process.env.NODE_ENV !== 'production') {
    if (Array.isArray(definition) && definition.length > 1) {
      throw new Error(`Expected schema definition to be a single schema, but found ${definition.length}.`);
    }
  }
  return definition[0];
};
/** Normalize an array-or-object input into an array of its values. */
const getValues = input => (Array.isArray(input) ? input : Object.values(input));
/** Keep items that denormalized to something real (drops undefined and INVALID symbols). */
const filterEmpty = item => item !== undefined && typeof item !== 'symbol';
/** Normalize an array schema: visit every member of the input with the item schema. */
const normalize$2 = (schema, input, parent, key, args, visit, addEntity, getEntity, checkLoop) => {
  const itemSchema = validateSchema(schema);
  // Special case: Arrays pass *their* parent on to their children, since there
  // is not any special information that can be gathered from themselves directly
  return getValues(input).map(value => visit(itemSchema, value, parent, key, args));
};
/** Denormalize an array schema, dropping undefined/INVALID members. */
const denormalize$2 = (schema, input, args, unvisit) => {
  const itemSchema = validateSchema(schema);
  // Anything without .map (non-array-like) passes through untouched.
  if (!input.map) return input;
  return input.map(entityOrId => unvisit(itemSchema, entityOrId)).filter(filterEmpty);
};
/** Array schemas cannot be queried by key; there is nothing to build. */
function queryKey$1(schema, args, queryKey, getEntity, getIndex) {
  return undefined;
}
/**
 * Normalize a plain-object schema: visit each schema member against the
 * matching input member; members that normalize to undefined are removed.
 */
const normalize$1 = (schema, input, parent, key, args, visit, addEntity, getEntity, checkLoop) => {
  const output = { ...input };
  for (const schemaKey of Object.keys(schema)) {
    const value = visit(schema[schemaKey], input[schemaKey], input, schemaKey, args);
    if (value === undefined) {
      delete output[schemaKey];
    } else {
      output[schemaKey] = value;
    }
  }
  return output;
};
/**
 * Denormalize a plain-object schema. Immutable inputs are delegated to
 * denormalizeImmutable; returns INVALID if any member resolved to a symbol.
 */
const denormalize$1 = (schema, input, args, unvisit) => {
  // Inline Immutable.js detection (same probe as isImmutable).
  const inputIsImmutable = !!(
    typeof input.hasOwnProperty === 'function' &&
    (Object.hasOwnProperty.call(input, '__ownerID') ||
      (input._map && Object.hasOwnProperty.call(input._map, '__ownerID')))
  );
  if (inputIsImmutable) {
    return denormalizeImmutable(schema, input, args, unvisit);
  }
  const output = { ...input };
  let foundInvalid = false;
  for (const key of Object.keys(schema)) {
    const item = unvisit(schema[key], output[key]);
    // Only overwrite members actually present in the input.
    if (output[key] !== undefined) {
      output[key] = item;
    }
    if (typeof item === 'symbol') {
      foundInvalid = true;
    }
  }
  return foundInvalid ? INVALID : output;
};
/**
 * Build the query key for a plain-object schema by recursing into each member
 * via the supplied `queryKey` (buildQueryKey) callback.
 */
function queryKey(schema, args, queryKey, getEntity, getIndex) {
  const resultObject = {};
  Object.keys(schema).forEach(k => {
    resultObject[k] = queryKey(schema[k], args, getEntity, getIndex);
  });
  return resultObject;
}
/**
 * Denormalize a single entity reference, using `cache` for memoization and
 * dependency tracking.
 *
 * `entityOrId` is either the entity object itself (nested data) or a primary
 * key to resolve via `getEntity`.
 */
function unvisitEntity(schema, entityOrId, args, unvisit, getEntity, cache) {
  const entity = typeof entityOrId === 'object' ? entityOrId : getEntity({
    key: schema.key,
    pk: entityOrId
  });
  // A symbol lookup result is the INVALID marker; let the schema's
  // denormalize decide how to propagate it.
  if (typeof entity === 'symbol' && typeof schema.denormalize === 'function') {
    return schema.denormalize(entity, args, unvisit);
  }
  if (entity === undefined && typeof entityOrId !== 'object' && entityOrId !== '' && entityOrId !== 'undefined') {
    // we cannot perform lookups with `undefined`, so we use a special object to represent undefined
    // we're actually using this call to ensure we update the cache if a nested schema changes from `undefined`
    // this is because cache.getEntity adds this key,pk as a dependency of anything it is nested under
    return cache.getEntity(entityOrId, schema, UNDEF, localCacheKey => {
      localCacheKey.set(entityOrId, undefined);
    });
  }
  // Primitives and null denormalize to themselves.
  if (typeof entity !== 'object' || entity === null) {
    return entity;
  }
  // Use the id we were handed, or derive one from the entity's pk() method.
  let pk = typeof entityOrId !== 'object' ? entityOrId : schema.pk(isImmutable(entity) ? entity.toJS() : entity, undefined, undefined, args);
  // if we can't generate a working pk we cannot do cache lookups properly,
  // so simply denormalize without caching
  if (pk === undefined || pk === '' || pk === 'undefined') {
    return noCacheGetEntity(localCacheKey => unvisitEntityObject(entity, schema, unvisit, '', localCacheKey, args));
  }
  // just an optimization to make all cache usages of pk monomorphic
  if (typeof pk !== 'string') pk = `${pk}`;
  // last function computes if it is not in any caches
  return cache.getEntity(pk, schema, entity, localCacheKey => unvisitEntityObject(entity, schema, unvisit, pk, localCacheKey, args));
}
/** Run a denormalization into a throwaway cache and read back the '' slot. */
function noCacheGetEntity(computeValue) {
  const scratch = new Map();
  computeValue(scratch);
  return scratch.get('');
}
/**
 * Validate and denormalize one entity object into `localCacheKey` under `pk`.
 * Seeds the cache before recursing so cyclical references resolve.
 */
function unvisitEntityObject(entity, schema, unvisit, pk, localCacheKey, args) {
  // Inline Immutable.js detection (same probe as isImmutable).
  const entityIsImmutable = !!(
    typeof entity.hasOwnProperty === 'function' &&
    (Object.hasOwnProperty.call(entity, '__ownerID') ||
      (entity._map && Object.hasOwnProperty.call(entity._map, '__ownerID')))
  );
  const entityCopy = schema.createIfValid(entityIsImmutable ? entity.toObject() : entity);
  localCacheKey.set(pk, entityCopy);
  if (entityCopy === undefined) {
    // undefined indicates we should suspense (perhaps failed validation)
    localCacheKey.set(pk, INVALID);
  } else if (typeof schema.denormalize === 'function') {
    localCacheKey.set(pk, schema.denormalize(entityCopy, args, unvisit));
  }
}
/**
 * Build the denormalization entry point bound to an entity lookup and cache.
 * Dispatch order matters: schemas that implement `denormalize` (entities and
 * schema classes) take precedence over plain object/array shorthand.
 */
const getUnvisit = (getEntity, cache, args) => {
  function unvisit(schema, input) {
    if (!schema) return input;
    if (input === null || input === undefined) {
      return input;
    }
    if (typeof schema.denormalize !== 'function') {
      // deserialize fields (like Temporal.Instant)
      if (typeof schema === 'function') {
        return schema(input);
      }
      // shorthand for object, array
      if (typeof schema === 'object') {
        const method = Array.isArray(schema) ? denormalize$2 : denormalize$1;
        return method(schema, input, args, unvisit);
      }
    } else {
      if (isEntity(schema)) {
        return unvisitEntity(schema, input, args, unvisit, getEntity, cache);
      }
      return schema.denormalize(input, args, unvisit);
    }
    return input;
  }
  return (schema, input) => {
    // in the case where WeakMap cannot be used
    // this test ensures null is properly excluded from WeakMap
    const cachable = Object(input) === input && Object(schema) === schema;
    return cache.getResults(input, cachable, () => unvisit(schema, input));
  };
};
/**
 * Public denormalize: resolve `input` against `schema` using `entities`
 * as the normalized store. Uses a per-call LocalCache (no memoization).
 */
function denormalize(schema, input, entities, args = []) {
  // undefined schema or input means: nothing to do
  if (schema === undefined || input === undefined) return input;
  const run = getUnvisit(getEntities(entities), new LocalCache(), args);
  return run(schema, input).data;
}
/** Maps a (ordered) list of dependencies to a value.
*
* Useful as a memoization cache for flat/normalized stores.
*
* All dependencies are only weakly referenced, allowing automatic garbage collection
* when any dependencies are no longer used.
*/
class WeakDependencyMap {
next = new WeakMap();
nextPath = undefined;
get(entity, getDependency) {
let curLink = this.next.get(entity);
if (!curLink) return EMPTY;
while (curLink.nextPath) {
var _getDependency;
// we cannot perform lookups with `undefined`, so we use a special object to represent undefined
const nextEntity = (_getDependency = getDependency(curLink.nextPath)) != null ? _getDependency : UNDEF;
curLink = curLink.next.get(nextEntity);
if (!curLink) return EMPTY;
}
// curLink exists, but has no path - so must have a value
return [curLink.value, curLink.journey];
}
set(dependencies, value) {
if (dependencies.length < 1) throw new KeySize();
let curLink = this;
for (const {
entity,
path
} of dependencies) {
let nextLink = curLink.next.get(entity);
if (!nextLink) {
nextLink = new Link();
// void members are represented as a symbol so we can lookup
curLink.next.set(entity != null ? entity : UNDEF, nextLink);
}
curLink.nextPath = path;
curLink = nextLink;
}
// in case there used to be more
curLink.nextPath = undefined;
curLink.value = value;
// we could recompute this on get, but it would have a cost and we optimize for `get`
curLink.journey = dependencies.map(dep => dep.path);
}
}
/** Miss result for WeakDependencyMap.get(): no value, no journey. */
const EMPTY = [undefined, undefined];
/** Link in a chain */
class Link {
  constructor() {
    this.next = new WeakMap();
    this.value = undefined;
    this.journey = [];
    this.nextPath = undefined;
  }
}
/** Thrown when WeakDependencyMap.set() receives an empty dependency list. */
class KeySize extends Error {
  constructor(...args) {
    super(...args);
    this.message = 'Keys must include at least one member';
  }
}
/**
 * Create the callback normalize() uses to store each processed entity.
 *
 * Copy-on-write: entitiesCopy/indexesCopy/entityMetaCopy are shallow clones of
 * the store output; each entity type is re-cloned once on first touch so the
 * caller's originals are never mutated.
 */
const addEntities = (newEntities, newIndexes, entitiesCopy, indexesCopy, entityMetaCopy, actionMeta) => (schema, processedEntity, id) => {
  const schemaKey = schema.key;
  // first time we come across this type of entity
  if (!newEntities.has(schemaKey)) {
    newEntities.set(schemaKey, new Map());
    // we will be editing these, so we need to clone them first
    entitiesCopy[schemaKey] = {
      ...entitiesCopy[schemaKey]
    };
    entityMetaCopy[schemaKey] = {
      ...entityMetaCopy[schemaKey]
    };
  }
  const newEntitiesKey = newEntities.get(schemaKey);
  const existingEntity = newEntitiesKey.get(id);
  // same id seen earlier in this normalize pass: merge the duplicates
  if (existingEntity) {
    newEntitiesKey.set(id, schema.merge(existingEntity, processedEntity));
  } else {
    const inStoreEntity = entitiesCopy[schemaKey][id];
    let inStoreMeta;
    // this case we already have this entity in store
    if (inStoreEntity && (inStoreMeta = entityMetaCopy[schemaKey][id])) {
      newEntitiesKey.set(id, schema.mergeWithStore(inStoreMeta, actionMeta, inStoreEntity, processedEntity));
      entityMetaCopy[schemaKey][id] = schema.mergeMetaWithStore(inStoreMeta, actionMeta, inStoreEntity, processedEntity);
    } else {
      newEntitiesKey.set(id, processedEntity);
      entityMetaCopy[schemaKey][id] = actionMeta;
    }
  }
  // update index
  if (schema.indexes) {
    if (!newIndexes.has(schemaKey)) {
      newIndexes.set(schemaKey, new Map());
      indexesCopy[schemaKey] = {
        ...indexesCopy[schemaKey]
      };
    }
    handleIndexes(id, schema.indexes, newIndexes.get(schemaKey), indexesCopy[schemaKey], newEntitiesKey.get(id), entitiesCopy[schemaKey]);
  }
  // set this after index updates so we know what indexes to remove from
  entitiesCopy[schemaKey][id] = newEntitiesKey.get(id);
};
/**
 * Maintain secondary index maps (index value -> pk) for one entity.
 *
 * Removes the entry for the entity's previous index value, marks a changed
 * value INVALID so stale lookups miss, then writes the new value -> id entry.
 */
function handleIndexes(id, schemaIndexes, indexes, storeIndexes, entity, storeEntities) {
  for (const index of schemaIndexes) {
    if (!indexes.has(index)) {
      indexes.set(index, storeIndexes[index] = {});
    }
    const indexMap = indexes.get(index);
    // drop the mapping for this entity's previous index value
    if (storeEntities[id]) {
      delete indexMap[storeEntities[id][index]];
    }
    // entity already in cache but the index changed
    if (storeEntities && storeEntities[id] && storeEntities[id][index] !== entity[index]) {
      indexMap[storeEntities[id][index]] = INVALID;
    }
    if (index in entity) {
      indexMap[entity[index]] = id;
    } /* istanbul ignore next */else if (process.env.NODE_ENV !== 'production') {
      console.warn(`Index not found in entity. Indexes must be top-level members of your entity.
Index: ${index}
Entity: ${JSON.stringify(entity, undefined, 2)}`);
    }
  }
}
/**
 * Create a per-normalization cycle detector.
 * Tracks, per (entityKey, pk), every raw input reference already visited.
 */
function getCheckLoop() {
  // entityKey -> (pk -> list of visited input references)
  const visitedEntities = new Map();
  /* Returns true if a circular reference is found */
  return function checkLoop(entityKey, pk, input) {
    let entitiesOneType = visitedEntities.get(entityKey);
    if (!entitiesOneType) {
      entitiesOneType = new Map();
      visitedEntities.set(entityKey, entitiesOneType);
    }
    let visitedEntityList = entitiesOneType.get(pk);
    if (!visitedEntityList) {
      visitedEntityList = [];
      entitiesOneType.set(pk, visitedEntityList);
    }
    // same reference seen before on this traversal => cycle
    if (visitedEntityList.includes(input)) {
      return true;
    }
    visitedEntityList.push(input);
    return false;
  };
}
/**
 * Build the normalization visitor. Schemas providing their own `normalize`
 * handle themselves; plain objects/arrays use the shorthand handlers.
 */
const getVisit = (addEntity, getEntity) => {
  const checkLoop = getCheckLoop();
  const visit = (schema, value, parent, key, args) => {
    if (!value || !schema) {
      return value;
    }
    if (schema.normalize && typeof schema.normalize === 'function') {
      // Primitive values under an entity schema are already normalized ids.
      if (typeof value !== 'object') {
        return schema.pk ? `${value}` : value;
      }
      return schema.normalize(value, parent, key, args, visit, addEntity, getEntity, checkLoop);
    }
    if (typeof value !== 'object' || typeof schema !== 'object') return value;
    // shorthand: array schema or plain-object schema
    const method = Array.isArray(schema) ? normalize$2 : normalize$1;
    return method(schema, value, parent, key, args, visit);
  };
  return visit;
};
/**
 * Denormalization cache used by MemoCache: memoizes entities and results,
 * while tracking every (key, pk) dependency so cached values can be reused
 * until one of their underlying entities changes. Handles cycles.
 */
class GlobalCache {
  // ordered list of every entity touched during the current traversal
  dependencies = [];
  // per-key map of pks currently being computed (for cycle detection)
  cycleCache = new Map();
  // dependency index where the current cycle started; -1 when not in a cycle
  cycleIndex = -1;
  // per-call cache: schema key -> (pk -> denormalized value)
  localCache = new Map();
  constructor(getEntity, entityCache, resultCache) {
    this._getEntity = getEntity;
    this.getCache = getEntityCaches(entityCache);
    this.resultCache = resultCache;
  }
  /** Get (or compute) one denormalized entity, recording it as a dependency. */
  getEntity(pk, schema, entity, computeValue) {
    const key = schema.key;
    const {
      localCacheKey,
      cycleCacheKey
    } = this.getCacheKey(key);
    if (!localCacheKey.get(pk)) {
      const globalCache = this.getCache(pk, schema);
      const [cacheValue, cachePath] = globalCache.get(entity, this._getEntity);
      // TODO: what if this just returned the deps - then we don't need to store them
      if (cachePath) {
        localCacheKey.set(pk, cacheValue.value);
        // TODO: can we store the cache values instead of tracking *all* their sources?
        // this is only used for setting endpoints cache correctly. if we got this far we will def need to set as we would have already tried getting it
        this.dependencies.push(...cacheValue.dependencies);
        return cacheValue.value;
      }
      // if we don't find in denormalize cache then do full denormalize
      else {
        const trackingIndex = this.dependencies.length;
        cycleCacheKey.set(pk, trackingIndex);
        this.dependencies.push({
          entity,
          path: {
            key,
            pk
          }
        });
        /** NON-GLOBAL_CACHE CODE */
        computeValue(localCacheKey);
        /** /END NON-GLOBAL_CACHE CODE */
        cycleCacheKey.delete(pk);
        // if in cycle, use the start of the cycle to track all deps
        // otherwise, we use our own trackingIndex
        const localKey = this.dependencies.slice(this.cycleIndex === -1 ? trackingIndex : this.cycleIndex);
        const cacheValue = {
          dependencies: localKey,
          value: localCacheKey.get(pk)
        };
        globalCache.set(localKey, cacheValue);
        // start of cycle - reset cycle detection
        if (this.cycleIndex === trackingIndex) {
          this.cycleIndex = -1;
        }
      }
    } else {
      // cycle detected
      if (cycleCacheKey.has(pk)) {
        this.cycleIndex = cycleCacheKey.get(pk);
      } else {
        // with no cycle, globalCacheEntry will have already been set
        this.dependencies.push({
          entity,
          path: {
            key,
            pk
          }
        });
      }
    }
    return localCacheKey.get(pk);
  }
  /** Lazily create and return the local and cycle maps for a schema key. */
  getCacheKey(key) {
    if (!this.localCache.has(key)) {
      this.localCache.set(key, new Map());
    }
    if (!this.cycleCache.has(key)) {
      this.cycleCache.set(key, new Map());
    }
    const localCacheKey = this.localCache.get(key);
    const cycleCacheKey = this.cycleCache.get(key);
    return {
      localCacheKey,
      cycleCacheKey
    };
  }
  /** Cache varies based on input (=== aka reference) */
  getResults(input, cachable, computeValue) {
    if (!cachable) {
      return {
        data: computeValue(),
        paths: this.paths()
      };
    }
    let [data, paths] = this.resultCache.get(input, this._getEntity);
    if (paths === undefined) {
      data = computeValue();
      // we want to do this before we add our 'input' entry
      paths = this.paths();
      // for the first entry, `path` is ignored so empty members is fine
      this.dependencies.unshift({
        entity: input,
        path: {
          key: '',
          pk: ''
        }
      });
      this.resultCache.set(this.dependencies, data);
    } else {
      paths.shift();
    }
    return {
      data,
      paths
    };
  }
  /** All (key, pk) paths touched so far in this traversal. */
  paths() {
    return this.dependencies.map(dep => dep.path);
  }
}
/**
 * Build the (pk, schema) -> WeakDependencyMap accessor over the shared
 * entity cache, creating intermediate maps lazily.
 */
const getEntityCaches = entityCache => {
  return (pk, schema) => {
    const key = schema.key;
    // collections should use the entities they collect over
    // TODO: this should be based on a public interface
    const cacheOwner = schema.cacheWith ?? schema;
    let entityCacheKey = entityCache.get(key);
    if (!entityCacheKey) {
      entityCacheKey = new Map();
      entityCache.set(key, entityCacheKey);
    }
    let entityCachePk = entityCacheKey.get(pk);
    if (!entityCachePk) {
      entityCachePk = new WeakMap();
      entityCacheKey.set(pk, entityCachePk);
    }
    let wem = entityCachePk.get(cacheOwner);
    if (!wem) {
      wem = new WeakDependencyMap();
      entityCachePk.set(cacheOwner, wem);
    }
    return wem;
  };
};
/**
 * Build the result parameter to denormalize from schema alone.
 * Tries to compute the entity ids from params.
 */
function buildQueryKey(schema, args, getEntity, getIndex) {
  // schema classes that know how to build their own key
  if (canQuery(schema)) {
    return schema.queryKey(args, buildQueryKey, getEntity, getIndex);
  }
  // plain object / array shorthand
  if (schema && typeof schema === 'object') {
    const method = Array.isArray(schema) ? queryKey$1 : queryKey;
    return method(schema, args, buildQueryKey, getEntity, getIndex);
  }
  // fallback for things like null or undefined
  return schema;
}
/** A schema is queryable when it provides a queryKey() method. */
function canQuery(schema) {
  if (!schema) return false;
  return typeof schema.queryKey === 'function';
}
// this only works if entity does a lookup first to see if its entity is 'found'
/** Recursively reject query keys containing any undefined member. */
function validateQueryKey(queryKey) {
  if (queryKey === undefined) return false;
  const isPlainObject =
    queryKey !== null && typeof queryKey === 'object' && !Array.isArray(queryKey);
  if (isPlainObject) {
    return Object.values(queryKey).every(validateQueryKey);
  }
  return true;
}
//TODO: make immutable distinction occur when initilizing MemoCache
/** Singleton to store the memoization cache for denormalization methods */
class MemoCache {
  /** Cache for every entity based on its dependencies and its own input */
  entities = new Map();
  /** Caches the final denormalized form based on input, entities */
  endpoints = new WeakDependencyMap();
  /** Caches the queryKey based on schema, args, and any used entities or indexes */
  queryKeys = new Map();
  /** Compute denormalized form maintaining referential equality for same inputs */
  denormalize(schema, input, entities, args = []) {
    // we already vary based on input, so we don't need endpointKey? TODO: verify
    // if (!this.endpoints[endpointKey])
    // this.endpoints[endpointKey] = new WeakDependencyMap<EntityPath>();
    // undefined means don't do anything
    if (schema === undefined) {
      return {
        data: input,
        paths: []
      };
    }
    if (input === undefined) {
      return {
        data: undefined,
        paths: []
      };
    }
    const getEntity = getEntities(entities);
    return getUnvisit(getEntity, new GlobalCache(getEntity, this.entities, this.endpoints), args)(schema, input);
  }
  /** Compute denormalized form maintaining referential equality for same inputs */
  query(schema, args, entities, indexes,
  // NOTE: different orders can result in cache busting here; but since it's just a perf penalty we will allow for now
  argsKey = JSON.stringify(args)) {
    const input = this.buildQueryKey(schema, args, entities, indexes, argsKey);
    if (!input) {
      return;
    }
    const {
      data
    } = this.denormalize(schema, input, entities, args);
    return typeof data === 'symbol' ? undefined : data;
  }
  /** Build (and memoize) the denormalize input from schema and args alone. */
  buildQueryKey(schema, args, entities, indexes,
  // NOTE: different orders can result in cache busting here; but since it's just a perf penalty we will allow for now
  argsKey = JSON.stringify(args)) {
    // This is redundant for buildQueryKey checks, but that was is used for recursion so we still need the checks there
    // TODO: If we make each recursive call include cache lookups, we combine these checks together
    // Bail for non-queryable schemas. `!schema` must be checked FIRST: the
    // previous ordering dereferenced `schema.queryKey` before the falsy guard,
    // throwing a TypeError when schema was undefined. (null is typeof 'object',
    // hence the double check.)
    if (!schema || typeof schema !== 'object' && typeof schema.queryKey !== 'function') return schema;
    // cache lookup: argsKey -> schema -> ...touched indexes or entities
    if (!this.queryKeys.get(argsKey)) {
      this.queryKeys.set(argsKey, new WeakDependencyMap());
    }
    const queryCache = this.queryKeys.get(argsKey);
    const getEntity = createGetEntity(entities);
    const getIndex = createGetIndex(indexes);
    // eslint-disable-next-line prefer-const
    let [value, paths] = queryCache.get(schema, createDepLookup(getEntity, getIndex));
    // paths undefined is the only way to truly tell nothing was found (the value could have actually been undefined)
    if (!paths) {
      // first dep path is ignored
      // we start with schema object, then lookup any 'touched' members and their paths
      const dependencies = [{
        path: [''],
        entity: schema
      }];
      value = buildQueryKey(schema, args, trackLookup(getEntity, dependencies), trackLookup(getIndex, dependencies));
      queryCache.set(dependencies, value);
    }
    return value;
  }
}
/**
 * Dispatch a dependency path to the right lookup: index paths have three
 * members [key, field, value]; entity paths have two [key, pk].
 */
function createDepLookup(getEntity, getIndex) {
  return args => (args.length === 3 ? getIndex(...args) : getEntity(...args));
}
/** Wrap a lookup so every access is recorded into `dependencies` as {path, entity}. */
function trackLookup(lookup, dependencies) {
  return (...path) => {
    const entity = lookup(...path);
    dependencies.push({ path, entity });
    return entity;
  };
}
/**
 * Build a (key, pk?) entity accessor over the store, supporting both
 * Immutable.js stores (getIn + toJS) and plain nested objects.
 */
function createGetEntity(entities) {
  // Inline Immutable.js detection (same probe as isImmutable).
  const entitiesAreImmutable = !!(
    typeof entities.hasOwnProperty === 'function' &&
    (Object.hasOwnProperty.call(entities, '__ownerID') ||
      (entities._map && Object.hasOwnProperty.call(entities._map, '__ownerID')))
  );
  if (entitiesAreImmutable) {
    // Convert Immutable results to plain JS for consumers.
    return (...args) => entities.getIn(args)?.toJS?.();
  }
  return (entityKey, pk) => (pk ? entities[entityKey]?.[pk] : entities[entityKey]);
}
/**
 * Build a (key, field, value) index accessor returning the whole
 * value->pk map for that field ({} when the key is unknown).
 */
function createGetIndex(indexes) {
  // Inline Immutable.js detection (same probe as isImmutable).
  const indexesAreImmutable = !!(
    typeof indexes.hasOwnProperty === 'function' &&
    (Object.hasOwnProperty.call(indexes, '__ownerID') ||
      (indexes._map && Object.hasOwnProperty.call(indexes._map, '__ownerID')))
  );
  if (indexesAreImmutable) {
    return (key, field, value) => indexes.getIn([key, field])?.toJS?.();
  }
  return (key, field, value) => (indexes[key] ? indexes[key][field] : {});
}
/**
 * Normalize `input` against `schema`, merging into the (optionally provided)
 * existing store. Returns { result, entities, indexes, entityMeta }.
 * `meta` stamps freshness info (fetchedAt/date/expiresAt) onto written entities.
 */
const normalize = (schema, input, args = [], {
  entities,
  indexes,
  entityMeta
} = emptyStore, meta = {
  fetchedAt: 0,
  date: Date.now(),
  expiresAt: Infinity
}) => {
  // no schema means we don't process at all
  if (schema === undefined || schema === null) return {
    result: input,
    entities,
    indexes,
    entityMeta
  };
  const schemaType = expectedSchemaType(schema);
  // Validate input type against what the schema expects before visiting.
  if (input === null || typeof input !== schemaType &&
  // we will allow a Delete schema to be a string or object
  !(schema.key !== undefined && schema.pk === undefined && typeof input === 'string')) {
    /* istanbul ignore else */
    if (process.env.NODE_ENV !== 'production') {
      // Detect the common mistake of passing an unparsed JSON string.
      const parseWorks = input => {
        try {
          return typeof JSON.parse(input) !== 'string';
        } catch (e) {
          return false;
        }
      };
      if (typeof input === 'string' && parseWorks(input)) {
        throw new Error(`Normalizing a string, but this does match schema.
Parsing this input string as JSON worked. This likely indicates fetch function did not parse
the JSON. By default, this only happens if "content-type" header includes "json".
See https://dataclient.io/rest/api/RestEndpoint#parseResponse for more information
Schema: ${JSON.stringify(schema, undefined, 2)}
Input: "${input}"`);
      } else {
        throw new Error(`Unexpected input given to normalize. Expected type to be "${schemaType}", found "${input === null ? 'null' : typeof input}".
Schema: ${JSON.stringify(schema, undefined, 2)}
Input: "${input}"`);
      }
    } else {
      throw new Error(`Unexpected input given to normalize. Expected type to be "${schemaType}", found "${input === null ? 'null' : typeof input}".`);
    }
  }
  const newEntities = new Map();
  const newIndexes = new Map();
  // shallow copy-on-write clones of the incoming store; addEntities mutates these
  const ret = {
    result: '',
    entities: {
      ...entities
    },
    indexes: {
      ...indexes
    },
    entityMeta: {
      ...entityMeta
    }
  };
  const addEntity = addEntities(newEntities, newIndexes, ret.entities, ret.indexes, ret.entityMeta, meta);
  const visit = getVisit(addEntity, createGetEntity(entities));
  ret.result = visit(schema, input, input, undefined, args);
  return ret;
};
/** Input type a schema expects; schema classes (functions) take objects. */
function expectedSchemaType(schema) {
  const schemaType = typeof schema;
  return schemaType === 'object' || schemaType === 'function' ? 'object' : schemaType;
}
// Default store shape used when normalize() is called without one.
const emptyStore = {
  entities: {},
  indexes: {},
  entityMeta: {}
};
// Expiry states for cached data; numeric so staleness compares cheaply.
var ExpiryStatus = {
  Invalid: 1,
  InvalidIfStale: 2,
  Valid: 3
};
// CommonJS public API surface.
// looser version to allow for cross-package version compatibility
exports.ExpiryStatus = ExpiryStatus;
exports.INVALID = INVALID;
exports.MemoCache = MemoCache;
exports.WeakDependencyMap = WeakDependencyMap;
exports.denormalize = denormalize;
exports.isEntity = isEntity;
exports.normalize = normalize;
exports.validateQueryKey = validateQueryKey;