UNPKG

@reduxjs/toolkit

Version:

The official, opinionated, batteries-included toolset for efficient Redux development

1,108 lines 91 kB
// ---- esbuild helper shims: runtime emulation of object spread / rest ----
var __defProp = Object.defineProperty;
var __defProps = Object.defineProperties;
var __getOwnPropDescs = Object.getOwnPropertyDescriptors;
var __getOwnPropSymbols = Object.getOwnPropertySymbols;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __propIsEnum = Object.prototype.propertyIsEnumerable;
// Define `key` on `obj`; uses defineProperty when the key already exists so an
// accessor is not invoked, plain assignment otherwise.
var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
// Runtime equivalent of `{ ...a, ...b }`: copies own enumerable string and
// symbol keys of `b` onto `a` and returns `a`.
var __spreadValues = (a, b) => {
  for (var prop in b || (b = {}))
    if (__hasOwnProp.call(b, prop))
      __defNormalProp(a, prop, b[prop]);
  if (__getOwnPropSymbols)
    for (var prop of __getOwnPropSymbols(b)) {
      if (__propIsEnum.call(b, prop))
        __defNormalProp(a, prop, b[prop]);
    }
  return a;
};
// Spread that preserves property descriptors (getters/setters) from `b`.
var __spreadProps = (a, b) => __defProps(a, __getOwnPropDescs(b));
// Runtime equivalent of rest destructuring: a copy of `source` minus the keys
// listed in `exclude`.
var __objRest = (source, exclude) => {
  var target = {};
  for (var prop in source)
    if (__hasOwnProp.call(source, prop) && exclude.indexOf(prop) < 0)
      target[prop] = source[prop];
  if (source != null && __getOwnPropSymbols)
    for (var prop of __getOwnPropSymbols(source)) {
      if (exclude.indexOf(prop) < 0 && __propIsEnum.call(source, prop))
        target[prop] = source[prop];
    }
  return target;
};

// src/query/core/apiState.ts
// Lifecycle states of a query/mutation cache entry (TS enum compiled to an IIFE).
var QueryStatus;
(function (QueryStatus2) {
  QueryStatus2["uninitialized"] = "uninitialized";
  QueryStatus2["pending"] = "pending";
  QueryStatus2["fulfilled"] = "fulfilled";
  QueryStatus2["rejected"] = "rejected";
})(QueryStatus || (QueryStatus = {}));
// Expand a QueryStatus value into the boolean convenience flags exposed on results.
function getRequestStatusFlags(status) {
  return {
    status,
    isUninitialized: status === QueryStatus.uninitialized,
    isLoading: status === QueryStatus.pending,
    isSuccess: status === QueryStatus.fulfilled,
    isError: status === QueryStatus.rejected
  };
}

// src/query/utils/isAbsoluteUrl.ts
// True for URLs with a scheme ("http://x") or protocol-relative URLs ("//x").
function isAbsoluteUrl(url) {
  return new RegExp(`(^|:)//`).test(url);
}

// src/query/utils/joinUrls.ts
var withoutTrailingSlash = (url) => url.replace(/\/$/, "");
var withoutLeadingSlash = (url) => url.replace(/^\//, "");
// Join `base` and `url` with exactly one "/" between them. An absolute `url`
// wins outright; a `url` that is only a query string ("?a=b") is appended to a
// slash-terminated base without inserting another slash.
function joinUrls(base, url) {
  if (!base) {
    return url;
  }
  if (!url) {
    return base;
  }
  if (isAbsoluteUrl(url)) {
    return url;
  }
  const delimiter = base.endsWith("/") || !url.startsWith("?") ? "/" : "";
  base = withoutTrailingSlash(base);
  url = withoutLeadingSlash(url);
  return `${base}${delimiter}${url}`;
}

// src/query/utils/flatten.ts
// Flatten exactly one level of nesting: [[1],[2,3]] -> [1,2,3].
var flatten = (arr) => [].concat(...arr);

// src/query/utils/isOnline.ts
// Assume "online" when the environment (e.g. SSR) has no `navigator`, or when
// `navigator.onLine` is not implemented.
function isOnline() {
  return typeof navigator === "undefined" ? true : navigator.onLine === void 0 ? true : navigator.onLine;
}

// src/query/utils/isDocumentVisible.ts
// Assume "visible" when there is no `document` (SSR / non-browser environments).
function isDocumentVisible() {
  if (typeof document === "undefined") {
    return true;
  }
  return document.visibilityState !== "hidden";
}

// src/query/utils/copyWithStructuralSharing.ts
import { isPlainObject as _iPO } from "@reduxjs/toolkit";
var isPlainObject = _iPO;
// Recursively overlay `newObj` onto `oldObj`, returning `oldObj` itself (or any
// unchanged sub-tree of it) by reference when contents are identical, so that
// downstream referential-equality checks (memoized selectors, React renders)
// are not invalidated by a refetch that produced equal data.
// NOTE: the trailing ternary of this function is completed on the next
// (untouched) source line: `oldObj : mergeObj; }`.
function copyWithStructuralSharing(oldObj, newObj) {
  if (oldObj === newObj || !(isPlainObject(oldObj) && isPlainObject(newObj) || Array.isArray(oldObj) && Array.isArray(newObj))) {
    return newObj;
  }
  const newKeys = Object.keys(newObj);
  const oldKeys = Object.keys(oldObj);
  let isSameObject = newKeys.length === oldKeys.length;
  const mergeObj = Array.isArray(newObj) ? [] : {};
  for (const key of newKeys) {
    mergeObj[key] = copyWithStructuralSharing(oldObj[key], newObj[key]);
    if (isSameObject) isSameObject = oldObj[key] === mergeObj[key];
  }
  return isSameObject ?
oldObj : mergeObj;
}

// src/query/fetchBaseQuery.ts
import { isPlainObject as isPlainObject2 } from "@reduxjs/toolkit";
var defaultFetchFn = (...args) => fetch(...args);
var defaultValidateStatus = (response) => response.status >= 200 && response.status <= 299;
// Matches "application/json" and "application/vnd.api+json" content types.
var defaultIsJsonContentType = (headers) => /ion\/(vnd\.api\+)?json/.test(headers.get("content-type") || "");
// Shallow copy of a plain object with `undefined`-valued keys removed;
// non-plain-objects pass through untouched.
function stripUndefined(obj) {
  if (!isPlainObject2(obj)) {
    return obj;
  }
  const copy = __spreadValues({}, obj);
  for (const [k, v] of Object.entries(copy)) {
    if (v === void 0) delete copy[k];
  }
  return copy;
}
// Factory for the default RTK Query base query built on `fetch`.
// Options: baseUrl, prepareHeaders, fetchFn, paramsSerializer,
// isJsonContentType, jsonContentType, jsonReplacer, timeout,
// responseHandler, validateStatus; all remaining options are forwarded to
// the Request constructor as defaults.
function fetchBaseQuery(_a = {}) {
  var _b = _a, {
    baseUrl,
    prepareHeaders = (x) => x,
    fetchFn = defaultFetchFn,
    paramsSerializer,
    isJsonContentType = defaultIsJsonContentType,
    jsonContentType = "application/json",
    jsonReplacer,
    timeout: defaultTimeout,
    responseHandler: globalResponseHandler,
    validateStatus: globalValidateStatus
  } = _b, baseFetchOptions = __objRest(_b, [
    "baseUrl",
    "prepareHeaders",
    "fetchFn",
    "paramsSerializer",
    "isJsonContentType",
    "jsonContentType",
    "jsonReplacer",
    "timeout",
    "responseHandler",
    "validateStatus"
  ]);
  if (typeof fetch === "undefined" && fetchFn === defaultFetchFn) {
    console.warn("Warning: `fetch` is not available. Please supply a custom `fetchFn` property to use `fetchBaseQuery` on SSR environments.");
  }
  // The returned function is the actual base query: (arg, api) -> { data } | { error }.
  return async (arg, api) => {
    const { signal, getState, extra, endpoint, forced, type } = api;
    let meta;
    // A bare string arg is shorthand for { url: string }; per-request options
    // override the factory-level defaults captured above.
    let _a2 = typeof arg == "string" ? { url: arg } : arg, {
      url,
      headers = new Headers(baseFetchOptions.headers),
      params = void 0,
      responseHandler = globalResponseHandler != null ? globalResponseHandler : "json",
      validateStatus = globalValidateStatus != null ? globalValidateStatus : defaultValidateStatus,
      timeout = defaultTimeout
    } = _a2, rest = __objRest(_a2, [
      "url",
      "headers",
      "params",
      "responseHandler",
      "validateStatus",
      "timeout"
    ]);
    let config = __spreadValues(__spreadProps(__spreadValues({}, baseFetchOptions), { signal }), rest);
    headers = new Headers(stripUndefined(headers));
    // prepareHeaders may mutate and/or return the Headers object; fall back to
    // the (possibly mutated) original when it returns nothing.
    config.headers = await prepareHeaders(headers, { getState, extra, endpoint, forced, type }) || headers;
    // Bodies that are plain objects/arrays or expose toJSON get serialized as JSON.
    const isJsonifiable = (body) => typeof body === "object" && (isPlainObject2(body) || Array.isArray(body) || typeof body.toJSON === "function");
    if (!config.headers.has("content-type") && isJsonifiable(config.body)) {
      config.headers.set("content-type", jsonContentType);
    }
    if (isJsonifiable(config.body) && isJsonContentType(config.headers)) {
      config.body = JSON.stringify(config.body, jsonReplacer);
    }
    if (params) {
      // "&" when the url already carries a query string, "?" otherwise.
      const divider = ~url.indexOf("?") ? "&" : "?";
      const query = paramsSerializer ? paramsSerializer(params) : new URLSearchParams(stripUndefined(params));
      url += divider + query;
    }
    url = joinUrls(baseUrl, url);
    // A clone is kept in `meta` because reading the Request body consumes it.
    const request = new Request(url, config);
    const requestClone = new Request(url, config);
    meta = { request: requestClone };
    let response, timedOut = false, timeoutId = timeout && setTimeout(() => {
      timedOut = true;
      api.abort();
    }, timeout);
    try {
      response = await fetchFn(request);
    } catch (e) {
      // An abort triggered by our own timer is reported as TIMEOUT_ERROR
      // rather than a generic FETCH_ERROR.
      return { error: { status: timedOut ? "TIMEOUT_ERROR" : "FETCH_ERROR", error: String(e) }, meta };
    } finally {
      if (timeoutId) clearTimeout(timeoutId);
    }
    const responseClone = response.clone();
    meta.response = responseClone;
    let resultData;
    let responseText = "";
    try {
      let handleResponseError;
      // Parse the body and capture the raw text in parallel; the raw text is
      // surfaced in PARSING_ERROR results. The text() rejection is deliberately
      // ignored so only the handler error propagates.
      await Promise.all([
        handleResponse(response, responseHandler).then((r) => resultData = r, (e) => handleResponseError = e),
        responseClone.text().then((r) => responseText = r, () => {
        })
      ]);
      if (handleResponseError) throw handleResponseError;
    } catch (e) {
      return { error: { status: "PARSING_ERROR", originalStatus: response.status, data: responseText, error: String(e) }, meta };
    }
    return validateStatus(response, resultData) ? { data: resultData, meta } : { error: { status: response.status, data: resultData }, meta };
  };
  // Apply the configured response handler: a custom function, "content-type"
  // sniffing, "json" (tolerating an empty body), or plain "text".
  async function handleResponse(response, responseHandler) {
    if (typeof responseHandler === "function") {
      return responseHandler(response);
    }
    if (responseHandler === "content-type") {
      responseHandler = isJsonContentType(response.headers) ? "json" : "text";
    }
    if (responseHandler === "json") {
      const text = await response.text();
      return text.length ? JSON.parse(text) : null;
    }
    return response.text();
  }
}

// src/query/HandledError.ts
// Internal wrapper distinguishing an error already shaped by a base query from
// an unexpected thrown exception.
var HandledError = class {
  constructor(value, meta = void 0) {
    this.value = value;
    this.meta = meta;
  }
};

// src/query/retry.ts
// Exponential backoff with jitter, capped at `maxRetries` doublings.
async function defaultBackoff(attempt = 0, maxRetries = 5) {
  const attempts = Math.min(attempt, maxRetries);
  const timeout = ~~((Math.random() + 0.4) * (300 << attempts));
  await new Promise((resolve) => setTimeout((res) => resolve(res), timeout));
}
// retry.fail(error): bail out of the retry loop immediately with this error.
function fail(e) {
  throw Object.assign(new HandledError({ error: e }), { throwImmediately: true });
}
var EMPTY_OPTIONS = {};
// Wrap a base query so failed requests are retried per the configured
// backoff / retryCondition; extraOptions override defaultOptions override
// the built-in maxRetries of 5.
var retryWithBackoff = (baseQuery, defaultOptions) => async (args, api, extraOptions) => {
  const possibleMaxRetries = [
    5,
    (defaultOptions || EMPTY_OPTIONS).maxRetries,
    (extraOptions || EMPTY_OPTIONS).maxRetries
  ].filter((x) => x !== void 0);
  // Last defined value wins (most specific override).
  const [maxRetries] = possibleMaxRetries.slice(-1);
  const defaultRetryCondition = (_, __, { attempt }) => attempt <= maxRetries;
  const options = __spreadValues(__spreadValues({
    maxRetries,
    backoff: defaultBackoff,
    retryCondition: defaultRetryCondition
  }, defaultOptions), extraOptions);
  let retry2 = 0;
  while (true) {
    try {
      const result = await baseQuery(args, api, extraOptions);
      // Error results are converted to throws so one catch path handles both
      // shaped errors and unexpected exceptions.
      if (result.error) {
        throw new HandledError(result);
      }
      return result;
    } catch (e) {
      retry2++;
      if (e.throwImmediately) {
        if (e instanceof HandledError) {
          return e.value;
        }
        throw e;
      }
      if (e instanceof HandledError && !options.retryCondition(e.value.error, args, {
        attempt: retry2,
        baseQueryApi: api,
        extraOptions
      })) {
        return e.value;
      }
      await options.backoff(retry2, options.maxRetries);
    }
  }
};
var retry = /* @__PURE__ */ Object.assign(retryWithBackoff, { fail });

// src/query/core/setupListeners.ts
// Actions dispatched by setupListeners when window focus / connectivity change.
// NOTE: the final declaration is completed on the next (untouched) source
// line: `createAction("__rtkq/offline");`.
import { createAction } from "@reduxjs/toolkit";
var onFocus = /* @__PURE__ */ createAction("__rtkq/focused");
var onFocusLost = /* @__PURE__ */ createAction("__rtkq/unfocused");
var onOnline = /* @__PURE__ */ createAction("__rtkq/online");
var onOffline = /* @__PURE__ */
createAction("__rtkq/offline"); var initialized = false; function setupListeners(dispatch, customHandler) { function defaultHandler() { const handleFocus = () => dispatch(onFocus()); const handleFocusLost = () => dispatch(onFocusLost()); const handleOnline = () => dispatch(onOnline()); const handleOffline = () => dispatch(onOffline()); const handleVisibilityChange = () => { if (window.document.visibilityState === "visible") { handleFocus(); } else { handleFocusLost(); } }; if (!initialized) { if (typeof window !== "undefined" && window.addEventListener) { window.addEventListener("visibilitychange", handleVisibilityChange, false); window.addEventListener("focus", handleFocus, false); window.addEventListener("online", handleOnline, false); window.addEventListener("offline", handleOffline, false); initialized = true; } } const unsubscribe = () => { window.removeEventListener("focus", handleFocus); window.removeEventListener("visibilitychange", handleVisibilityChange); window.removeEventListener("online", handleOnline); window.removeEventListener("offline", handleOffline); initialized = false; }; return unsubscribe; } return customHandler ? 
customHandler(dispatch, { onFocus, onFocusLost, onOffline, onOnline }) : defaultHandler(); } // src/query/core/buildSelectors.ts import { createNextState as createNextState2, createSelector } from "@reduxjs/toolkit"; // src/query/endpointDefinitions.ts var DefinitionType; (function (DefinitionType2) { DefinitionType2["query"] = "query"; DefinitionType2["mutation"] = "mutation"; })(DefinitionType || (DefinitionType = {})); function isQueryDefinition(e) { return e.type === DefinitionType.query; } function isMutationDefinition(e) { return e.type === DefinitionType.mutation; } function calculateProvidedBy(description, result, error, queryArg, meta, assertTagTypes) { if (isFunction(description)) { return description(result, error, queryArg, meta).map(expandTagDescription).map(assertTagTypes); } if (Array.isArray(description)) { return description.map(expandTagDescription).map(assertTagTypes); } return []; } function isFunction(t) { return typeof t === "function"; } function expandTagDescription(description) { return typeof description === "string" ? 
{ type: description } : description; } // src/query/core/buildSlice.ts import { combineReducers, createAction as createAction2, createSlice, isAnyOf, isFulfilled as isFulfilled2, isRejectedWithValue as isRejectedWithValue2, createNextState, prepareAutoBatched } from "@reduxjs/toolkit"; // src/query/utils/isNotNullish.ts function isNotNullish(v) { return v != null; } // src/query/core/buildInitiate.ts var forceQueryFnSymbol = Symbol("forceQueryFn"); var isUpsertQuery = (arg) => typeof arg[forceQueryFnSymbol] === "function"; function buildInitiate({ serializeQueryArgs, queryThunk, mutationThunk, api, context }) { const runningQueries = new Map(); const runningMutations = new Map(); const { unsubscribeQueryResult, removeMutationResult, updateSubscriptionOptions } = api.internalActions; return { buildInitiateQuery, buildInitiateMutation, getRunningQueryThunk, getRunningMutationThunk, getRunningQueriesThunk, getRunningMutationsThunk, getRunningOperationPromises, removalWarning }; function removalWarning() { throw new Error(`This method had to be removed due to a conceptual bug in RTK. Please see https://github.com/reduxjs/redux-toolkit/pull/2481 for details. See https://redux-toolkit.js.org/rtk-query/usage/server-side-rendering for new guidance on SSR.`); } function getRunningOperationPromises() { if (typeof process !== "undefined" && true) { removalWarning(); } else { const extract = (v) => Array.from(v.values()).flatMap((queriesForStore) => queriesForStore ? Object.values(queriesForStore) : []); return [...extract(runningQueries), ...extract(runningMutations)].filter(isNotNullish); } } function getRunningQueryThunk(endpointName, queryArgs) { return (dispatch) => { var _a; const endpointDefinition = context.endpointDefinitions[endpointName]; const queryCacheKey = serializeQueryArgs({ queryArgs, endpointDefinition, endpointName }); return (_a = runningQueries.get(dispatch)) == null ? 
void 0 : _a[queryCacheKey]; }; } function getRunningMutationThunk(_endpointName, fixedCacheKeyOrRequestId) { return (dispatch) => { var _a; return (_a = runningMutations.get(dispatch)) == null ? void 0 : _a[fixedCacheKeyOrRequestId]; }; } function getRunningQueriesThunk() { return (dispatch) => Object.values(runningQueries.get(dispatch) || {}).filter(isNotNullish); } function getRunningMutationsThunk() { return (dispatch) => Object.values(runningMutations.get(dispatch) || {}).filter(isNotNullish); } function middlewareWarning(dispatch) { if (true) { if (middlewareWarning.triggered) return; const registered = dispatch(api.internalActions.internal_probeSubscription({ queryCacheKey: "DOES_NOT_EXIST", requestId: "DUMMY_REQUEST_ID" })); middlewareWarning.triggered = true; if (typeof registered !== "boolean") { throw new Error(`Warning: Middleware for RTK-Query API at reducerPath "${api.reducerPath}" has not been added to the store. You must add the middleware for RTK-Query to function correctly!`); } } } function buildInitiateQuery(endpointName, endpointDefinition) { const queryAction = (arg, { subscribe = true, forceRefetch, subscriptionOptions, [forceQueryFnSymbol]: forceQueryFn } = {}) => (dispatch, getState) => { var _a; const queryCacheKey = serializeQueryArgs({ queryArgs: arg, endpointDefinition, endpointName }); const thunk = queryThunk({ type: "query", subscribe, forceRefetch, subscriptionOptions, endpointName, originalArgs: arg, queryCacheKey, [forceQueryFnSymbol]: forceQueryFn }); const selector = api.endpoints[endpointName].select(arg); const thunkResult = dispatch(thunk); const stateAfter = selector(getState()); middlewareWarning(dispatch); const { requestId, abort } = thunkResult; const skippedSynchronously = stateAfter.requestId !== requestId; const runningQuery = (_a = runningQueries.get(dispatch)) == null ? void 0 : _a[queryCacheKey]; const selectFromState = () => selector(getState()); const statePromise = Object.assign(forceQueryFn ? 
thunkResult.then(selectFromState) : skippedSynchronously && !runningQuery ? Promise.resolve(stateAfter) : Promise.all([runningQuery, thunkResult]).then(selectFromState), { arg, requestId, subscriptionOptions, queryCacheKey, abort, async unwrap() { const result = await statePromise; if (result.isError) { throw result.error; } return result.data; }, refetch: () => dispatch(queryAction(arg, { subscribe: false, forceRefetch: true })), unsubscribe() { if (subscribe) dispatch(unsubscribeQueryResult({ queryCacheKey, requestId })); }, updateSubscriptionOptions(options) { statePromise.subscriptionOptions = options; dispatch(updateSubscriptionOptions({ endpointName, requestId, queryCacheKey, options })); } }); if (!runningQuery && !skippedSynchronously && !forceQueryFn) { const running = runningQueries.get(dispatch) || {}; running[queryCacheKey] = statePromise; runningQueries.set(dispatch, running); statePromise.then(() => { delete running[queryCacheKey]; if (!Object.keys(running).length) { runningQueries.delete(dispatch); } }); } return statePromise; }; return queryAction; } function buildInitiateMutation(endpointName) { return (arg, { track = true, fixedCacheKey } = {}) => (dispatch, getState) => { const thunk = mutationThunk({ type: "mutation", endpointName, originalArgs: arg, track, fixedCacheKey }); const thunkResult = dispatch(thunk); middlewareWarning(dispatch); const { requestId, abort, unwrap } = thunkResult; const returnValuePromise = thunkResult.unwrap().then((data) => ({ data })).catch((error) => ({ error })); const reset = () => { dispatch(removeMutationResult({ requestId, fixedCacheKey })); }; const ret = Object.assign(returnValuePromise, { arg: thunkResult.arg, requestId, abort, unwrap, unsubscribe: reset, reset }); const running = runningMutations.get(dispatch) || {}; runningMutations.set(dispatch, running); running[requestId] = ret; ret.then(() => { delete running[requestId]; if (!Object.keys(running).length) { runningMutations.delete(dispatch); } }); if 
(fixedCacheKey) { running[fixedCacheKey] = ret; ret.then(() => { if (running[fixedCacheKey] === ret) { delete running[fixedCacheKey]; if (!Object.keys(running).length) { runningMutations.delete(dispatch); } } }); } return ret; }; } } // src/query/core/buildThunks.ts import { isAllOf, isFulfilled, isPending, isRejected, isRejectedWithValue } from "@reduxjs/toolkit"; import { isDraftable, produceWithPatches } from "immer"; import { createAsyncThunk, SHOULD_AUTOBATCH } from "@reduxjs/toolkit"; function defaultTransformResponse(baseQueryReturnValue) { return baseQueryReturnValue; } function buildThunks({ reducerPath, baseQuery, context: { endpointDefinitions }, serializeQueryArgs, api, assertTagType }) { const patchQueryData = (endpointName, args, patches, updateProvided) => (dispatch, getState) => { const endpointDefinition = endpointDefinitions[endpointName]; const queryCacheKey = serializeQueryArgs({ queryArgs: args, endpointDefinition, endpointName }); dispatch(api.internalActions.queryResultPatched({ queryCacheKey, patches })); if (!updateProvided) { return; } const newValue = api.endpoints[endpointName].select(args)(getState()); const providedTags = calculateProvidedBy(endpointDefinition.providesTags, newValue.data, void 0, args, {}, assertTagType); dispatch(api.internalActions.updateProvidedBy({ queryCacheKey, providedTags })); }; const updateQueryData = (endpointName, args, updateRecipe, updateProvided = true) => (dispatch, getState) => { const endpointDefinition = api.endpoints[endpointName]; const currentState = endpointDefinition.select(args)(getState()); let ret = { patches: [], inversePatches: [], undo: () => dispatch(api.util.patchQueryData(endpointName, args, ret.inversePatches, updateProvided)) }; if (currentState.status === QueryStatus.uninitialized) { return ret; } let newValue; if ("data" in currentState) { if (isDraftable(currentState.data)) { const [value, patches, inversePatches] = produceWithPatches(currentState.data, updateRecipe); 
ret.patches.push(...patches); ret.inversePatches.push(...inversePatches); newValue = value; } else { newValue = updateRecipe(currentState.data); ret.patches.push({ op: "replace", path: [], value: newValue }); ret.inversePatches.push({ op: "replace", path: [], value: currentState.data }); } } dispatch(api.util.patchQueryData(endpointName, args, ret.patches, updateProvided)); return ret; }; const upsertQueryData = (endpointName, args, value) => (dispatch) => { return dispatch(api.endpoints[endpointName].initiate(args, { subscribe: false, forceRefetch: true, [forceQueryFnSymbol]: () => ({ data: value }) })); }; const executeEndpoint = async (arg, { signal, abort, rejectWithValue, fulfillWithValue, dispatch, getState, extra }) => { const endpointDefinition = endpointDefinitions[arg.endpointName]; try { let transformResponse = defaultTransformResponse; let result; const baseQueryApi = { signal, abort, dispatch, getState, extra, endpoint: arg.endpointName, type: arg.type, forced: arg.type === "query" ? isForcedQuery(arg, getState()) : void 0 }; const forceQueryFn = arg.type === "query" ? arg[forceQueryFnSymbol] : void 0; if (forceQueryFn) { result = forceQueryFn(); } else if (endpointDefinition.query) { result = await baseQuery(endpointDefinition.query(arg.originalArgs), baseQueryApi, endpointDefinition.extraOptions); if (endpointDefinition.transformResponse) { transformResponse = endpointDefinition.transformResponse; } } else { result = await endpointDefinition.queryFn(arg.originalArgs, baseQueryApi, endpointDefinition.extraOptions, (arg2) => baseQuery(arg2, baseQueryApi, endpointDefinition.extraOptions)); } if (typeof process !== "undefined" && true) { const what = endpointDefinition.query ? 
"`baseQuery`" : "`queryFn`"; let err; if (!result) { err = `${what} did not return anything.`; } else if (typeof result !== "object") { err = `${what} did not return an object.`; } else if (result.error && result.data) { err = `${what} returned an object containing both \`error\` and \`result\`.`; } else if (result.error === void 0 && result.data === void 0) { err = `${what} returned an object containing neither a valid \`error\` and \`result\`. At least one of them should not be \`undefined\``; } else { for (const key of Object.keys(result)) { if (key !== "error" && key !== "data" && key !== "meta") { err = `The object returned by ${what} has the unknown property ${key}.`; break; } } } if (err) { console.error(`Error encountered handling the endpoint ${arg.endpointName}. ${err} It needs to return an object with either the shape \`{ data: <value> }\` or \`{ error: <value> }\` that may contain an optional \`meta\` property. Object returned was:`, result); } } if (result.error) throw new HandledError(result.error, result.meta); return fulfillWithValue(await transformResponse(result.data, result.meta, arg.originalArgs), { fulfilledTimeStamp: Date.now(), baseQueryMeta: result.meta, [SHOULD_AUTOBATCH]: true }); } catch (error) { let catchedError = error; if (catchedError instanceof HandledError) { let transformErrorResponse = defaultTransformResponse; if (endpointDefinition.query && endpointDefinition.transformErrorResponse) { transformErrorResponse = endpointDefinition.transformErrorResponse; } try { return rejectWithValue(await transformErrorResponse(catchedError.value, catchedError.meta, arg.originalArgs), { baseQueryMeta: catchedError.meta, [SHOULD_AUTOBATCH]: true }); } catch (e) { catchedError = e; } } if (typeof process !== "undefined" && true) { console.error(`An unhandled error occurred processing a request for the endpoint "${arg.endpointName}". 
In the case of an unhandled error, no tags will be "provided" or "invalidated".`, catchedError); } else { console.error(catchedError); } throw catchedError; } }; function isForcedQuery(arg, state) { var _a, _b, _c, _d; const requestState = (_b = (_a = state[reducerPath]) == null ? void 0 : _a.queries) == null ? void 0 : _b[arg.queryCacheKey]; const baseFetchOnMountOrArgChange = (_c = state[reducerPath]) == null ? void 0 : _c.config.refetchOnMountOrArgChange; const fulfilledVal = requestState == null ? void 0 : requestState.fulfilledTimeStamp; const refetchVal = (_d = arg.forceRefetch) != null ? _d : arg.subscribe && baseFetchOnMountOrArgChange; if (refetchVal) { return refetchVal === true || (Number(new Date()) - Number(fulfilledVal)) / 1e3 >= refetchVal; } return false; } const queryThunk = createAsyncThunk(`${reducerPath}/executeQuery`, executeEndpoint, { getPendingMeta() { return { startedTimeStamp: Date.now(), [SHOULD_AUTOBATCH]: true }; }, condition(queryThunkArgs, { getState }) { var _a, _b, _c; const state = getState(); const requestState = (_b = (_a = state[reducerPath]) == null ? void 0 : _a.queries) == null ? void 0 : _b[queryThunkArgs.queryCacheKey]; const fulfilledVal = requestState == null ? void 0 : requestState.fulfilledTimeStamp; const currentArg = queryThunkArgs.originalArgs; const previousArg = requestState == null ? void 0 : requestState.originalArgs; const endpointDefinition = endpointDefinitions[queryThunkArgs.endpointName]; if (isUpsertQuery(queryThunkArgs)) { return true; } if ((requestState == null ? void 0 : requestState.status) === "pending") { return false; } if (isForcedQuery(queryThunkArgs, state)) { return true; } if (isQueryDefinition(endpointDefinition) && ((_c = endpointDefinition == null ? void 0 : endpointDefinition.forceRefetch) == null ? 
void 0 : _c.call(endpointDefinition, { currentArg, previousArg, endpointState: requestState, state }))) { return true; } if (fulfilledVal) { return false; } return true; }, dispatchConditionRejection: true }); const mutationThunk = createAsyncThunk(`${reducerPath}/executeMutation`, executeEndpoint, { getPendingMeta() { return { startedTimeStamp: Date.now(), [SHOULD_AUTOBATCH]: true }; } }); const hasTheForce = (options) => "force" in options; const hasMaxAge = (options) => "ifOlderThan" in options; const prefetch = (endpointName, arg, options) => (dispatch, getState) => { const force = hasTheForce(options) && options.force; const maxAge = hasMaxAge(options) && options.ifOlderThan; const queryAction = (force2 = true) => api.endpoints[endpointName].initiate(arg, { forceRefetch: force2 }); const latestStateValue = api.endpoints[endpointName].select(arg)(getState()); if (force) { dispatch(queryAction()); } else if (maxAge) { const lastFulfilledTs = latestStateValue == null ? void 0 : latestStateValue.fulfilledTimeStamp; if (!lastFulfilledTs) { dispatch(queryAction()); return; } const shouldRetrigger = (Number(new Date()) - Number(new Date(lastFulfilledTs))) / 1e3 >= maxAge; if (shouldRetrigger) { dispatch(queryAction()); } } else { dispatch(queryAction(false)); } }; function matchesEndpoint(endpointName) { return (action) => { var _a, _b; return ((_b = (_a = action == null ? void 0 : action.meta) == null ? void 0 : _a.arg) == null ? 
void 0 : _b.endpointName) === endpointName; }; } function buildMatchThunkActions(thunk, endpointName) { return { matchPending: isAllOf(isPending(thunk), matchesEndpoint(endpointName)), matchFulfilled: isAllOf(isFulfilled(thunk), matchesEndpoint(endpointName)), matchRejected: isAllOf(isRejected(thunk), matchesEndpoint(endpointName)) }; } return { queryThunk, mutationThunk, prefetch, updateQueryData, upsertQueryData, patchQueryData, buildMatchThunkActions }; } function calculateProvidedByThunk(action, type, endpointDefinitions, assertTagType) { return calculateProvidedBy(endpointDefinitions[action.meta.arg.endpointName][type], isFulfilled(action) ? action.payload : void 0, isRejectedWithValue(action) ? action.payload : void 0, action.meta.arg.originalArgs, "baseQueryMeta" in action.meta ? action.meta.baseQueryMeta : void 0, assertTagType); } // src/query/core/buildSlice.ts import { isDraft } from "immer"; import { applyPatches, original } from "immer"; function updateQuerySubstateIfExists(state, queryCacheKey, update) { const substate = state[queryCacheKey]; if (substate) { update(substate); } } function getMutationCacheKey(id) { var _a; return (_a = "arg" in id ? id.arg.fixedCacheKey : id.fixedCacheKey) != null ? 
_a : id.requestId; } function updateMutationSubstateIfExists(state, id, update) { const substate = state[getMutationCacheKey(id)]; if (substate) { update(substate); } } var initialState = {}; function buildSlice({ reducerPath, queryThunk, mutationThunk, context: { endpointDefinitions: definitions, apiUid, extractRehydrationInfo, hasRehydrationInfo }, assertTagType, config }) { const resetApiState = createAction2(`${reducerPath}/resetApiState`); const querySlice = createSlice({ name: `${reducerPath}/queries`, initialState, reducers: { removeQueryResult: { reducer(draft, { payload: { queryCacheKey } }) { delete draft[queryCacheKey]; }, prepare: prepareAutoBatched() }, queryResultPatched: { reducer(draft, { payload: { queryCacheKey, patches } }) { updateQuerySubstateIfExists(draft, queryCacheKey, (substate) => { substate.data = applyPatches(substate.data, patches.concat()); }); }, prepare: prepareAutoBatched() } }, extraReducers(builder) { builder.addCase(queryThunk.pending, (draft, { meta, meta: { arg } }) => { var _a, _b; const upserting = isUpsertQuery(arg); if (arg.subscribe || upserting) { (_b = draft[_a = arg.queryCacheKey]) != null ? _b : draft[_a] = { status: QueryStatus.uninitialized, endpointName: arg.endpointName }; } updateQuerySubstateIfExists(draft, arg.queryCacheKey, (substate) => { substate.status = QueryStatus.pending; substate.requestId = upserting && substate.requestId ? 
substate.requestId : meta.requestId; if (arg.originalArgs !== void 0) { substate.originalArgs = arg.originalArgs; } substate.startedTimeStamp = meta.startedTimeStamp; }); }).addCase(queryThunk.fulfilled, (draft, { meta, payload }) => { updateQuerySubstateIfExists(draft, meta.arg.queryCacheKey, (substate) => { var _a; if (substate.requestId !== meta.requestId && !isUpsertQuery(meta.arg)) return; const { merge } = definitions[meta.arg.endpointName]; substate.status = QueryStatus.fulfilled; if (merge) { if (substate.data !== void 0) { const { fulfilledTimeStamp, arg, baseQueryMeta, requestId } = meta; let newData = createNextState(substate.data, (draftSubstateData) => { return merge(draftSubstateData, payload, { arg: arg.originalArgs, baseQueryMeta, fulfilledTimeStamp, requestId }); }); substate.data = newData; } else { substate.data = payload; } } else { substate.data = ((_a = definitions[meta.arg.endpointName].structuralSharing) != null ? _a : true) ? copyWithStructuralSharing(isDraft(substate.data) ? original(substate.data) : substate.data, payload) : payload; } delete substate.error; substate.fulfilledTimeStamp = meta.fulfilledTimeStamp; }); }).addCase(queryThunk.rejected, (draft, { meta: { condition, arg, requestId }, error, payload }) => { updateQuerySubstateIfExists(draft, arg.queryCacheKey, (substate) => { if (condition) { } else { if (substate.requestId !== requestId) return; substate.status = QueryStatus.rejected; substate.error = payload != null ? payload : error; } }); }).addMatcher(hasRehydrationInfo, (draft, action) => { const { queries } = extractRehydrationInfo(action); for (const [key, entry] of Object.entries(queries)) { if ((entry == null ? void 0 : entry.status) === QueryStatus.fulfilled || (entry == null ? 
void 0 : entry.status) === QueryStatus.rejected) { draft[key] = entry; } } }); } }); const mutationSlice = createSlice({ name: `${reducerPath}/mutations`, initialState, reducers: { removeMutationResult: { reducer(draft, { payload }) { const cacheKey = getMutationCacheKey(payload); if (cacheKey in draft) { delete draft[cacheKey]; } }, prepare: prepareAutoBatched() } }, extraReducers(builder) { builder.addCase(mutationThunk.pending, (draft, { meta, meta: { requestId, arg, startedTimeStamp } }) => { if (!arg.track) return; draft[getMutationCacheKey(meta)] = { requestId, status: QueryStatus.pending, endpointName: arg.endpointName, startedTimeStamp }; }).addCase(mutationThunk.fulfilled, (draft, { payload, meta }) => { if (!meta.arg.track) return; updateMutationSubstateIfExists(draft, meta, (substate) => { if (substate.requestId !== meta.requestId) return; substate.status = QueryStatus.fulfilled; substate.data = payload; substate.fulfilledTimeStamp = meta.fulfilledTimeStamp; }); }).addCase(mutationThunk.rejected, (draft, { payload, error, meta }) => { if (!meta.arg.track) return; updateMutationSubstateIfExists(draft, meta, (substate) => { if (substate.requestId !== meta.requestId) return; substate.status = QueryStatus.rejected; substate.error = payload != null ? payload : error; }); }).addMatcher(hasRehydrationInfo, (draft, action) => { const { mutations } = extractRehydrationInfo(action); for (const [key, entry] of Object.entries(mutations)) { if (((entry == null ? void 0 : entry.status) === QueryStatus.fulfilled || (entry == null ? void 0 : entry.status) === QueryStatus.rejected) && key !== (entry == null ? 
void 0 : entry.requestId)) {
  draft[key] = entry;
}
}
});
}
});
// Slice maintaining the tag → queryCacheKey index used for cache
// invalidation. Shape: draft[tagType][tagId] = [queryCacheKey, ...].
const invalidationSlice = createSlice({
  name: `${reducerPath}/invalidation`,
  initialState,
  reducers: {
    updateProvidedBy: {
      reducer(draft, action) {
        var _a, _b, _c, _d;
        const { queryCacheKey, providedTags } = action.payload;
        // First remove this cache key from every tag subscription list...
        for (const tagTypeSubscriptions of Object.values(draft)) {
          for (const idSubscriptions of Object.values(tagTypeSubscriptions)) {
            const foundAt = idSubscriptions.indexOf(queryCacheKey);
            if (foundAt !== -1) {
              idSubscriptions.splice(foundAt, 1);
            }
          }
        }
        // ...then re-register it under each tag it now provides. The nested
        // `!= null ? ... : ... = {}` / `= []` chains lazily create the
        // per-type and per-id containers; tags without an id are grouped
        // under the "__internal_without_id" sentinel key.
        for (const { type, id } of providedTags) {
          const subscribedQueries = (_d = (_b = (_a = draft[type]) != null ? _a : draft[type] = {})[_c = id || "__internal_without_id"]) != null ? _d : _b[_c] = [];
          const alreadySubscribed = subscribedQueries.includes(queryCacheKey);
          if (!alreadySubscribed) {
            subscribedQueries.push(queryCacheKey);
          }
        }
      },
      // Mark the action for automatic batching.
      prepare: prepareAutoBatched()
    }
  },
  extraReducers(builder) {
    builder.addCase(querySlice.actions.removeQueryResult, (draft, { payload: { queryCacheKey } }) => {
      // A query result was evicted — drop its key from every tag list.
      for (const tagTypeSubscriptions of Object.values(draft)) {
        for (const idSubscriptions of Object.values(tagTypeSubscriptions)) {
          const foundAt = idSubscriptions.indexOf(queryCacheKey);
          if (foundAt !== -1) {
            idSubscriptions.splice(foundAt, 1);
          }
        }
      }
    }).addMatcher(hasRehydrationInfo, (draft, action) => {
      var _a, _b, _c, _d;
      // Merge rehydrated tag subscriptions into the index, creating the
      // per-type / per-id containers on demand (same pattern as above).
      const { provided } = extractRehydrationInfo(action);
      for (const [type, incomingTags] of Object.entries(provided)) {
        for (const [id, cacheKeys] of Object.entries(incomingTags)) {
          const subscribedQueries = (_d = (_b = (_a = draft[type]) != null ? _a : draft[type] = {})[_c = id || "__internal_without_id"]) != null ?
_d : _b[_c] = [];
for (const queryCacheKey of cacheKeys) {
  const alreadySubscribed = subscribedQueries.includes(queryCacheKey);
  if (!alreadySubscribed) {
    subscribedQueries.push(queryCacheKey);
  }
}
}
}
}).addMatcher(isAnyOf(isFulfilled2(queryThunk), isRejectedWithValue2(queryThunk)), (draft, action) => {
  // When a query settles (fulfilled or rejected-with-value), recompute its
  // provided tags and reuse the updateProvidedBy case reducer to refresh
  // the invalidation index.
  const providedTags = calculateProvidedByThunk(action, "providesTags", definitions, assertTagType);
  const { queryCacheKey } = action.meta.arg;
  invalidationSlice.caseReducers.updateProvidedBy(draft, invalidationSlice.actions.updateProvidedBy({ queryCacheKey, providedTags }));
});
}
});
// Slice whose reducers are intentionally empty no-ops: only the action
// creators are used (presumably handled by middleware elsewhere —
// NOTE(review): handlers are not visible in this chunk).
const subscriptionSlice = createSlice({
  name: `${reducerPath}/subscriptions`,
  initialState,
  reducers: {
    updateSubscriptionOptions(d, a) {
    },
    unsubscribeQueryResult(d, a) {
    },
    internal_probeSubscription(d, a) {
    }
  }
});
// Internal subscription state, updated by applying Immer patches delivered
// in the action payload.
const internalSubscriptionsSlice = createSlice({
  name: `${reducerPath}/internalSubscriptions`,
  initialState,
  reducers: {
    subscriptionsUpdated: {
      reducer(state, action) {
        return applyPatches(state, action.payload);
      },
      // Mark the action for automatic batching.
      prepare: prepareAutoBatched()
    }
  }
});
// Environment/config slice: tracks online/focus status and whether the api
// middleware has been registered for this api instance.
const configSlice = createSlice({
  name: `${reducerPath}/config`,
  initialState: __spreadValues({ online: isOnline(), focused: isDocumentVisible(), middlewareRegistered: false }, config),
  reducers: {
    middlewareRegistered(state, { payload }) {
      // A payload carrying a different apiUid means two middlewares claim
      // this api — latch the "conflict" marker permanently.
      state.middlewareRegistered = state.middlewareRegistered === "conflict" || apiUid !== payload ?
"conflict" : true; } }, extraReducers: (builder) => { builder.addCase(onOnline, (state) => { state.online = true; }).addCase(onOffline, (state) => { state.online = false; }).addCase(onFocus, (state) => { state.focused = true; }).addCase(onFocusLost, (state) => { state.focused = false; }).addMatcher(hasRehydrationInfo, (draft) => __spreadValues({}, draft)); } }); const combinedReducer = combineReducers({ queries: querySlice.reducer, mutations: mutationSlice.reducer, provided: invalidationSlice.reducer, subscriptions: internalSubscriptionsSlice.reducer, config: configSlice.reducer }); const reducer = (state, action) => combinedReducer(resetApiState.match(action) ? void 0 : state, action); const actions = __spreadProps(__spreadValues(__spreadValues(__spreadValues(__spreadValues(__spreadValues(__spreadValues({}, configSlice.actions), querySlice.actions), subscriptionSlice.actions), internalSubscriptionsSlice.actions), mutationSlice.actions), invalidationSlice.actions), { unsubscribeMutationResult: mutationSlice.actions.removeMutationResult, res