// @reduxjs/toolkit — RTK Query CommonJS bundle
// "The official, opinionated, batteries-included toolset for efficient Redux development"
// (npm page artifact retained as comments: "Version:", "1,492 lines (1,468 loc) • 98.6 kB", "JavaScript")
"use strict";
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __export = (target, all) => {
for (var name in all)
__defProp(target, name, { get: all[name], enumerable: true });
};
var __copyProps = (to, from, except, desc) => {
if (from && typeof from === "object" || typeof from === "function") {
for (let key of __getOwnPropNames(from))
if (!__hasOwnProp.call(to, key) && key !== except)
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
}
return to;
};
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
// src/query/index.ts
var query_exports = {};
__export(query_exports, {
NamedSchemaError: () => NamedSchemaError,
QueryStatus: () => QueryStatus,
_NEVER: () => _NEVER,
buildCreateApi: () => buildCreateApi,
copyWithStructuralSharing: () => copyWithStructuralSharing,
coreModule: () => coreModule,
coreModuleName: () => coreModuleName,
createApi: () => createApi,
defaultSerializeQueryArgs: () => defaultSerializeQueryArgs,
fakeBaseQuery: () => fakeBaseQuery,
fetchBaseQuery: () => fetchBaseQuery,
retry: () => retry,
setupListeners: () => setupListeners,
skipToken: () => skipToken
});
module.exports = __toCommonJS(query_exports);
// src/query/core/apiState.ts
var QueryStatus = /* @__PURE__ */ ((QueryStatus2) => {
QueryStatus2["uninitialized"] = "uninitialized";
QueryStatus2["pending"] = "pending";
QueryStatus2["fulfilled"] = "fulfilled";
QueryStatus2["rejected"] = "rejected";
return QueryStatus2;
})(QueryStatus || {});
function getRequestStatusFlags(status) {
return {
status,
isUninitialized: status === "uninitialized" /* uninitialized */,
isLoading: status === "pending" /* pending */,
isSuccess: status === "fulfilled" /* fulfilled */,
isError: status === "rejected" /* rejected */
};
}
// src/query/core/rtkImports.ts
var import_toolkit = require("@reduxjs/toolkit");
// src/query/utils/copyWithStructuralSharing.ts
var isPlainObject2 = import_toolkit.isPlainObject;
function copyWithStructuralSharing(oldObj, newObj) {
if (oldObj === newObj || !(isPlainObject2(oldObj) && isPlainObject2(newObj) || Array.isArray(oldObj) && Array.isArray(newObj))) {
return newObj;
}
const newKeys = Object.keys(newObj);
const oldKeys = Object.keys(oldObj);
let isSameObject = newKeys.length === oldKeys.length;
const mergeObj = Array.isArray(newObj) ? [] : {};
for (const key of newKeys) {
mergeObj[key] = copyWithStructuralSharing(oldObj[key], newObj[key]);
if (isSameObject) isSameObject = oldObj[key] === mergeObj[key];
}
return isSameObject ? oldObj : mergeObj;
}
// src/query/utils/countObjectKeys.ts
function countObjectKeys(obj) {
let count = 0;
for (const _key in obj) {
count++;
}
return count;
}
// src/query/utils/flatten.ts
var flatten = (arr) => [].concat(...arr);
// src/query/utils/isAbsoluteUrl.ts
function isAbsoluteUrl(url) {
return new RegExp(`(^|:)//`).test(url);
}
// src/query/utils/isDocumentVisible.ts
function isDocumentVisible() {
if (typeof document === "undefined") {
return true;
}
return document.visibilityState !== "hidden";
}
// src/query/utils/isNotNullish.ts
function isNotNullish(v) {
return v != null;
}
// src/query/utils/isOnline.ts
function isOnline() {
return typeof navigator === "undefined" ? true : navigator.onLine === void 0 ? true : navigator.onLine;
}
// src/query/utils/joinUrls.ts
var withoutTrailingSlash = (url) => url.replace(/\/$/, "");
var withoutLeadingSlash = (url) => url.replace(/^\//, "");
function joinUrls(base, url) {
if (!base) {
return url;
}
if (!url) {
return base;
}
if (isAbsoluteUrl(url)) {
return url;
}
const delimiter = base.endsWith("/") || !url.startsWith("?") ? "/" : "";
base = withoutTrailingSlash(base);
url = withoutLeadingSlash(url);
return `${base}${delimiter}${url}`;
}
// src/query/utils/getOrInsert.ts
function getOrInsert(map, key, value) {
if (map.has(key)) return map.get(key);
return map.set(key, value).get(key);
}
// src/query/fetchBaseQuery.ts
var defaultFetchFn = (...args) => fetch(...args);
var defaultValidateStatus = (response) => response.status >= 200 && response.status <= 299;
var defaultIsJsonContentType = (headers) => (
/*applicat*/
/ion\/(vnd\.api\+)?json/.test(headers.get("content-type") || "")
);
function stripUndefined(obj) {
if (!(0, import_toolkit.isPlainObject)(obj)) {
return obj;
}
const copy = {
...obj
};
for (const [k, v] of Object.entries(copy)) {
if (v === void 0) delete copy[k];
}
return copy;
}
function fetchBaseQuery({
baseUrl,
prepareHeaders = (x) => x,
fetchFn = defaultFetchFn,
paramsSerializer,
isJsonContentType = defaultIsJsonContentType,
jsonContentType = "application/json",
jsonReplacer,
timeout: defaultTimeout,
responseHandler: globalResponseHandler,
validateStatus: globalValidateStatus,
...baseFetchOptions
} = {}) {
if (typeof fetch === "undefined" && fetchFn === defaultFetchFn) {
console.warn("Warning: `fetch` is not available. Please supply a custom `fetchFn` property to use `fetchBaseQuery` on SSR environments.");
}
return async (arg, api, extraOptions) => {
const {
getState,
extra,
endpoint,
forced,
type
} = api;
let meta;
let {
url,
headers = new Headers(baseFetchOptions.headers),
params = void 0,
responseHandler = globalResponseHandler ?? "json",
validateStatus = globalValidateStatus ?? defaultValidateStatus,
timeout = defaultTimeout,
...rest
} = typeof arg == "string" ? {
url: arg
} : arg;
let abortController, signal = api.signal;
if (timeout) {
abortController = new AbortController();
api.signal.addEventListener("abort", abortController.abort);
signal = abortController.signal;
}
let config = {
...baseFetchOptions,
signal,
...rest
};
headers = new Headers(stripUndefined(headers));
config.headers = await prepareHeaders(headers, {
getState,
arg,
extra,
endpoint,
forced,
type,
extraOptions
}) || headers;
const isJsonifiable = (body) => typeof body === "object" && ((0, import_toolkit.isPlainObject)(body) || Array.isArray(body) || typeof body.toJSON === "function");
if (!config.headers.has("content-type") && isJsonifiable(config.body)) {
config.headers.set("content-type", jsonContentType);
}
if (isJsonifiable(config.body) && isJsonContentType(config.headers)) {
config.body = JSON.stringify(config.body, jsonReplacer);
}
if (params) {
const divider = ~url.indexOf("?") ? "&" : "?";
const query = paramsSerializer ? paramsSerializer(params) : new URLSearchParams(stripUndefined(params));
url += divider + query;
}
url = joinUrls(baseUrl, url);
const request = new Request(url, config);
const requestClone = new Request(url, config);
meta = {
request: requestClone
};
let response, timedOut = false, timeoutId = abortController && setTimeout(() => {
timedOut = true;
abortController.abort();
}, timeout);
try {
response = await fetchFn(request);
} catch (e) {
return {
error: {
status: timedOut ? "TIMEOUT_ERROR" : "FETCH_ERROR",
error: String(e)
},
meta
};
} finally {
if (timeoutId) clearTimeout(timeoutId);
abortController?.signal.removeEventListener("abort", abortController.abort);
}
const responseClone = response.clone();
meta.response = responseClone;
let resultData;
let responseText = "";
try {
let handleResponseError;
await Promise.all([
handleResponse(response, responseHandler).then((r) => resultData = r, (e) => handleResponseError = e),
// see https://github.com/node-fetch/node-fetch/issues/665#issuecomment-538995182
// we *have* to "use up" both streams at the same time or they will stop running in node-fetch scenarios
responseClone.text().then((r) => responseText = r, () => {
})
]);
if (handleResponseError) throw handleResponseError;
} catch (e) {
return {
error: {
status: "PARSING_ERROR",
originalStatus: response.status,
data: responseText,
error: String(e)
},
meta
};
}
return validateStatus(response, resultData) ? {
data: resultData,
meta
} : {
error: {
status: response.status,
data: resultData
},
meta
};
};
async function handleResponse(response, responseHandler) {
if (typeof responseHandler === "function") {
return responseHandler(response);
}
if (responseHandler === "content-type") {
responseHandler = isJsonContentType(response.headers) ? "json" : "text";
}
if (responseHandler === "json") {
const text = await response.text();
return text.length ? JSON.parse(text) : null;
}
return response.text();
}
}
// src/query/HandledError.ts
var HandledError = class {
constructor(value, meta = void 0) {
this.value = value;
this.meta = meta;
}
};
// src/query/retry.ts
async function defaultBackoff(attempt = 0, maxRetries = 5) {
const attempts = Math.min(attempt, maxRetries);
const timeout = ~~((Math.random() + 0.4) * (300 << attempts));
await new Promise((resolve) => setTimeout((res) => resolve(res), timeout));
}
function fail(error, meta) {
throw Object.assign(new HandledError({
error,
meta
}), {
throwImmediately: true
});
}
var EMPTY_OPTIONS = {};
var retryWithBackoff = (baseQuery, defaultOptions) => async (args, api, extraOptions) => {
const possibleMaxRetries = [5, (defaultOptions || EMPTY_OPTIONS).maxRetries, (extraOptions || EMPTY_OPTIONS).maxRetries].filter((x) => x !== void 0);
const [maxRetries] = possibleMaxRetries.slice(-1);
const defaultRetryCondition = (_, __, {
attempt
}) => attempt <= maxRetries;
const options = {
maxRetries,
backoff: defaultBackoff,
retryCondition: defaultRetryCondition,
...defaultOptions,
...extraOptions
};
let retry2 = 0;
while (true) {
try {
const result = await baseQuery(args, api, extraOptions);
if (result.error) {
throw new HandledError(result);
}
return result;
} catch (e) {
retry2++;
if (e.throwImmediately) {
if (e instanceof HandledError) {
return e.value;
}
throw e;
}
if (e instanceof HandledError && !options.retryCondition(e.value.error, args, {
attempt: retry2,
baseQueryApi: api,
extraOptions
})) {
return e.value;
}
await options.backoff(retry2, options.maxRetries);
}
}
};
var retry = /* @__PURE__ */ Object.assign(retryWithBackoff, {
fail
});
// src/query/core/setupListeners.ts
var onFocus = /* @__PURE__ */ (0, import_toolkit.createAction)("__rtkq/focused");
var onFocusLost = /* @__PURE__ */ (0, import_toolkit.createAction)("__rtkq/unfocused");
var onOnline = /* @__PURE__ */ (0, import_toolkit.createAction)("__rtkq/online");
var onOffline = /* @__PURE__ */ (0, import_toolkit.createAction)("__rtkq/offline");
var initialized = false;
function setupListeners(dispatch, customHandler) {
function defaultHandler() {
const handleFocus = () => dispatch(onFocus());
const handleFocusLost = () => dispatch(onFocusLost());
const handleOnline = () => dispatch(onOnline());
const handleOffline = () => dispatch(onOffline());
const handleVisibilityChange = () => {
if (window.document.visibilityState === "visible") {
handleFocus();
} else {
handleFocusLost();
}
};
if (!initialized) {
if (typeof window !== "undefined" && window.addEventListener) {
window.addEventListener("visibilitychange", handleVisibilityChange, false);
window.addEventListener("focus", handleFocus, false);
window.addEventListener("online", handleOnline, false);
window.addEventListener("offline", handleOffline, false);
initialized = true;
}
}
const unsubscribe = () => {
window.removeEventListener("focus", handleFocus);
window.removeEventListener("visibilitychange", handleVisibilityChange);
window.removeEventListener("online", handleOnline);
window.removeEventListener("offline", handleOffline);
initialized = false;
};
return unsubscribe;
}
return customHandler ? customHandler(dispatch, {
onFocus,
onFocusLost,
onOffline,
onOnline
}) : defaultHandler();
}
// src/query/endpointDefinitions.ts
function isQueryDefinition(e) {
return e.type === "query" /* query */;
}
function isMutationDefinition(e) {
return e.type === "mutation" /* mutation */;
}
function isInfiniteQueryDefinition(e) {
return e.type === "infinitequery" /* infinitequery */;
}
function isAnyQueryDefinition(e) {
return isQueryDefinition(e) || isInfiniteQueryDefinition(e);
}
function calculateProvidedBy(description, result, error, queryArg, meta, assertTagTypes) {
if (isFunction(description)) {
return description(result, error, queryArg, meta).filter(isNotNullish).map(expandTagDescription).map(assertTagTypes);
}
if (Array.isArray(description)) {
return description.map(expandTagDescription).map(assertTagTypes);
}
return [];
}
function isFunction(t) {
return typeof t === "function";
}
function expandTagDescription(description) {
return typeof description === "string" ? {
type: description
} : description;
}
// src/query/core/buildThunks.ts
var import_immer = require("immer");
// src/query/core/buildInitiate.ts
var import_toolkit2 = require("@reduxjs/toolkit");
// src/tsHelpers.ts
function asSafePromise(promise, fallback) {
return promise.catch(fallback);
}
// src/query/core/buildInitiate.ts
var forceQueryFnSymbol = Symbol("forceQueryFn");
var isUpsertQuery = (arg) => typeof arg[forceQueryFnSymbol] === "function";
function buildInitiate({
serializeQueryArgs,
queryThunk,
infiniteQueryThunk,
mutationThunk,
api,
context
}) {
const runningQueries = /* @__PURE__ */ new Map();
const runningMutations = /* @__PURE__ */ new Map();
const {
unsubscribeQueryResult,
removeMutationResult,
updateSubscriptionOptions
} = api.internalActions;
return {
buildInitiateQuery,
buildInitiateInfiniteQuery,
buildInitiateMutation,
getRunningQueryThunk,
getRunningMutationThunk,
getRunningQueriesThunk,
getRunningMutationsThunk
};
function getRunningQueryThunk(endpointName, queryArgs) {
return (dispatch) => {
const endpointDefinition = context.endpointDefinitions[endpointName];
const queryCacheKey = serializeQueryArgs({
queryArgs,
endpointDefinition,
endpointName
});
return runningQueries.get(dispatch)?.[queryCacheKey];
};
}
function getRunningMutationThunk(_endpointName, fixedCacheKeyOrRequestId) {
return (dispatch) => {
return runningMutations.get(dispatch)?.[fixedCacheKeyOrRequestId];
};
}
function getRunningQueriesThunk() {
return (dispatch) => Object.values(runningQueries.get(dispatch) || {}).filter(isNotNullish);
}
function getRunningMutationsThunk() {
return (dispatch) => Object.values(runningMutations.get(dispatch) || {}).filter(isNotNullish);
}
function middlewareWarning(dispatch) {
if (true) {
if (middlewareWarning.triggered) return;
const returnedValue = dispatch(api.internalActions.internal_getRTKQSubscriptions());
middlewareWarning.triggered = true;
if (typeof returnedValue !== "object" || typeof returnedValue?.type === "string") {
throw new Error(false ? _formatProdErrorMessage(34) : `Warning: Middleware for RTK-Query API at reducerPath "${api.reducerPath}" has not been added to the store.
You must add the middleware for RTK-Query to function correctly!`);
}
}
}
function buildInitiateAnyQuery(endpointName, endpointDefinition) {
const queryAction = (arg, {
subscribe = true,
forceRefetch,
subscriptionOptions,
[forceQueryFnSymbol]: forceQueryFn,
...rest
} = {}) => (dispatch, getState) => {
const queryCacheKey = serializeQueryArgs({
queryArgs: arg,
endpointDefinition,
endpointName
});
let thunk;
const commonThunkArgs = {
...rest,
type: "query",
subscribe,
forceRefetch,
subscriptionOptions,
endpointName,
originalArgs: arg,
queryCacheKey,
[forceQueryFnSymbol]: forceQueryFn
};
if (isQueryDefinition(endpointDefinition)) {
thunk = queryThunk(commonThunkArgs);
} else {
const {
direction,
initialPageParam
} = rest;
thunk = infiniteQueryThunk({
...commonThunkArgs,
// Supply these even if undefined. This helps with a field existence
// check over in `buildSlice.ts`
direction,
initialPageParam
});
}
const selector = api.endpoints[endpointName].select(arg);
const thunkResult = dispatch(thunk);
const stateAfter = selector(getState());
middlewareWarning(dispatch);
const {
requestId,
abort
} = thunkResult;
const skippedSynchronously = stateAfter.requestId !== requestId;
const runningQuery = runningQueries.get(dispatch)?.[queryCacheKey];
const selectFromState = () => selector(getState());
const statePromise = Object.assign(forceQueryFn ? (
// a query has been forced (upsertQueryData)
// -> we want to resolve it once data has been written with the data that will be written
thunkResult.then(selectFromState)
) : skippedSynchronously && !runningQuery ? (
// a query has been skipped due to a condition and we do not have any currently running query
// -> we want to resolve it immediately with the current data
Promise.resolve(stateAfter)
) : (
// query just started or one is already in flight
// -> wait for the running query, then resolve with data from after that
Promise.all([runningQuery, thunkResult]).then(selectFromState)
), {
arg,
requestId,
subscriptionOptions,
queryCacheKey,
abort,
async unwrap() {
const result = await statePromise;
if (result.isError) {
throw result.error;
}
return result.data;
},
refetch: () => dispatch(queryAction(arg, {
subscribe: false,
forceRefetch: true
})),
unsubscribe() {
if (subscribe) dispatch(unsubscribeQueryResult({
queryCacheKey,
requestId
}));
},
updateSubscriptionOptions(options) {
statePromise.subscriptionOptions = options;
dispatch(updateSubscriptionOptions({
endpointName,
requestId,
queryCacheKey,
options
}));
}
});
if (!runningQuery && !skippedSynchronously && !forceQueryFn) {
const running = getOrInsert(runningQueries, dispatch, {});
running[queryCacheKey] = statePromise;
statePromise.then(() => {
delete running[queryCacheKey];
if (!countObjectKeys(running)) {
runningQueries.delete(dispatch);
}
});
}
return statePromise;
};
return queryAction;
}
function buildInitiateQuery(endpointName, endpointDefinition) {
const queryAction = buildInitiateAnyQuery(endpointName, endpointDefinition);
return queryAction;
}
function buildInitiateInfiniteQuery(endpointName, endpointDefinition) {
const infiniteQueryAction = buildInitiateAnyQuery(endpointName, endpointDefinition);
return infiniteQueryAction;
}
function buildInitiateMutation(endpointName) {
return (arg, {
track = true,
fixedCacheKey
} = {}) => (dispatch, getState) => {
const thunk = mutationThunk({
type: "mutation",
endpointName,
originalArgs: arg,
track,
fixedCacheKey
});
const thunkResult = dispatch(thunk);
middlewareWarning(dispatch);
const {
requestId,
abort,
unwrap
} = thunkResult;
const returnValuePromise = asSafePromise(thunkResult.unwrap().then((data) => ({
data
})), (error) => ({
error
}));
const reset = () => {
dispatch(removeMutationResult({
requestId,
fixedCacheKey
}));
};
const ret = Object.assign(returnValuePromise, {
arg: thunkResult.arg,
requestId,
abort,
unwrap,
reset
});
const running = runningMutations.get(dispatch) || {};
runningMutations.set(dispatch, running);
running[requestId] = ret;
ret.then(() => {
delete running[requestId];
if (!countObjectKeys(running)) {
runningMutations.delete(dispatch);
}
});
if (fixedCacheKey) {
running[fixedCacheKey] = ret;
ret.then(() => {
if (running[fixedCacheKey] === ret) {
delete running[fixedCacheKey];
if (!countObjectKeys(running)) {
runningMutations.delete(dispatch);
}
}
});
}
return ret;
};
}
}
// src/query/standardSchema.ts
var import_utils4 = require("@standard-schema/utils");
var NamedSchemaError = class extends import_utils4.SchemaError {
constructor(issues, value, schemaName, _bqMeta) {
super(issues);
this.value = value;
this.schemaName = schemaName;
this._bqMeta = _bqMeta;
}
};
async function parseWithSchema(schema, data, schemaName, bqMeta) {
const result = await schema["~standard"].validate(data);
if (result.issues) {
throw new NamedSchemaError(result.issues, data, schemaName, bqMeta);
}
return result.value;
}
// src/query/core/buildThunks.ts
function defaultTransformResponse(baseQueryReturnValue) {
return baseQueryReturnValue;
}
var addShouldAutoBatch = (arg = {}) => {
return {
...arg,
[import_toolkit.SHOULD_AUTOBATCH]: true
};
};
function buildThunks({
reducerPath,
baseQuery,
context: {
endpointDefinitions
},
serializeQueryArgs,
api,
assertTagType,
selectors,
onSchemaFailure,
catchSchemaFailure: globalCatchSchemaFailure,
skipSchemaValidation: globalSkipSchemaValidation
}) {
const patchQueryData = (endpointName, arg, patches, updateProvided) => (dispatch, getState) => {
const endpointDefinition = endpointDefinitions[endpointName];
const queryCacheKey = serializeQueryArgs({
queryArgs: arg,
endpointDefinition,
endpointName
});
dispatch(api.internalActions.queryResultPatched({
queryCacheKey,
patches
}));
if (!updateProvided) {
return;
}
const newValue = api.endpoints[endpointName].select(arg)(
// Work around TS 4.1 mismatch
getState()
);
const providedTags = calculateProvidedBy(endpointDefinition.providesTags, newValue.data, void 0, arg, {}, assertTagType);
dispatch(api.internalActions.updateProvidedBy([{
queryCacheKey,
providedTags
}]));
};
function addToStart(items, item, max = 0) {
const newItems = [item, ...items];
return max && newItems.length > max ? newItems.slice(0, -1) : newItems;
}
function addToEnd(items, item, max = 0) {
const newItems = [...items, item];
return max && newItems.length > max ? newItems.slice(1) : newItems;
}
const updateQueryData = (endpointName, arg, updateRecipe, updateProvided = true) => (dispatch, getState) => {
const endpointDefinition = api.endpoints[endpointName];
const currentState = endpointDefinition.select(arg)(
// Work around TS 4.1 mismatch
getState()
);
const ret = {
patches: [],
inversePatches: [],
undo: () => dispatch(api.util.patchQueryData(endpointName, arg, ret.inversePatches, updateProvided))
};
if (currentState.status === "uninitialized" /* uninitialized */) {
return ret;
}
let newValue;
if ("data" in currentState) {
if ((0, import_immer.isDraftable)(currentState.data)) {
const [value, patches, inversePatches] = (0, import_immer.produceWithPatches)(currentState.data, updateRecipe);
ret.patches.push(...patches);
ret.inversePatches.push(...inversePatches);
newValue = value;
} else {
newValue = updateRecipe(currentState.data);
ret.patches.push({
op: "replace",
path: [],
value: newValue
});
ret.inversePatches.push({
op: "replace",
path: [],
value: currentState.data
});
}
}
if (ret.patches.length === 0) {
return ret;
}
dispatch(api.util.patchQueryData(endpointName, arg, ret.patches, updateProvided));
return ret;
};
const upsertQueryData = (endpointName, arg, value) => (dispatch) => {
const res = dispatch(api.endpoints[endpointName].initiate(arg, {
subscribe: false,
forceRefetch: true,
[forceQueryFnSymbol]: () => ({
data: value
})
}));
return res;
};
const getTransformCallbackForEndpoint = (endpointDefinition, transformFieldName) => {
return endpointDefinition.query && endpointDefinition[transformFieldName] ? endpointDefinition[transformFieldName] : defaultTransformResponse;
};
const executeEndpoint = async (arg, {
signal,
abort,
rejectWithValue,
fulfillWithValue,
dispatch,
getState,
extra
}) => {
const endpointDefinition = endpointDefinitions[arg.endpointName];
const {
metaSchema,
skipSchemaValidation = globalSkipSchemaValidation
} = endpointDefinition;
try {
let transformResponse = getTransformCallbackForEndpoint(endpointDefinition, "transformResponse");
const baseQueryApi = {
signal,
abort,
dispatch,
getState,
extra,
endpoint: arg.endpointName,
type: arg.type,
forced: arg.type === "query" ? isForcedQuery(arg, getState()) : void 0,
queryCacheKey: arg.type === "query" ? arg.queryCacheKey : void 0
};
const forceQueryFn = arg.type === "query" ? arg[forceQueryFnSymbol] : void 0;
let finalQueryReturnValue;
const fetchPage = async (data, param, maxPages, previous) => {
if (param == null && data.pages.length) {
return Promise.resolve({
data
});
}
const finalQueryArg = {
queryArg: arg.originalArgs,
pageParam: param
};
const pageResponse = await executeRequest(finalQueryArg);
const addTo = previous ? addToStart : addToEnd;
return {
data: {
pages: addTo(data.pages, pageResponse.data, maxPages),
pageParams: addTo(data.pageParams, param, maxPages)
},
meta: pageResponse.meta
};
};
async function executeRequest(finalQueryArg) {
let result;
const {
extraOptions,
argSchema,
rawResponseSchema,
responseSchema
} = endpointDefinition;
if (argSchema && !skipSchemaValidation) {
finalQueryArg = await parseWithSchema(
argSchema,
finalQueryArg,
"argSchema",
{}
// we don't have a meta yet, so we can't pass it
);
}
if (forceQueryFn) {
result = forceQueryFn();
} else if (endpointDefinition.query) {
result = await baseQuery(endpointDefinition.query(finalQueryArg), baseQueryApi, extraOptions);
} else {
result = await endpointDefinition.queryFn(finalQueryArg, baseQueryApi, extraOptions, (arg2) => baseQuery(arg2, baseQueryApi, extraOptions));
}
if (typeof process !== "undefined" && true) {
const what = endpointDefinition.query ? "`baseQuery`" : "`queryFn`";
let err;
if (!result) {
err = `${what} did not return anything.`;
} else if (typeof result !== "object") {
err = `${what} did not return an object.`;
} else if (result.error && result.data) {
err = `${what} returned an object containing both \`error\` and \`result\`.`;
} else if (result.error === void 0 && result.data === void 0) {
err = `${what} returned an object containing neither a valid \`error\` and \`result\`. At least one of them should not be \`undefined\``;
} else {
for (const key of Object.keys(result)) {
if (key !== "error" && key !== "data" && key !== "meta") {
err = `The object returned by ${what} has the unknown property ${key}.`;
break;
}
}
}
if (err) {
console.error(`Error encountered handling the endpoint ${arg.endpointName}.
${err}
It needs to return an object with either the shape \`{ data: <value> }\` or \`{ error: <value> }\` that may contain an optional \`meta\` property.
Object returned was:`, result);
}
}
if (result.error) throw new HandledError(result.error, result.meta);
let {
data
} = result;
if (rawResponseSchema && !skipSchemaValidation) {
data = await parseWithSchema(rawResponseSchema, result.data, "rawResponseSchema", result.meta);
}
let transformedResponse = await transformResponse(data, result.meta, finalQueryArg);
if (responseSchema && !skipSchemaValidation) {
transformedResponse = await parseWithSchema(responseSchema, transformedResponse, "responseSchema", result.meta);
}
return {
...result,
data: transformedResponse
};
}
if (arg.type === "query" && "infiniteQueryOptions" in endpointDefinition) {
const {
infiniteQueryOptions
} = endpointDefinition;
const {
maxPages = Infinity
} = infiniteQueryOptions;
let result;
const blankData = {
pages: [],
pageParams: []
};
const cachedData = selectors.selectQueryEntry(getState(), arg.queryCacheKey)?.data;
const isForcedQueryNeedingRefetch = (
// arg.forceRefetch
isForcedQuery(arg, getState()) && !arg.direction
);
const existingData = isForcedQueryNeedingRefetch || !cachedData ? blankData : cachedData;
if ("direction" in arg && arg.direction && existingData.pages.length) {
const previous = arg.direction === "backward";
const pageParamFn = previous ? getPreviousPageParam : getNextPageParam;
const param = pageParamFn(infiniteQueryOptions, existingData);
result = await fetchPage(existingData, param, maxPages, previous);
} else {
const {
initialPageParam = infiniteQueryOptions.initialPageParam
} = arg;
const cachedPageParams = cachedData?.pageParams ?? [];
const firstPageParam = cachedPageParams[0] ?? initialPageParam;
const totalPages = cachedPageParams.length;
result = await fetchPage(existingData, firstPageParam, maxPages);
if (forceQueryFn) {
result = {
data: result.data.pages[0]
};
}
for (let i = 1; i < totalPages; i++) {
const param = getNextPageParam(infiniteQueryOptions, result.data);
result = await fetchPage(result.data, param, maxPages);
}
}
finalQueryReturnValue = result;
} else {
finalQueryReturnValue = await executeRequest(arg.originalArgs);
}
if (metaSchema && !skipSchemaValidation && finalQueryReturnValue.meta) {
finalQueryReturnValue.meta = await parseWithSchema(metaSchema, finalQueryReturnValue.meta, "metaSchema", finalQueryReturnValue.meta);
}
return fulfillWithValue(finalQueryReturnValue.data, addShouldAutoBatch({
fulfilledTimeStamp: Date.now(),
baseQueryMeta: finalQueryReturnValue.meta
}));
} catch (error) {
let caughtError = error;
if (caughtError instanceof HandledError) {
let transformErrorResponse = getTransformCallbackForEndpoint(endpointDefinition, "transformErrorResponse");
const {
rawErrorResponseSchema,
errorResponseSchema
} = endpointDefinition;
let {
value,
meta
} = caughtError;
try {
if (rawErrorResponseSchema && !skipSchemaValidation) {
value = await parseWithSchema(rawErrorResponseSchema, value, "rawErrorResponseSchema", meta);
}
if (metaSchema && !skipSchemaValidation) {
meta = await parseWithSchema(metaSchema, meta, "metaSchema", meta);
}
let transformedErrorResponse = await transformErrorResponse(value, meta, arg.originalArgs);
if (errorResponseSchema && !skipSchemaValidation) {
transformedErrorResponse = await parseWithSchema(errorResponseSchema, transformedErrorResponse, "errorResponseSchema", meta);
}
return rejectWithValue(transformedErrorResponse, addShouldAutoBatch({
baseQueryMeta: meta
}));
} catch (e) {
caughtError = e;
}
}
try {
if (caughtError instanceof NamedSchemaError) {
const info = {
endpoint: arg.endpointName,
arg: arg.originalArgs,
type: arg.type,
queryCacheKey: arg.type === "query" ? arg.queryCacheKey : void 0
};
endpointDefinition.onSchemaFailure?.(caughtError, info);
onSchemaFailure?.(caughtError, info);
const {
catchSchemaFailure = globalCatchSchemaFailure
} = endpointDefinition;
if (catchSchemaFailure) {
return rejectWithValue(catchSchemaFailure(caughtError, info), addShouldAutoBatch({
baseQueryMeta: caughtError._bqMeta
}));
}
}
} catch (e) {
caughtError = e;
}
if (typeof process !== "undefined" && true) {
console.error(`An unhandled error occurred processing a request for the endpoint "${arg.endpointName}".
In the case of an unhandled error, no tags will be "provided" or "invalidated".`, caughtError);
} else {
console.error(caughtError);
}
throw caughtError;
}
};
function isForcedQuery(arg, state) {
const requestState = selectors.selectQueryEntry(state, arg.queryCacheKey);
const baseFetchOnMountOrArgChange = selectors.selectConfig(state).refetchOnMountOrArgChange;
const fulfilledVal = requestState?.fulfilledTimeStamp;
const refetchVal = arg.forceRefetch ?? (arg.subscribe && baseFetchOnMountOrArgChange);
if (refetchVal) {
return refetchVal === true || (Number(/* @__PURE__ */ new Date()) - Number(fulfilledVal)) / 1e3 >= refetchVal;
}
return false;
}
const createQueryThunk = () => {
const generatedQueryThunk = (0, import_toolkit.createAsyncThunk)(`${reducerPath}/executeQuery`, executeEndpoint, {
getPendingMeta({
arg
}) {
const endpointDefinition = endpointDefinitions[arg.endpointName];
return addShouldAutoBatch({
startedTimeStamp: Date.now(),
...isInfiniteQueryDefinition(endpointDefinition) ? {
direction: arg.direction
} : {}
});
},
condition(queryThunkArg, {
getState
}) {
const state = getState();
const requestState = selectors.selectQueryEntry(state, queryThunkArg.queryCacheKey);
const fulfilledVal = requestState?.fulfilledTimeStamp;
const currentArg = queryThunkArg.originalArgs;
const previousArg = requestState?.originalArgs;
const endpointDefinition = endpointDefinitions[queryThunkArg.endpointName];
const direction = queryThunkArg.direction;
if (isUpsertQuery(queryThunkArg)) {
return true;
}
if (requestState?.status === "pending") {
return false;
}
if (isForcedQuery(queryThunkArg, state)) {
return true;
}
if (isQueryDefinition(endpointDefinition) && endpointDefinition?.forceRefetch?.({
currentArg,
previousArg,
endpointState: requestState,
state
})) {
return true;
}
if (fulfilledVal && !direction) {
return false;
}
return true;
},
dispatchConditionRejection: true
});
return generatedQueryThunk;
};
const queryThunk = createQueryThunk();
const infiniteQueryThunk = createQueryThunk();
const mutationThunk = (0, import_toolkit.createAsyncThunk)(`${reducerPath}/executeMutation`, executeEndpoint, {
getPendingMeta() {
return addShouldAutoBatch({
startedTimeStamp: Date.now()
});
}
});
const hasTheForce = (options) => "force" in options;
const hasMaxAge = (options) => "ifOlderThan" in options;
// Thunk creator for imperative prefetching. Three strategies, checked in
// order: `force` always refetches, `ifOlderThan` refetches when the cached
// result is older than the given number of seconds (or missing), and the
// default merely subscribes without forcing a refetch.
const prefetch = (endpointName, arg, options) => (dispatch, getState) => {
  const force = hasTheForce(options) && options.force;
  const maxAge = hasMaxAge(options) && options.ifOlderThan;
  // Helper that kicks off the endpoint, flagged as a prefetch so hooks can
  // tell it apart from user-initiated fetches.
  const queryAction = (forceRefetch = true) =>
    api.endpoints[endpointName].initiate(arg, {
      forceRefetch,
      isPrefetch: true
    });
  const latestStateValue = api.endpoints[endpointName].select(arg)(getState());
  if (force) {
    dispatch(queryAction());
    return;
  }
  if (maxAge) {
    const lastFulfilledTs = latestStateValue?.fulfilledTimeStamp;
    // Nothing cached yet — fetch unconditionally.
    if (!lastFulfilledTs) {
      dispatch(queryAction());
      return;
    }
    const ageInSeconds = (Date.now() - new Date(lastFulfilledTs).getTime()) / 1e3;
    if (ageInSeconds >= maxAge) {
      dispatch(queryAction());
    }
    return;
  }
  // Default: subscribe to the cache entry without forcing a refetch.
  dispatch(queryAction(false));
};
// Predicate factory: the returned matcher is true only for thunk actions
// whose meta targets the given endpoint name. Safe against actions with no
// meta/arg thanks to optional chaining.
function matchesEndpoint(endpointName) {
  return function (action) {
    return action?.meta?.arg?.endpointName === endpointName;
  };
}
// Combines RTK's lifecycle matchers (pending/fulfilled/rejected for `thunk`)
// with an endpoint-name filter, yielding the `matchPending` /
// `matchFulfilled` / `matchRejected` helpers exposed per endpoint.
function buildMatchThunkActions(thunk, endpointName) {
  const forThisEndpoint = matchesEndpoint(endpointName);
  return {
    matchPending: (0, import_toolkit.isAllOf)((0, import_toolkit.isPending)(thunk), forThisEndpoint),
    matchFulfilled: (0, import_toolkit.isAllOf)((0, import_toolkit.isFulfilled)(thunk), forThisEndpoint),
    matchRejected: (0, import_toolkit.isAllOf)((0, import_toolkit.isRejected)(thunk), forThisEndpoint)
  };
}
// Public surface of buildThunks: the three thunks plus cache-manipulation
// utilities and the per-endpoint matcher factory.
return {
queryThunk,
mutationThunk,
infiniteQueryThunk,
prefetch,
updateQueryData,
upsertQueryData,
patchQueryData,
buildMatchThunkActions
};
}
// Computes the page param for the next (forward) page of an infinite query
// by delegating to the endpoint's `getNextPageParam` callback with the last
// page and its param, plus the full page/param arrays for context.
function getNextPageParam(options, data) {
  const { pages, pageParams } = data;
  const lastIndex = pages.length - 1;
  return options.getNextPageParam(pages[lastIndex], pages, pageParams[lastIndex], pageParams);
}
// Computes the page param for the previous (backward) page of an infinite
// query. `getPreviousPageParam` is optional on the endpoint; when absent
// this returns undefined, meaning "no previous page".
function getPreviousPageParam(options, data) {
  const { pages, pageParams } = data;
  const [firstPage] = pages;
  const [firstParam] = pageParams;
  return options.getPreviousPageParam?.(firstPage, pages, firstParam, pageParams);
}
// Resolves the tags provided/invalidated by a settled thunk action: looks up
// the endpoint's `providesTags`/`invalidatesTags` declaration (`type`) and
// feeds it the success payload (if fulfilled) or error value (if rejected
// with a value), plus the original args and base-query meta.
function calculateProvidedByThunk(action, type, endpointDefinitions, assertTagType) {
  const { meta } = action;
  const tagDeclaration = endpointDefinitions[meta.arg.endpointName][type];
  const successResult = (0, import_toolkit.isFulfilled)(action) ? action.payload : void 0;
  const errorResult = (0, import_toolkit.isRejectedWithValue)(action) ? action.payload : void 0;
  const baseQueryMeta = "baseQueryMeta" in meta ? meta.baseQueryMeta : void 0;
  return calculateProvidedBy(tagDeclaration, successResult, errorResult, meta.arg.originalArgs, baseQueryMeta, assertTagType);
}
// src/query/core/buildSlice.ts
var import_immer2 = require("immer");
var import_immer3 = require("immer");
// Runs `update` against the cache entry for `queryCacheKey` if one exists;
// silently no-ops otherwise (the entry may have been removed already).
function updateQuerySubstateIfExists(state, queryCacheKey, update) {
  const entry = state[queryCacheKey];
  if (!entry) return;
  update(entry);
}
// Derives the cache key for a mutation entry. An explicit `fixedCacheKey`
// (either on `id.arg` for thunk meta, or directly on `id`) lets multiple
// components share one result; otherwise the unique `requestId` is used.
function getMutationCacheKey(id) {
  const fixedKey = "arg" in id ? id.arg.fixedCacheKey : id.fixedCacheKey;
  return fixedKey ?? id.requestId;
}
// Mutation-side twin of updateQuerySubstateIfExists: resolves the cache key
// via getMutationCacheKey, then applies `update` only when an entry exists.
function updateMutationSubstateIfExists(state, id, update) {
  const entry = state[getMutationCacheKey(id)];
  if (!entry) return;
  update(entry);
}
// Shared empty initial state for both the queries and mutations slices.
var initialState = {};
function buildSlice({
reducerPath,
queryThunk,
mutationThunk,
serializeQueryArgs,
context: {
endpointDefinitions: definitions,
apiUid,
extractRehydrationInfo,
hasRehydrationInfo
},
assertTagType,
config
}) {
// Action creator that resets the entire API slice back to its initial state.
const resetApiState = (0, import_toolkit.createAction)(`${reducerPath}/resetApiState`);
// Transitions (creating it first if needed) the cache entry for
// `arg.queryCacheKey` into the "pending" state when a query starts.
// `upserting` is true for upsert flows, where an update to an existing entry
// keeps its current requestId so the in-flight request still owns the entry.
// `meta` carries the thunk's requestId and startedTimeStamp.
function writePendingCacheEntry(draft, arg, upserting, meta) {
// Lazily initialize the entry so the update callback below always runs.
draft[arg.queryCacheKey] ??= {
status: "uninitialized" /* uninitialized */,
endpointName: arg.endpointName
};
updateQuerySubstateIfExists(draft, arg.queryCacheKey, (substate) => {
substate.status = "pending" /* pending */;
substate.requestId = upserting && substate.requestId ? (
// for `upsertQuery` **updates**, keep the current `requestId`
substate.requestId
) : (
// for normal queries or `upsertQuery` **inserts** always update the `requestId`
meta.requestId
);
if (arg.originalArgs !== void 0) {
substate.originalArgs = arg.originalArgs;
}
substate.startedTimeStamp = meta.startedTimeStamp;
// Infinite-query endpoints additionally record the fetch direction from
// the arg so downstream logic can tell which end of the page list grows.
const endpointDefinition = definitions[meta.arg.endpointName];
if (isInfiniteQueryDefinition(endpointDefinition) && "direction" in arg) {
;
substate.direction = arg.direction;
}
});
}
// Writes a successful result (`payload`) into the cache entry for
// `meta.arg.queryCacheKey`, honoring the endpoint's `merge` and
// `structuralSharing` options. Responses from superseded requests are
// dropped unless this is an upsert.
function writeFulfilledCacheEntry(draft, meta, payload, upserting) {
updateQuerySubstateIfExists(draft, meta.arg.queryCacheKey, (substate) => {
// Stale-response guard: only the request that currently owns the entry
// may write, except for upserts which always write.
if (substate.requestId !== meta.requestId && !upserting) return;
const {
merge
} = definitions[meta.arg.endpointName];
substate.status = "fulfilled" /* fulfilled */;
if (merge) {
if (substate.data !== void 0) {
const {
fulfilledTimeStamp,
arg,
baseQueryMeta,
requestId
} = meta;
// Run the endpoint's `merge` inside a nested immer producer so the
// callback may freely mutate a draft of the existing data.
let newData = (0, import_toolkit.createNextState)(substate.data, (draftSubstateData) => {
return merge(draftSubstateData, payload, {
arg: arg.originalArgs,
baseQueryMeta,
fulfilledTimeStamp,
requestId
});
});
substate.data = newData;
} else {
// No existing data to merge into — take the payload as-is.
substate.data = payload;
}
} else {
// Default path: structural sharing (enabled unless the endpoint opts
// out) preserves referentially-equal subtrees across refetches.
// `original` unwraps an immer draft before the comparison.
substate.data = definitions[meta.arg.endpointName].structuralSharing ?? true ? copyWithStructuralSharing((0, import_immer2.isDraft)(substate.data) ? (0, import_immer3.original)(substate.data) : substate.data, payload) : payload;
}
delete substate.error;
substate.fulfilledTimeStamp = meta.fulfilledTimeStamp;
});
}
// Slice managing per-query cache entries, keyed by serialized query cache key.
const querySlice = (0, import_toolkit.createSlice)({
name: `${reducerPath}/queries`,
initialState,
reducers: {
// Removes a cache entry entirely (auto-batched).
removeQueryResult: {
reducer(draft, {
payload: {
queryCacheKey
}
}) {
delete draft[queryCacheKey];
},
prepare: (0, import_toolkit.prepareAutoBatched)()
},
// Bulk upsert: synchronously writes a pending then a fulfilled state for
// each provided entry, all sharing the single requestId/timestamp that
// `prepare` stamps onto the action meta.
cacheEntriesUpserted: {
reducer(draft, action) {
for (const entry of action.payload) {
const {
queryDescription: arg,
value
} = entry;
writePendingCacheEntry(draft, arg, true, {
arg,
requestId: action.meta.requestId,
startedTimeStamp: action.meta.timestamp
});
writeFulfilledCacheEntry(
draft,
{
arg,
requestId: action.meta.requestId,
fulfilledTimeStamp: action.meta.timestamp,
baseQueryMeta: {}
},
value,
// We know we're upserting here
true
);
}
},
// Normalizes `{ endpointName, arg, value }` inputs into full query
// descriptions with serialized cache keys and shared meta.
prepare: (payload) => {
const queryDescriptions = payload.map((entry) => {
const {
endpointName,
arg,
value
} = entry;
const endpointDefinition = definitions[endpointName];
const queryDescription = {
type: "query",
endpointName,
originalArgs: entry.arg,
queryCacheKey: serializeQueryArgs({
queryArgs: arg,
endpointDefinition,
endpointName
})
};
return {
queryDescription,
value
};
});
const result = {
payload: queryDescriptions,
meta: {
[import_toolkit.SHOULD_AUTOBATCH]: true,
requestId: (0, import_toolkit.nanoid)(),
timestamp: Date.now()
}
};
return result;
}
},
// Applies immer patches (e.g. from optimistic updates) to cached data.
queryResultPatched: {
reducer(draft, {
payload: {
queryCacheKey,
patches
}
}) {
updateQuerySubstateIfExists(draft, queryCacheKey, (substate) => {
substate.data = (0, import_immer3.applyPatches)(substate.data, patches.concat());
});
},
prepare: (0, import_toolkit.prepareAutoBatched)()
}
},
extraReducers(builder) {
// Query thunk lifecycle: pending/fulfilled delegate to the shared cache
// writers; rejected distinguishes condition-skips from real failures.
builder.addCase(queryThunk.pending, (draft, {
meta,
meta: {
arg
}
}) => {
const upserting = isUpsertQuery(arg);
writePendingCacheEntry(draft, arg, upserting, meta);
}).addCase(queryThunk.fulfilled, (draft, {
meta,
payload
}) => {
const upserting = isUpsertQuery(meta.arg);
writeFulfilledCacheEntry(draft, meta, payload, upserting);
}).addCase(queryThunk.rejected, (draft, {
meta: {
condition,
arg,
requestId
},
error,
payload
}) => {
updateQuerySubstateIfExists(draft, arg.queryCacheKey, (substate) => {
// `condition === true` means the thunk was skipped by its `condition`
// callback — leave the entry untouched.
if (condition) {
} else {
// Ignore rejections from superseded requests.
if (substate.requestId !== requestId) return;
substate.status = "rejected" /* rejected */;
substate.error = payload ?? error;
}
});
}).addMatcher(hasRehydrationInfo, (draft, action) => {
// Restores persisted query entries on rehydration.
const {
queries
} = extractRehydrationInfo(action);
for (const [key, entry] of Object.entries(queries)) {
if (
// do not rehydrate entries that were currently in flight.
entry?.status === "fulfilled" /* fulfilled */ || entry?.status === "rejected" /* rejected */
) {
draft[key] = entry;
}
}
});
}
});
// Slice managing tracked mutation results, keyed by `fixedCacheKey` when
// supplied (shared results) or the mutation's unique `requestId` otherwise.
const mutationSlice = (0, import_toolkit.createSlice)({
name: `${reducerPath}/mutations`,
initialState,
reducers: {
// Removes a mutation result from the cache (auto-batched).
removeMutationResult: {
reducer(draft, {
payload
}) {
const cacheKey = getMutationCacheKey(payload);
if (cacheKey in draft) {
delete draft[cacheKey];
}
},
prepare: (0, import_toolkit.prepareAutoBatched)()
}
},
extraReducers(builder) {
builder.addCase(mutationThunk.pending, (draft, {
meta,
meta: {
requestId,
arg,
startedTimeStamp
}
}) => {
// Mutations fired with `track: false` never store results in the cache.
if (!arg.track) return;
draft[getMutationCacheKey(meta)] = {
requestId,
status: "pending" /* pending */,
endpointName: arg.endpointName,
startedTimeStamp
};
}).addCase(mutationThunk.fulfilled, (draft, {
payload,
meta
}) => {
if (!meta.arg.track) return;
updateMutationSubstateIfExists(draft, meta, (substate) => {
// Only the request that currently owns the entry may write its result.
if (substate.requestId !== meta.requestId) return;
substate.status = "fulfilled" /* fulfilled */;
substate.data = payload;
substate.fulfilledTimeStamp = meta.fulfilledTimeStamp;
});
}).addCase(mutationThunk.rejected, (draft, {
payload,
error,
meta
}) => {
if (!meta.arg.track) return;
updateMutationSubstateIfExists(draft, meta, (substate) => {
if (substate.requestId !== meta.requestId) return;
substate.status = "rejected" /* rejected */;
// Prefer a rejected-with-value payload over the raw serialized error.
substate.error = payload ?? error;
});
}).addMatcher(hasRehydrationInfo, (draft, action) => {
// Restores persisted mutation entries on rehydration.
const {
mutations
} = extractRehydrationInfo(action);
for (const [key, entry] of Object.entries(mutations)) {
if (
// do not rehydrate entries that were currently in flight.
(entry?.status === "fulfilled" /* fulfilled */ || entry?.status === "rejected" /* rejected */) && // only rehydrate endpoints that were persisted using a `fixedCacheKey`
key !== entry?.requestId
) {
draft[key] = entry;
}
}
});
}
});
// Initial state for the invalidation slice. NOTE(review): the exact shape of
// `tags`/`keys` is populated by the invalidation reducers (truncated below) —
// appears to be a pair of indexes relating tags and cache keys; confirm
// against the reducers before relying on this description.
const initialInvalidationState = {
tags: {},
keys: {}
};
const invalidationSlice = (0, import_toolkit.createSlice)({
name: `${reducerPath}/invalidation`,
initialState: initialInvalidationState,
reducers: {
u