with-simple-caching
Version:
A wrapper that makes it simple to add caching to any function
100 lines (99 loc) • 4.45 kB
TypeScript
import { UniDuration } from '@ehmpathy/uni-time';
import { SimpleInMemoryCache } from 'simple-in-memory-cache';
import { NotUndefined } from 'type-fns';
import { SimpleCache } from '../../domain/SimpleCache';
import { WithSimpleCachingCacheOption } from '../options/getCacheFromCacheOption';
import { KeySerializationMethod } from '../serde/defaults';
/**
 * the shape of any asynchronous function: any arguments in, a promise-wrapped result out
 *
 * note
 * - `any` (rather than `unknown`) is required here so that concrete functions,
 *   e.g. `(x: string) => Promise<number>`, remain assignable to generic
 *   constraints of the form `L extends AsyncLogic`
 */
export type AsyncLogic = (...args: any[]) => Promise<any>;
/**
 * options to configure caching for use with-simple-caching
 */
export interface WithSimpleCachingAsyncOptions<
/**
 * the logic we are caching the responses for
 */
L extends AsyncLogic,
/**
 * the type of cache being used
 */
C extends SimpleCache<any>> {
/**
 * the cache to persist outputs
 *
 * note
 * - either a bare cache option, or an object form which additionally
 *   specifies the in-memory cache used for request deduplication
 */
cache: WithSimpleCachingCacheOption<Parameters<L>, C> | {
/**
 * the cache to cache output to
 */
output: WithSimpleCachingCacheOption<Parameters<L>, C>;
/**
 * the cache to use for parallel in memory request deduplication
 *
 * note
 * - by default, this method will use its own in-memory cache for deduplication
 * - if required, you can pass in your own in-memory cache to use
 * - for example, if you're instantiating the wrapper on each execution of your logic, instead of globally
 * - important: if passing in your own, make sure that the cache time is at least as long as your longest resolving promise (e.g., 15min) ⚠️
 */
deduplication: SimpleInMemoryCache<any>;
};
/**
 * how to serialize the cache key and the cached value
 *
 * note
 * - key: maps the logic's input arguments to the cache key (default lives in ../serde/defaults — confirm there)
 * - value: maps the logic's resolved output to the shape the cache stores (i.e., whatever `C['get']` resolves to)
 */
serialize?: {
key?: KeySerializationMethod<Parameters<L>>;
value?: (output: Awaited<ReturnType<L>>) => NotUndefined<Awaited<ReturnType<C['get']>>>;
};
/**
 * how to deserialize a cached value back into the logic's output shape
 *
 * note
 * - should be the inverse of `serialize.value`
 */
deserialize?: {
value?: (cached: NotUndefined<Awaited<ReturnType<C['get']>>>) => Awaited<ReturnType<L>>;
};
/**
 * how long until the cached output expires
 *
 * note
 * - `null` appears to mean "never expire"; when omitted, the cache's own default
 *   presumably applies — TODO confirm against the implementation
 */
expiration?: UniDuration | null;
/**
 * whether to bypass the cache for either the get or set operation
 */
bypass?: {
/**
 * whether to bypass the cache for the get
 *
 * note
 * - equivalent to the result not already being cached
 *
 * default
 * - process.env.CACHE_BYPASS_GET ? process.env.CACHE_BYPASS_GET === 'true' : process.env.CACHE_BYPASS === 'true'
 */
get?: (input: Parameters<L>) => boolean;
/**
 * whether to bypass the cache for the set
 *
 * note
 * - keeps whatever the previously cached value was, while returning the new value
 *
 * default
 * - process.env.CACHE_BYPASS_SET ? process.env.CACHE_BYPASS_SET === 'true' : process.env.CACHE_BYPASS === 'true'
 */
set?: (input: Parameters<L>) => boolean;
};
}
/**
 * method to get the output cache option chosen by the user from the cache input
 *
 * note
 * - normalizes the two accepted shapes of the `cache` option: given the bare
 *   cache option it is presumably returned as-is; given the object form, the
 *   `.output` member is presumably extracted — confirm against implementation
 */
export declare const getOutputCacheOptionFromCacheInput: <L extends AsyncLogic, C extends SimpleCache<any>>(cacheInput: WithSimpleCachingCacheOption<Parameters<L>, C> | {
/**
 * the cache to cache output to
 */
output: WithSimpleCachingCacheOption<Parameters<L>, C>;
/**
 * the cache to use for parallel in memory request deduplication
 *
 * note
 * - by default, this method will use its own in-memory cache for deduplication
 * - if required, you can pass in your own in-memory cache to use
 * - for example, if you're instantiating the wrapper on each execution of your logic, instead of globally
 * - important: if passing in your own, make sure that the cache time is at least as long as your longest resolving promise (e.g., 15min) ⚠️
 */
deduplication: SimpleInMemoryCache<any>;
}) => WithSimpleCachingCacheOption<Parameters<L>, C>;
/**
 * a wrapper which adds asynchronous caching to asynchronous logic
 *
 * note
 * - utilizes an additional in-memory synchronous cache under the hood to prevent duplicate requests (otherwise, while the async cache is resolving, a duplicate parallel request may have been made)
 * - can be given a synchronous cache, since anything you can do with an asynchronous cache you can also do with a synchronous cache, but not the other way around
 * - returns a function with the exact same signature `L` as the wrapped logic
 */
export declare const withSimpleCachingAsync: <L extends AsyncLogic, C extends SimpleCache<any>>(logic: L, { cache: cacheOption, serialize: { key: serializeKey, value: serializeValue, }, deserialize: { value: deserializeValue }, expiration, bypass, }: WithSimpleCachingAsyncOptions<L, C>) => L;