// Type declarations for the @convex-dev/aggregate npm package (index.d.ts).
import { GenericDataModel, GenericMutationCtx, GenericQueryCtx } from "convex/server";
import { Key } from "../component/btree.js";
import { api } from "../component/_generated/api.js";
import { UseApi } from "./useApi.js";
import { Bound } from "./positions.js";
/** The generated component API (`_generated/api`) that `Aggregate` and `Randomize` call into. */
export type UsedAPI = UseApi<typeof api>;
/** Minimal context for read methods: anything exposing a Convex `runQuery` (e.g. a query or mutation ctx). */
export type RunQueryCtx = {
  runQuery: GenericQueryCtx<GenericDataModel>["runQuery"];
};
/** Minimal context for write methods: anything exposing a Convex `runMutation` (e.g. a mutation ctx). */
export type RunMutationCtx = {
  runMutation: GenericMutationCtx<GenericDataModel>["runMutation"];
};
/** A single entry in the aggregate data structure. */
export type Item<K extends Key, ID extends string> = {
  /** Sort key; any Convex value (number, string, array, ...). */
  key: K;
  /** Unique identifier for this item. */
  id: ID;
  /** Number aggregated by summing; see `Aggregate.sum`. */
  summand: number;
};
/**
 * Optional lower/upper bounds restricting a read (count, sum, min, max,
 * random) to a range of keys. An omitted side is unbounded.
 */
export type Bounds<K extends Key, ID extends string> = {
  lower?: Bound<K, ID>;
  upper?: Bound<K, ID>;
};
// Re-export the component's key/bound types for callers' convenience.
export type { Key, Bound };
/**
 * Write data to be aggregated, and read aggregated data.
 *
 * The data structure is effectively a key-value store sorted by key, where the
 * value is an ID and an optional summand.
 * 1. The key can be any Convex value (number, string, array, etc.).
 * 2. The ID is a string which should be unique.
 * 3. The summand is a number which is aggregated by summing. If not provided,
 *    it's assumed to be zero.
 *
 * Once values have been added to the data structure, you can query for the
 * count and sum of items between a range of keys.
 */
export declare class Aggregate<K extends Key, ID extends string> {
  // Handle to the backing component API; all methods delegate through it.
  private component;
  /** @param component - The mounted aggregate component API (see `UsedAPI`). */
  constructor(component: UsedAPI);
  /**
   * Returns the item at the given offset/index/rank in the order of key.
   *
   * @param offset - Zero-based rank in key order.
   */
  at(ctx: RunQueryCtx, offset: number): Promise<Item<K, ID>>;
  /**
   * Returns the rank/offset/index of the given key.
   * Specifically, it returns the index of the first item with a key >= the given key.
   *
   * @param id - Optional tiebreaker when multiple items share the same key.
   */
  offsetOf(ctx: RunQueryCtx, key: K, id?: ID): Promise<number>;
  /**
   * Counts items between the given lower and upper bounds.
   * With no bounds, counts all items.
   */
  count(ctx: RunQueryCtx, bounds?: Bounds<K, ID>): Promise<number>;
  /**
   * Adds up the summands of items between the given lower and upper bounds.
   * With no bounds, sums over all items.
   */
  sum(ctx: RunQueryCtx, bounds?: Bounds<K, ID>): Promise<number>;
  /**
   * Gets the minimum item within the given bounds.
   *
   * @returns The smallest-keyed item in range, or `null` if the range is empty.
   */
  min(ctx: RunQueryCtx, bounds?: Bounds<K, ID>): Promise<Item<K, ID> | null>;
  /**
   * Gets the maximum item within the given bounds.
   *
   * @returns The largest-keyed item in range, or `null` if the range is empty.
   */
  max(ctx: RunQueryCtx, bounds?: Bounds<K, ID>): Promise<Item<K, ID> | null>;
  /**
   * Gets a uniformly random item within the given bounds.
   *
   * @returns A random item in range, or `null` if the range is empty.
   */
  random(ctx: RunQueryCtx, bounds?: Bounds<K, ID>): Promise<Item<K, ID> | null>;
  /**
   * Insert a new key into the data structure.
   * The id should be unique.
   * If not provided, the summand is assumed to be zero.
   * If the tree does not exist yet, it will be initialized with the default
   * maxNodeSize and lazyRoot=true.
   */
  insert(ctx: RunMutationCtx, key: K, id: ID, summand?: number): Promise<void>;
  /**
   * Delete the key with the given ID from the data structure.
   * Throws if the given key and ID do not exist.
   */
  delete(ctx: RunMutationCtx, key: K, id: ID): Promise<void>;
  /**
   * Update an existing item in the data structure.
   * This is effectively a delete followed by an insert, but it's performed
   * atomically so it's impossible to view the data structure with the key missing.
   */
  replace(ctx: RunMutationCtx, currentKey: K, newKey: K, id: ID, summand?: number): Promise<void>;
  /**
   * Equivalents to `insert`, `delete`, and `replace` where the item may or may not exist.
   * This can be useful for live backfills:
   * 1. Update live writes to use these methods to write into the new Aggregate.
   * 2. Run a background backfill, paginating over existing data, calling `insertIfDoesNotExist` on each item.
   * 3. Once the backfill is complete, use `insert`, `delete`, and `replace` for live writes.
   * 4. Begin using the Aggregate read methods.
   */
  /** Like `insert`, but a no-op instead of an error if the item already exists. */
  insertIfDoesNotExist(ctx: RunMutationCtx, key: K, id: ID, summand?: number): Promise<void>;
  /** Like `delete`, but a no-op instead of an error if the item does not exist. */
  deleteIfExists(ctx: RunMutationCtx, key: K, id: ID): Promise<void>;
  /** Like `replace`, but inserts the item if it does not already exist. */
  replaceOrInsert(ctx: RunMutationCtx, currentKey: K, newKey: K, id: ID, summand?: number): Promise<void>;
  /**
   * (re-)initialize the data structure, removing all items if it exists.
   *
   * Change the maxNodeSize if provided, otherwise keep it the same.
   * maxNodeSize is how you tune the data structure's width and depth.
   * Larger values can reduce write contention but increase read latency.
   * Default is 16.
   * Set rootLazy = false to eagerly compute aggregates on the root node, which
   * improves aggregation latency at the expense of making all writes contend
   * with each other, so it's only recommended for read-heavy workloads.
   * Default is true.
   */
  clear(ctx: RunMutationCtx, maxNodeSize?: number, rootLazy?: boolean): Promise<void>;
  /**
   * If rootLazy is false (the default is true but it can be set to false by
   * `clear`), the aggregates data structure writes to a single root node on
   * every insert/delete/replace, which can cause contention.
   *
   * If your data structure has frequent writes, you can reduce contention by
   * calling makeRootLazy, which removes the frequent writes to the root node.
   * With a lazy root node, updates will only contend with other updates to the
   * same shard of the tree. The number of shards is determined by maxNodeSize,
   * so larger maxNodeSize can also help.
   */
  makeRootLazy(ctx: RunMutationCtx): Promise<void>;
}
/**
 * Simplified Aggregate API that doesn't have keys or summands, so it's
 * simpler to use for counting all items or getting a random item.
 *
 * See docstrings on Aggregate for more details.
 */
export declare class Randomize<ID extends string> {
  // Handle to the backing component API.
  private component;
  // Internal Aggregate this class wraps; keys/summands are unused here.
  private aggregate;
  /** @param component - The mounted aggregate component API (see `UsedAPI`). */
  constructor(component: UsedAPI);
  /** Counts all items. */
  count(ctx: RunQueryCtx): Promise<number>;
  /** Returns the ID of the item at the given zero-based offset. */
  at(ctx: RunQueryCtx, offset: number): Promise<ID>;
  /** Returns a uniformly random item's ID, or `null` if there are no items. */
  random(ctx: RunQueryCtx): Promise<ID | null>;
  /** Inserts an item; the id should be unique. */
  insert(ctx: RunMutationCtx, id: ID): Promise<void>;
  /** Deletes the item with the given ID. */
  delete(ctx: RunMutationCtx, id: ID): Promise<void>;
  /** Like `insert`, but tolerates the item already existing (useful for backfills). */
  insertIfDoesNotExist(ctx: RunMutationCtx, id: ID): Promise<void>;
  /** Like `delete`, but tolerates the item not existing (useful for backfills). */
  deleteIfExists(ctx: RunMutationCtx, id: ID): Promise<void>;
  /** (re-)initializes the data structure, removing all items; see `Aggregate.clear` for tuning params. */
  clear(ctx: RunMutationCtx, maxNodeSize?: number, rootLazy?: boolean): Promise<void>;
}
//# sourceMappingURL=index.d.ts.map