alepha
Alepha is a convention-driven TypeScript framework for building robust, end-to-end type-safe applications, from serverless APIs to full-stack React apps.
import * as _alepha_core1 from "alepha";
import { Alepha, AlephaError, Descriptor, DescriptorArgs, KIND, Static, StaticEncode, TArray, TBigInt, TBoolean, TInteger, TNull, TNumber, TNumberOptions, TObject, TObjectOptions, TOptional, TOptionalAdd, TRecord, TSchema, TString, TStringOptions, TUnion, TUnsafe } from "alepha";
import { DateTime, DateTimeProvider } from "alepha/datetime";
import * as drizzle_orm0 from "drizzle-orm";
import { BuildExtraConfigColumns, SQL, SQLWrapper, sql } from "drizzle-orm";
import * as drizzle_orm_pg_core0 from "drizzle-orm/pg-core";
import { LockConfig, LockStrength, PgColumn, PgColumnBuilderBase, PgDatabase, PgInsertValue, PgSchema, PgSelectBase, PgSequenceOptions, PgTableExtraConfigValue, PgTableWithColumns, PgTransaction, PgTransactionConfig, UpdateDeleteAction } from "drizzle-orm/pg-core";
import * as _alepha_logger0 from "alepha/logger";
import * as _alepha_lock0 from "alepha/lock";
import { PostgresJsDatabase } from "drizzle-orm/postgres-js";
import postgres from "postgres";
import * as _alepha_retry0 from "alepha/retry";
import * as typebox8 from "typebox";
import { PgTransactionConfig as PgTransactionConfig$1 } from "drizzle-orm/pg-core/session";
import * as DrizzleKit from "drizzle-kit/api";
import { MigrationConfig } from "drizzle-orm/migrator";
import * as dayjs0 from "dayjs";
import { UpdateDeleteAction as UpdateDeleteAction$1 } from "drizzle-orm/pg-core/foreign-keys";
export * from "drizzle-orm/pg-core";
//#region src/schemas/insertSchema.d.ts
/**
* Transforms a TObject schema for insert operations.
* All default properties at the root level are made optional.
*
* @example
* Before: { name: string; age: number(default=0); }
* After: { name: string; age?: number; }
*/
type TObjectInsert<T extends TObject> = TObject<{ [K in keyof T["properties"]]: T["properties"][K] extends {
[PG_DEFAULT]: any;
} | {
"~optional": true;
} ? TOptional<T["properties"][K]> : T["properties"][K] }>;
declare const insertSchema: <T extends TObject>(obj: T) => TObjectInsert<T>;
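/*
 * A minimal usage sketch for `insertSchema`, assuming it is exported from
 * "alepha/postgres" and reusing the `userEntity` defined in the `$entity`
 * example further below:
 *
 * ```ts
 * // Columns carrying a database default (PG_DEFAULT) become optional,
 * // so callers may omit them when inserting.
 * const insertBody = insertSchema(userEntity.schema);
 *
 * // `EntityDescriptor#insertSchema` exposes the same transform as a getter.
 * const sameBody = userEntity.insertSchema;
 * ```
 */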
//#endregion
//#region src/schemas/updateSchema.d.ts
/**
* Transforms a TObject schema for update operations.
* All optional properties at the root level are made nullable (i.e., `T | null`).
* This allows an API endpoint to explicitly accept `null` to clear an optional field in the database.
*
* @example
* Before: { name?: string; age: number; }
* After: { name?: string | null; age: number; }
*/
type TObjectUpdate<T extends TObject> = TObject<{ [K in keyof T["properties"]]: T["properties"][K] extends TOptional<infer U> ? TOptional<TUnion<[U, TNull]>> : T["properties"][K] }>;
declare const updateSchema: <T extends TObject>(schema: T) => TObjectUpdate<T>;
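/*
 * A minimal sketch for `updateSchema`, assuming `t.optional(...)` mirrors
 * TypeBox's `Type.Optional` (field names are illustrative):
 *
 * ```ts
 * const profileSchema = t.object({
 *   name: t.text(),
 *   bio: t.optional(t.text()),
 * });
 *
 * // `bio` becomes nullable, so a PATCH payload can send `bio: null`
 * // to explicitly clear the column.
 * const profileUpdate = updateSchema(profileSchema);
 * ```
 */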
//#endregion
//#region src/descriptors/$entity.d.ts
/**
* Creates a database entity descriptor that defines table structure using TypeBox schemas.
*
* @example
* ```ts
* import { t } from "alepha";
* import { $entity } from "alepha/postgres";
*
* const userEntity = $entity({
* name: "users",
* schema: t.object({
* id: pg.primaryKey(),
* name: t.text(),
* email: t.email(),
* }),
* });
* ```
*/
declare const $entity: {
<TSchema$1 extends TObject>(options: EntityDescriptorOptions<TSchema$1>): EntityDescriptor<TSchema$1>;
[KIND]: typeof EntityDescriptor;
};
interface EntityDescriptorOptions<T extends TObject, Keys = keyof Static<T>> {
/**
* The database table name that will be created for this entity.
* If not provided, name will be inferred from the $repository variable name.
*/
name: string;
/**
* TypeBox schema defining the table structure and column types.
*/
schema: T;
/**
* Database indexes to create for query optimization. A combined sketch with `foreignKeys` follows this interface.
*/
indexes?: (Keys | {
/**
* Single column to index.
*/
column: Keys;
/**
* Whether this should be a unique index (enforces uniqueness constraint).
*/
unique?: boolean;
/**
* Custom name for the index. If not provided, generates name automatically.
*/
name?: string;
} | {
/**
* Multiple columns for composite index (order matters for query optimization).
*/
columns: Keys[];
/**
* Whether this should be a unique index (enforces uniqueness constraint).
*/
unique?: boolean;
/**
* Custom name for the index. If not provided, generates name automatically.
*/
name?: string;
})[];
/**
* Foreign key constraints to maintain referential integrity.
*/
foreignKeys?: Array<{
/**
* Optional name for the foreign key constraint.
*/
name?: string;
/**
* Local columns that reference the foreign table.
*/
columns: Array<keyof Static<T>>;
/**
* Referenced columns in the foreign table.
* Must be EntityColumn references from other entities.
*/
foreignColumns: Array<() => EntityColumn<any>>;
}>;
/**
* Additional table constraints for data validation.
*
* Constraints enforce business rules at the database level, providing
* an additional layer of data integrity beyond application validation.
*
* **Constraint Types**:
* - **Unique constraints**: Prevent duplicate values across columns
* - **Check constraints**: Enforce custom validation rules with SQL expressions
*
* @example
* ```ts
* constraints: [
* {
* name: "unique_user_email",
* columns: ["email"],
* unique: true
* },
* {
* name: "valid_age_range",
* columns: ["age"],
* check: sql`age >= 0 AND age <= 150`
* },
* {
* name: "unique_user_username_per_tenant",
* columns: ["tenantId", "username"],
* unique: true
* }
* ]
* ```
*/
constraints?: Array<{
/**
* Columns involved in this constraint.
*/
columns: Array<keyof Static<T>>;
/**
* Optional name for the constraint.
*/
name?: string;
/**
* Whether this is a unique constraint.
*/
unique?: boolean | {};
/**
* SQL expression for check constraint validation.
*/
check?: SQL;
}>;
/**
* Advanced Drizzle ORM configuration for complex table setups.
*/
config?: (self: BuildExtraConfigColumns<string, FromSchema<T>, "pg">) => PgTableExtraConfigValue[];
}
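/*
 * A combined sketch of the `indexes` and `foreignKeys` options, referencing the
 * `userEntity` from the `$entity` example above (the `profiles` entity and its
 * columns are illustrative):
 *
 * ```ts
 * const profileEntity = $entity({
 *   name: "profiles",
 *   schema: t.object({
 *     id: pg.primaryKey(),
 *     userId: t.uuid(),
 *     email: t.email(),
 *   }),
 *   indexes: [
 *     "userId",                                       // single-column index
 *     { columns: ["userId", "email"], unique: true }, // composite unique index
 *   ],
 *   foreignKeys: [
 *     {
 *       columns: ["userId"],
 *       foreignColumns: [() => userEntity.cols.id],
 *     },
 *   ],
 * });
 * ```
 */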
declare class EntityDescriptor<T extends TObject = TObject> {
readonly options: EntityDescriptorOptions<T>;
constructor(options: EntityDescriptorOptions<T>);
alias(alias: string): this;
get cols(): EntityColumns<T>;
get name(): string;
get schema(): T;
get insertSchema(): TObjectInsert<T>;
get updateSchema(): TObjectUpdate<T>;
}
/**
* Convert a schema to columns.
*/
type FromSchema<T extends TObject> = { [key in keyof T["properties"]]: PgColumnBuilderBase };
type SchemaToTableConfig<T extends TObject> = {
name: string;
schema: string | undefined;
columns: { [key in keyof T["properties"]]: PgColumn };
dialect: string;
};
type EntityColumn<T extends TObject> = {
name: string;
entity: EntityDescriptor<T>;
};
type EntityColumns<T extends TObject> = { [key in keyof T["properties"]]: EntityColumn<T> };
//#endregion
//#region src/constants/PG_SYMBOLS.d.ts
declare const PG_DEFAULT: unique symbol;
declare const PG_PRIMARY_KEY: unique symbol;
declare const PG_CREATED_AT: unique symbol;
declare const PG_UPDATED_AT: unique symbol;
declare const PG_DELETED_AT: unique symbol;
declare const PG_VERSION: unique symbol;
declare const PG_IDENTITY: unique symbol;
declare const PG_ENUM: unique symbol;
declare const PG_REF: unique symbol;
/**
* @deprecated Use `PG_IDENTITY` instead.
*/
declare const PG_SERIAL: unique symbol;
type PgDefault = typeof PG_DEFAULT;
type PgRef = typeof PG_REF;
type PgPrimaryKey = typeof PG_PRIMARY_KEY;
type PgSymbols = {
[PG_DEFAULT]: {};
[PG_PRIMARY_KEY]: {};
[PG_CREATED_AT]: {};
[PG_UPDATED_AT]: {};
[PG_DELETED_AT]: {};
[PG_VERSION]: {};
[PG_IDENTITY]: PgIdentityOptions;
[PG_REF]: PgRefOptions;
[PG_ENUM]: PgEnumOptions;
/**
* @deprecated Use `PG_IDENTITY` instead.
*/
[PG_SERIAL]: {};
};
type PgSymbolKeys = keyof PgSymbols;
type PgIdentityOptions = {
mode: "always" | "byDefault";
} & PgSequenceOptions & {
name?: string;
};
interface PgEnumOptions {
name?: string;
}
interface PgRefOptions {
ref: () => {
name: string;
entity: EntityDescriptor;
};
actions?: {
onUpdate?: UpdateDeleteAction;
onDelete?: UpdateDeleteAction;
};
}
//#endregion
//#region src/errors/PgError.d.ts
declare class PgError extends AlephaError {
name: string;
constructor(message: string, cause?: unknown);
}
//#endregion
//#region src/helpers/pgAttr.d.ts
/**
* Decorates a typebox schema with a Postgres attribute.
*
* > It's just a fancy way to add Symbols to a field.
*
* @example
* ```ts
* import { t } from "alepha";
* import { PG_UPDATED_AT } from "../constants/PG_SYMBOLS";
*
* export const updatedAtSchema = pgAttr(
* t.datetime(), PG_UPDATED_AT,
* );
* ```
*/
declare const pgAttr: <T extends TSchema, Attr extends PgSymbolKeys>(type: T, attr: Attr, value?: PgSymbols[Attr]) => PgAttr<T, Attr>;
/**
* Retrieves the fields of a schema that have a specific attribute.
*/
declare const getAttrFields: (schema: TObject, name: PgSymbolKeys) => PgAttrField[];
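/*
 * A small sketch of `getAttrFields`, using the `userEntity` schema from the
 * `$entity` example above:
 *
 * ```ts
 * // Lists every root-level field flagged with the PG_PRIMARY_KEY symbol.
 * const pkFields = getAttrFields(userEntity.schema, PG_PRIMARY_KEY);
 * pkFields.map((field) => field.key); // e.g. ["id"]
 * ```
 */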
/**
* Type representation.
*/
type PgAttr<T extends TSchema, TAttr extends PgSymbolKeys> = T & { [K in TAttr]: PgSymbols[K] };
interface PgAttrField {
key: string;
type: TSchema;
data: any;
nested?: any[];
one?: boolean;
}
//#endregion
//#region src/interfaces/FilterOperators.d.ts
interface FilterOperators<TValue> {
/**
* Test that two values are equal.
*
* Remember that the SQL standard dictates that
* two NULL values are not equal, so if you want to test
* whether a value is null, you may want to use
* `isNull` instead.
*
* ## Examples
*
* ```ts
* // Select cars made by Ford
* db.select().from(cars)
* .where(eq(cars.make, 'Ford'))
* ```
*
* @see isNull for a way to test equality to NULL.
*/
eq?: TValue;
/**
* Test that two values are not equal.
*
* Remember that the SQL standard dictates that
* two NULL values are not equal, so if you want to test
* whether a value is not null, you may want to use
* `isNotNull` instead.
*
* ## Examples
*
* ```ts
* // Select cars not made by Ford
* db.select().from(cars)
* .where(ne(cars.make, 'Ford'))
* ```
*
* @see isNotNull for a way to test whether a value is not null.
*/
ne?: TValue;
/**
* Test that the first expression passed is greater than
* the second expression.
*
* ## Examples
*
* ```ts
* // Select cars made after 2000.
* db.select().from(cars)
* .where(gt(cars.year, 2000))
* ```
*
* @see gte for greater-than-or-equal
*/
gt?: TValue;
/**
* Test that the first expression passed is greater than
* or equal to the second expression. Use `gt` to
* test whether an expression is strictly greater
* than another.
*
* ## Examples
*
* ```ts
* // Select cars made on or after 2000.
* db.select().from(cars)
* .where(gte(cars.year, 2000))
* ```
*
* @see gt for a strictly greater-than condition
*/
gte?: TValue;
/**
* Test that the first expression passed is less than
* the second expression.
*
* ## Examples
*
* ```ts
* // Select cars made before 2000.
* db.select().from(cars)
* .where(lt(cars.year, 2000))
* ```
*
* @see lte for a less-than-or-equal condition
*/
lt?: TValue;
/**
* Test that the first expression passed is less than
* or equal to the second expression.
*
* ## Examples
*
* ```ts
* // Select cars made before 2000.
* db.select().from(cars)
* .where(lte(cars.year, 2000))
* ```
*
* @see lt for a strictly less-than condition
*/
lte?: TValue;
/**
* Test whether the first parameter, a column or expression,
* has a value from a list passed as the second argument.
*
* ## Throws
*
* The list passed as the second argument can't be empty:
* if an empty array is provided, this method will throw.
*
* ## Examples
*
* ```ts
* // Select cars made by Ford or GM.
* db.select().from(cars)
* .where(inArray(cars.make, ['Ford', 'GM']))
* ```
*
* @see notInArray for the inverse of this test
*/
inArray?: TValue[];
/**
* Test whether the first parameter, a column or expression,
* has a value that is not present in a list passed as the
* second argument.
*
* ## Throws
*
* The list passed as the second argument can't be empty:
* if an empty array is provided, this method will throw.
*
* ## Examples
*
* ```ts
* // Select cars made by any company except Ford or GM.
* db.select().from(cars)
* .where(notInArray(cars.make, ['Ford', 'GM']))
* ```
*
* @see inArray for the inverse of this test
*/
notInArray?: TValue[];
/**
* Test whether an expression is not NULL. By the SQL standard,
* NULL is neither equal nor not equal to itself, so
* it's recommended to use `isNull` and `isNotNull` for
* comparisons to NULL.
*
* ## Examples
*
* ```ts
* // Select cars that have been discontinued.
* db.select().from(cars)
* .where(isNotNull(cars.discontinuedAt))
* ```
*
* @see isNull for the inverse of this test
*/
isNotNull?: true;
/**
* Test whether an expression is NULL. By the SQL standard,
* NULL is neither equal nor not equal to itself, so
* it's recommended to use `isNull` and `isNotNull` for
* comparisons to NULL.
*
* ## Examples
*
* ```ts
* // Select cars that have no discontinuedAt date.
* db.select().from(cars)
* .where(isNull(cars.discontinuedAt))
* ```
*
* @see isNotNull for the inverse of this test
*/
isNull?: true;
/**
* Test whether an expression is between two values. This
* is an easier way to express range tests, which would be
* expressed mathematically as `x <= a <= y` but in SQL
* must be written as `a >= x AND a <= y`.
*
* Between is inclusive of the endpoints: if `column`
* is equal to `min` or `max`, it will be TRUE.
*
* ## Examples
*
* ```ts
* // Select cars made between 1990 and 2000
* db.select().from(cars)
* .where(between(cars.year, 1990, 2000))
* ```
*
* @see notBetween for the inverse of this test
*/
between?: [number, number];
/**
* Test whether an expression is not between two values.
*
* Like `between`, this is inclusive of its endpoints, so if
* `column` is equal to `min` or `max`, the condition
* evaluates to FALSE.
*
* ## Examples
*
* ```ts
* // Exclude cars made in the 1970s
* db.select().from(cars)
* .where(notBetween(cars.year, 1970, 1979))
* ```
*
* @see between for the inverse of this test
*/
notBetween?: [number, number];
/**
* Compare a column to a pattern, which can include `%` and `_`
* characters to match multiple variations. Including `%`
* in the pattern matches zero or more characters, and including
* `_` will match a single character.
*
* ## Examples
*
* ```ts
* // Select all cars with 'Turbo' in their names.
* db.select().from(cars)
* .where(like(cars.name, '%Turbo%'))
* ```
*
* @see ilike for a case-insensitive version of this condition
*/
like?: string;
/**
* The inverse of like - this tests that a given column
* does not match a pattern, which can include `%` and `_`
* characters to match multiple variations. Including `%`
* in the pattern matches zero or more characters, and including
* `_` will match a single character.
*
* ## Examples
*
* ```ts
* // Select all cars that don't have "Rover" in their name.
* db.select().from(cars)
* .where(notLike(cars.name, '%Rover%'))
* ```
*
* @see like for the inverse condition
* @see notIlike for a case-insensitive version of this condition
*/
notLike?: string;
/**
* Case-insensitively compare a column to a pattern,
* which can include `%` and `_`
* characters to match multiple variations. Including `%`
* in the pattern matches zero or more characters, and including
* `_` will match a single character.
*
* Unlike `like`, this performs a case-insensitive comparison.
*
* ## Examples
*
* ```ts
* // Select all cars with 'Turbo' in their names.
* db.select().from(cars)
* .where(ilike(cars.name, '%Turbo%'))
* ```
*
* @see like for a case-sensitive version of this condition
*/
ilike?: string;
/**
* The inverse of ilike - this case-insensitively tests that a given column
* does not match a pattern, which can include `%` and `_`
* characters to match multiple variations. Including `%`
* in the pattern matches zero or more characters, and including
* `_` will match a single character.
*
* ## Examples
*
* ```ts
* // Select all cars that don't have "Rover" in their name, case-insensitively.
* db.select().from(cars)
* .where(notIlike(cars.name, '%Rover%'))
* ```
*
* @see ilike for the inverse condition
* @see notLike for a case-sensitive version of this condition
*/
notIlike?: string;
/**
* Test that a column or expression contains all elements of
* the list passed as the second argument.
*
* ## Throws
*
* The list passed as the second argument can't be empty:
* if an empty array is provided, this method will throw.
*
* ## Examples
*
* ```ts
* // Select posts whose tags contain "Typescript" and "ORM".
* db.select().from(posts)
* .where(arrayContains(posts.tags, ['Typescript', 'ORM']))
* ```
*
* @see arrayContained to find if an array contains all elements of a column or expression
* @see arrayOverlaps to find if a column or expression contains any elements of an array
*/
arrayContains?: TValue;
/**
* Test that the list passed as the second argument contains
* all elements of a column or expression.
*
* ## Throws
*
* The list passed as the second argument can't be empty:
* if an empty array is provided, this method will throw.
*
* ## Examples
*
* ```ts
* // Select posts whose tags contain "Typescript", "ORM", or both,
* // excluding posts that have additional tags.
* db.select().from(posts)
* .where(arrayContained(posts.tags, ['Typescript', 'ORM']))
* ```
*
* @see arrayContains to find if a column or expression contains all elements of an array
* @see arrayOverlaps to find if a column or expression contains any elements of an array
*/
arrayContained?: TValue;
/**
* Test that a column or expression contains any elements of
* the list passed as the second argument.
*
* ## Throws
*
* The list passed as the second argument can't be empty:
* if an empty array is provided, this method will throw.
*
* ## Examples
*
* ```ts
* // Select posts whose tags contain "Typescript", "ORM", or both.
* db.select().from(posts)
* .where(arrayOverlaps(posts.tags, ['Typescript', 'ORM']))
* ```
*
* @see arrayContains to find if a column or expression contains all elements of an array
* @see arrayContained to find if an array contains all elements of a column or expression
*/
arrayOverlaps?: TValue;
}
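/*
 * Within Alepha repositories these operators are typically written as plain
 * object keys rather than called as Drizzle functions. A sketch against a
 * hypothetical `carRepository` built with `$repository` over a `cars` entity:
 *
 * ```ts
 * const fords = await carRepository.find({
 *   where: {
 *     make: { eq: "Ford" },
 *     year: { between: [1990, 2000] },
 *     name: { ilike: "%turbo%" },
 *   },
 * });
 *
 * // A bare value is also accepted as a shorthand for equality.
 * const volvos = await carRepository.find({ where: { make: "Volvo" } });
 * ```
 */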
//#endregion
//#region src/interfaces/PgQueryWhere.d.ts
type PgQueryWhere<T extends TObject, Relations extends PgRelationMap<TObject> | undefined = undefined> = (PgQueryWhereOperators<T> & PgQueryWhereConditions<T>) | (PgQueryWhereRelations<Relations> & PgQueryWhereOperators<T> & PgQueryWhereConditions<T, Relations>);
type PgQueryWhereOrSQL<T extends TObject, Relations extends PgRelationMap<TObject> | undefined = undefined> = SQLWrapper | PgQueryWhere<T, Relations>;
type PgQueryWhereOperators<T extends TObject> = { [Key in keyof Static<T>]?: FilterOperators<Static<T>[Key]> | Static<T>[Key] | (Static<T>[Key] extends object ? NestedJsonbQuery<Static<T>[Key]> : never) };
type PgQueryWhereConditions<T extends TObject, Relations extends PgRelationMap<TObject> | undefined = undefined> = {
/**
* Combine a list of conditions with the `and` operator. Conditions
* that equal `undefined` are automatically ignored.
*
* ## Examples
*
* ```ts
* db.select().from(cars)
* .where(
* and(
* eq(cars.make, 'Volvo'),
* eq(cars.year, 1950),
* )
* )
* ```
*/
and?: Array<PgQueryWhereOrSQL<T, Relations>>;
/**
* Combine a list of conditions with the `or` operator. Conditions
* that equal `undefined` are automatically ignored.
*
* ## Examples
*
* ```ts
* db.select().from(cars)
* .where(
* or(
* eq(cars.make, 'GM'),
* eq(cars.make, 'Ford'),
* )
* )
* ```
*/
or?: Array<PgQueryWhereOrSQL<T, Relations>>;
/**
* Negate the meaning of an expression using the `not` keyword.
*
* ## Examples
*
* ```ts
* // Select cars _not_ made by GM or Ford.
* db.select().from(cars)
* .where(not(inArray(cars.make, ['GM', 'Ford'])))
* ```
*/
not?: PgQueryWhereOrSQL<T, Relations>;
/**
* Test whether a subquery evaluates to have any rows.
*
* ## Examples
*
* ```ts
* // Users whose `homeCity` column has a match in a cities
* // table.
* db
* .select()
* .from(users)
* .where(
* exists(db.select()
* .from(cities)
* .where(eq(users.homeCity, cities.id))),
* );
* ```
*
* @see notExists for the inverse of this test
*/
exists?: SQLWrapper;
};
type PgQueryWhereRelations<Relations extends PgRelationMap<TObject> | undefined = undefined> = Relations extends PgRelationMap<TObject> ? { [K in keyof Relations]?: PgQueryWhere<Relations[K]["join"]["schema"], Relations[K]["with"]> } : {};
/**
* Recursively allow nested queries for JSONB object/array types
*/
type NestedJsonbQuery<T> = T extends object ? T extends Array<infer U> ? U extends object ? { [K in keyof U]?: FilterOperators<U[K]> | U[K] } : FilterOperators<U> | U : { [K in keyof T]?: FilterOperators<T[K]> | T[K] | (T[K] extends object ? NestedJsonbQuery<T[K]> : never) } : FilterOperators<T> | T;
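/*
 * A sketch of the JSON-style where clause, combining logical conditions with a
 * nested JSONB path (the `userRepository` and its columns are illustrative; the
 * nested `profile` query mirrors the PgJsonQueryManager examples below):
 *
 * ```ts
 * const adults = await userRepository.find({
 *   where: {
 *     and: [
 *       { age: { gte: 18 } },
 *       { or: [{ name: { ilike: "a%" } }, { name: { ilike: "b%" } }] },
 *     ],
 *     // Nested query against a JSONB column:
 *     profile: { contact: { email: { eq: "test@example.com" } } },
 *   },
 * });
 * ```
 */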
//#endregion
//#region src/interfaces/PgQuery.d.ts
/**
* Order direction for sorting
*/
type OrderDirection = "asc" | "desc";
/**
* Single order by clause with column and direction
*/
interface OrderByClause<T> {
column: keyof T;
direction?: OrderDirection;
}
/**
* Order by parameter - supports 3 modes:
* 1. String: orderBy: "name" (defaults to ASC)
* 2. Single object: orderBy: { column: "name", direction: "desc" }
* 3. Array: orderBy: [{ column: "name", direction: "asc" }, { column: "age", direction: "desc" }]
*/
type OrderBy<T> = keyof T | OrderByClause<T> | Array<OrderByClause<T>>;
/**
* Generic query interface for PostgreSQL entities
*/
interface PgQuery<T extends TObject = TObject> {
distinct?: (keyof Static<T>)[];
columns?: (keyof Static<T>)[];
where?: PgQueryWhereOrSQL<T>;
limit?: number;
offset?: number;
orderBy?: OrderBy<Static<T>>;
groupBy?: (keyof Static<T>)[];
}
type PgStatic<T extends TObject, Relations extends PgRelationMap<T>> = Static<T> & { [K in keyof Relations]: Static<Relations[K]["join"]["schema"]> & (Relations[K]["with"] extends PgRelationMap<TObject> ? PgStatic<Relations[K]["join"]["schema"], Relations[K]["with"]> : {}) };
interface PgQueryRelations<T extends TObject = TObject, Relations extends PgRelationMap<T> | undefined = undefined> extends PgQuery<T> {
with?: Relations;
where?: PgQueryWhereOrSQL<T, Relations>;
}
type PgRelationMap<Base extends TObject> = Record<string, PgRelation<Base>>;
type PgRelation<Base extends TObject> = {
type?: "left" | "inner" | "right";
join: {
schema: TObject;
name: string;
};
on: SQLWrapper | [keyof Static<Base>, {
name: string;
}];
with?: PgRelationMap<TObject>;
};
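/*
 * A sketch of a full `PgQuery` object showing the `orderBy` modes described
 * above (repository and column names are illustrative):
 *
 * ```ts
 * const rows = await userRepository.find({
 *   columns: ["id", "name"],
 *   where: { name: { ilike: "a%" } },
 *   // Equivalent forms: "name", { column: "name" }, or an array of clauses.
 *   orderBy: [{ column: "name" }, { column: "createdAt", direction: "desc" }],
 *   limit: 20,
 *   offset: 40,
 * });
 * ```
 */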
//#endregion
//#region src/descriptors/$sequence.d.ts
/**
* Creates a PostgreSQL sequence descriptor for generating unique numeric values.
*/
declare const $sequence: {
(options?: SequenceDescriptorOptions): SequenceDescriptor;
[KIND]: typeof SequenceDescriptor;
};
interface SequenceDescriptorOptions extends PgSequenceOptions {
/**
* The name of the sequence. If not provided, the property key will be used.
*/
name?: string;
provider?: DatabaseProvider;
}
declare class SequenceDescriptor extends Descriptor<SequenceDescriptorOptions> {
readonly provider: DatabaseProvider;
onInit(): void;
get name(): string;
next(): Promise<number>;
current(): Promise<number>;
protected $provider(): DatabaseProvider;
}
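/*
 * A minimal sketch of `$sequence` (the class and sequence name are illustrative):
 *
 * ```ts
 * class InvoiceService {
 *   invoiceNumber = $sequence({ name: "invoice_number" });
 *
 *   async nextInvoiceNumber() {
 *     // `next()` advances the PostgreSQL sequence and returns the new value.
 *     return await this.invoiceNumber.next();
 *   }
 * }
 * ```
 */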
//#endregion
//#region src/services/ModelBuilder.d.ts
/**
* Database-specific table configuration functions
*/
interface TableConfigBuilders<TConfig> {
index: (name: string) => {
on: (...columns: any[]) => TConfig;
};
uniqueIndex: (name: string) => {
on: (...columns: any[]) => TConfig;
};
unique: (name: string) => {
on: (...columns: any[]) => TConfig;
};
check: (name: string, sql: SQL) => TConfig;
foreignKey: (config: {
name: string;
columns: any[];
foreignColumns: any[];
}) => TConfig;
}
/**
* Abstract base class for transforming Alepha Descriptors (Entity, Sequence, etc...)
* into drizzle models (tables, enums, sequences, etc...).
*/
declare abstract class ModelBuilder {
/**
* Build a table from an entity descriptor.
*/
abstract buildTable(entity: EntityDescriptor, options: {
tables: Map<string, unknown>;
enums: Map<string, unknown>;
schema: string;
}): void;
/**
* Build a sequence from a sequence descriptor.
*/
abstract buildSequence(sequence: SequenceDescriptor, options: {
sequences: Map<string, unknown>;
schema: string;
}): void;
/**
* Convert camelCase to snake_case for column names.
*/
protected toColumnName(str: string): string;
/**
* Build the table configuration function for any database.
* This includes indexes, foreign keys, constraints, and custom config.
*
* @param entity - The entity descriptor
* @param builders - Database-specific builder functions
* @param tableResolver - Function to resolve entity references to table columns
* @param customConfigHandler - Optional handler for custom config
*/
protected buildTableConfig<TConfig, TSelf>(entity: EntityDescriptor, builders: TableConfigBuilders<TConfig>, tableResolver?: (entityName: string) => any, customConfigHandler?: (config: any, self: TSelf) => TConfig[]): ((self: TSelf) => TConfig[]) | undefined;
}
//#endregion
//#region src/providers/drivers/DatabaseProvider.d.ts
type SQLLike = SQLWrapper | string;
declare abstract class DatabaseProvider {
protected readonly alepha: Alepha;
protected abstract readonly builder: ModelBuilder;
abstract readonly db: PgDatabase<any>;
abstract readonly dialect: "postgres" | "sqlite";
readonly enums: Map<string, unknown>;
readonly tables: Map<string, unknown>;
readonly sequences: Map<string, unknown>;
table<T extends TObject>(entity: EntityDescriptor<T>): PgTableWithColumns<SchemaToTableConfig<T>>;
get schema(): string;
registerEntity(entity: EntityDescriptor): void;
registerSequence(sequence: SequenceDescriptor): void;
abstract execute(statement: SQLLike): Promise<Record<string, unknown>[]>;
run<T extends TObject>(statement: SQLLike, schema: T): Promise<Array<Static<T>>>;
}
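/*
 * A sketch of `DatabaseProvider#run`, which executes raw SQL and types the rows
 * with a TypeBox schema (the query is illustrative; `provider` is assumed to be
 * an injected DatabaseProvider instance):
 *
 * ```ts
 * const rows = await provider.run(
 *   sql`SELECT count(*)::int AS total FROM users`,
 *   t.object({ total: t.int() }),
 * );
 * // rows is typed as Array<{ total: number }>
 * ```
 */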
//#endregion
//#region src/schemas/pageQuerySchema.d.ts
declare const pageQuerySchema: typebox8.TObject<{
page: typebox8.TOptional<typebox8.TInteger>;
size: typebox8.TOptional<typebox8.TInteger>;
sort: typebox8.TOptional<typebox8.TString>;
}>;
type PageQuery = Static<typeof pageQuerySchema>;
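/*
 * A sketch of a `PageQuery`, typically passed to `$repository#paginate`; the
 * `sort` string follows the "column,-otherColumn" convention parsed by
 * PgQueryManager (the repository is illustrative):
 *
 * ```ts
 * const pageQuery: PageQuery = {
 *   page: 0,
 *   size: 20,
 *   sort: "name,-createdAt", // name ASC, createdAt DESC
 * };
 *
 * const page = await userRepository.paginate(pageQuery, undefined, { count: true });
 * ```
 */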
//#endregion
//#region src/schemas/pageSchema.d.ts
/**
* Create a pagination schema for the given object schema.
*
* > Prefer using `pg.page` directly.
*
* @example
* ```ts
* const userSchema = t.object({ id: t.int(), name: t.text() });
* const userPageSchema = pageSchema(userSchema);
* ```
*
* @see {@link $repository#paginate}
*/
declare const pageSchema: <T extends TObject | TRecord>(objectSchema: T, options?: TObjectOptions) => TPage<T>;
type TPage<T extends TObject | TRecord> = TObject<{
content: TArray<T>;
can: TObject<{
next: TBoolean;
previous: TBoolean;
}>;
page: TObject<{
number: TInteger;
size: TInteger;
totalElements: TOptionalAdd<TInteger>;
}>;
}>;
/**
* Opinionated type definition for a paginated response.
*
* @example
* ```ts
* const page = {
* content: [ ... ],
* can: {
* next: true,
* previous: false
* },
* page: {
* number: 0,
* size: 10,
* totalElements: 1200
* }
* }
* ```
*/
type Page<T> = {
/**
* Array of items on the current page.
*/
content: T[];
can: {
/**
* Indicates if there is a next page.
*/
next: boolean;
/**
* Indicates if there is a previous page.
*/
previous: boolean;
};
page: {
/**
* Page number, starting from 0.
*/
number: number;
/**
* Number of items per page.
*/
size: number;
/**
* Requires `count: true` in the paginate options.
*/
totalElements?: number;
};
};
//#endregion
//#region src/services/PgJsonQueryManager.d.ts
/**
* Manages JSONB query generation for nested object and array queries in PostgreSQL.
* This class handles complex nested queries using PostgreSQL's JSONB operators.
*/
declare class PgJsonQueryManager {
/**
* Check if a query contains nested JSONB queries.
* A nested query is when the value is an object with operator keys.
*/
hasNestedQuery(where: PgQueryWhere<TObject>): boolean;
/**
* Build a JSONB query condition for nested object queries.
* Supports deep nesting like: { profile: { contact: { email: { eq: "test@example.com" } } } }
*
* @param column The JSONB column
* @param path The path to the nested property (e.g., ['profile', 'contact', 'email'])
* @param operator The filter operator (e.g., { eq: "test@example.com" })
* @returns SQL condition
*/
buildJsonbCondition(column: PgColumn, path: string[], operator: FilterOperators<any>): SQL | undefined;
/**
* Build JSONB array query conditions.
* Supports queries like: { addresses: { city: { eq: "Wonderland" } } }
* which translates to: EXISTS (SELECT 1 FROM jsonb_array_elements(addresses) elem WHERE elem->>'city' = 'Wonderland')
*/
buildJsonbArrayCondition(column: PgColumn, path: string[], arrayPath: string, operator: FilterOperators<any>): SQL | undefined;
/**
* Apply a filter operator to a JSONB value.
*/
private applyOperatorToJsonValue;
/**
* Parse a nested query object and extract the path and operator.
* For example: { profile: { contact: { email: { eq: "test@example.com" } } } }
* Returns: { path: ['profile', 'contact', 'email'], operator: { eq: "test@example.com" } }
*/
parseNestedQuery(nestedQuery: any, currentPath?: string[]): Array<{
path: string[];
operator: FilterOperators<any>;
}>;
/**
* Determine if a property is a JSONB column based on the schema.
* A column is JSONB if it's defined as an object or array in the TypeBox schema.
*/
isJsonbColumn(schema: TObject, columnName: string): boolean;
/**
* Check if an array property contains primitive types (string, number, boolean, etc.)
* rather than objects. Primitive arrays should use native Drizzle operators.
* @returns true if the array contains primitives, false if it contains objects
*/
isPrimitiveArray(schema: TObject, columnName: string): boolean;
/**
* Check if a nested path points to an array property.
*/
isArrayProperty(schema: TObject, path: string[]): boolean;
}
//#endregion
//#region src/services/PgQueryManager.d.ts
declare class PgQueryManager {
protected readonly jsonQueryManager: PgJsonQueryManager;
protected readonly alepha: Alepha;
/**
* Convert a query object to a SQL query.
*/
toSQL(query: PgQueryWhereOrSQL<TObject>, options: {
schema: TObject;
col: (key: string) => PgColumn;
joins?: PgJoin[];
}): SQL | undefined;
/**
* Build a JSONB query for nested object/array queries.
*/
protected buildJsonbQuery(column: PgColumn, nestedQuery: any, schema: TObject, columnName: string): SQL | undefined;
/**
* Check if an object has any filter operator properties.
*/
protected hasFilterOperatorProperties(obj: any): boolean;
/**
* Map a filter operator to a SQL query.
*/
mapOperatorToSql(operator: FilterOperators<any> | any, column: PgColumn, columnSchema?: TObject, columnName?: string): SQL | undefined;
/**
* Parse pagination sort string to orderBy format.
* Format: "firstName,-lastName" -> [{ column: "firstName", direction: "asc" }, { column: "lastName", direction: "desc" }]
* - Columns separated by comma
* - Prefix with '-' for DESC direction
*
* @param sort Pagination sort string
* @returns OrderBy array or single object
*/
parsePaginationSort(sort: string): Array<{
column: string;
direction: "asc" | "desc";
}> | {
column: string;
direction: "asc" | "desc";
};
/**
* Normalize orderBy parameter to array format.
* Supports 3 modes:
* 1. String: "name" -> [{ column: "name", direction: "asc" }]
* 2. Object: { column: "name", direction: "desc" } -> [{ column: "name", direction: "desc" }]
* 3. Array: [{ column: "name" }, { column: "age", direction: "desc" }] -> normalized array
*
* @param orderBy The orderBy parameter
* @returns Normalized array of order by clauses
*/
normalizeOrderBy(orderBy: any): Array<{
column: string;
direction: "asc" | "desc";
}>;
/**
* Create a pagination object.
*
* @param entities The entities to paginate.
* @param limit The limit of the pagination.
* @param offset The offset of the pagination.
*/
createPagination<T>(entities: T[], limit?: number, offset?: number): Page<T>;
}
interface PgJoin {
table: string;
schema: TObject;
key: string;
col: (key: string) => PgColumn;
parent?: string;
}
//#endregion
//#region src/services/PgRelationManager.d.ts
declare class PgRelationManager {
/**
* Recursively build joins for the query builder based on the relations map
*/
buildJoins(provider: DatabaseProvider, builder: PgSelectBase<any, any, any>, joins: Array<PgJoin>, withRelations: PgRelationMap<TObject>, table: PgTableWithColumns<any>, parentKey?: string): void;
/**
* Map a row with its joined relations based on the joins definition
*/
mapRowWithJoins(record: Record<string, unknown>, row: Record<string, unknown>, schema: TObject, joins: PgJoin[], parentKey?: string): Record<string, unknown>;
/**
* Check if all values in an object are null (indicates a left join with no match)
*/
private isAllNull;
/**
* Build a schema that includes all join properties recursively
*/
buildSchemaWithJoins(baseSchema: TObject, joins: PgJoin[], parentPath?: string): TObject;
}
//#endregion
//#region src/descriptors/$repository.d.ts
/**
* Creates a repository for database operations on a defined entity.
*
* This descriptor provides a comprehensive, type-safe interface for performing all
* database operations on entities defined with $entity. It offers a rich set of
* CRUD operations, advanced querying capabilities, pagination, transactions, and
* built-in support for audit trails and soft deletes.
*/
declare const $repository: {
<T extends TObject>(optionsOrEntity: EntityDescriptor<T> | EntityDescriptorOptions<T> | RepositoryDescriptorOptions<T>): RepositoryDescriptor<T>;
[KIND]: typeof RepositoryDescriptor;
};
interface RepositoryDescriptorOptions<T extends TObject> {
/**
* The entity table definition created with $entity.
*
* This table:
* - Must be created using the $entity descriptor
* - Defines the schema, indexes, and constraints for the repository
* - Provides type information for all repository operations
* - Must include exactly one primary key field
*
* The repository will automatically:
* - Generate typed CRUD operations based on the entity schema
* - Handle audit fields like createdAt, updatedAt, deletedAt
* - Support optimistic locking if version field is present
* - Provide soft delete functionality if deletedAt field exists
*
* **Entity Requirements**:
* - Must have been created with $entity descriptor
* - Schema must include a primary key field marked with `pg.primaryKey()`
* - Corresponding database table must exist (created via migrations)
*
* @example
* ```ts
* const User = $entity({
* name: "users",
* schema: t.object({
* id: pg.primaryKey(t.uuid()),
* email: t.text({ format: "email" }),
* name: t.text()
* })
* });
*
* const userRepository = $repository({ entity: User });
* ```
*/
entity: EntityDescriptor<T>;
/**
* Override the default PostgreSQL database provider.
*
* By default, the repository will use the injected PostgresProvider from the
* dependency injection container. Use this option to:
* - Connect to a different database
* - Use a specific connection pool
* - Implement custom database behavior
* - Support multi-tenant architectures with database per tenant
*
* **Common Use Cases**:
* - Multi-database applications
* - Read replicas for query optimization
* - Different databases for different entity types
* - Testing with separate test databases
*
* @default Uses injected PostgresProvider
*
* @example ReadOnlyPostgresProvider
* @example TenantSpecificPostgresProvider
* @example TestDatabaseProvider
*/
provider?: DatabaseProvider;
name?: string;
}
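/*
 * A short end-to-end sketch using the `User` entity from the example above
 * (the service class is illustrative):
 *
 * ```ts
 * class UserService {
 *   users = $repository(User);
 *
 *   async register(email: string, name: string) {
 *     return await this.users.create({ id: crypto.randomUUID(), email, name });
 *   }
 *
 *   async rename(id: string, name: string) {
 *     return await this.users.updateById(id, { name });
 *   }
 * }
 * ```
 */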
declare class RepositoryDescriptor<T extends TObject = TObject> extends Descriptor<RepositoryDescriptorOptions<T>> {
protected readonly relationManager: PgRelationManager;
protected readonly queryManager: PgQueryManager;
protected readonly dateTimeProvider: DateTimeProvider;
protected readonly alepha: Alepha;
readonly provider: DatabaseProvider;
constructor(args: DescriptorArgs<RepositoryDescriptorOptions<T>>);
/**
* Get the entity descriptor associated with this repository.
*/
get entity(): EntityDescriptor<T>;
/**
* Represents the primary key of the table.
* - Key is the name of the primary key column.
* - Type is the type (TypeBox) of the primary key column.
*
* ID is mandatory. If the table does not have a primary key, it will throw an error.
*/
get id(): {
type: TSchema;
key: keyof T["properties"];
col: PgColumn;
};
/**
* Get Drizzle table object.
*/
get table(): PgTableWithColumns<SchemaToTableConfig<T>>;
/**
* Get SQL table name. (from Drizzle table object)
*/
get tableName(): string;
/**
* Getter for the database connection from the database provider.
*/
protected get db(): PgDatabase<any>;
/**
* Execute a SQL query.
*
* This method allows executing raw SQL queries against the database.
* This is by far the easiest way to run custom queries that are not covered by the repository's built-in methods!
*
* You must use the `sql` tagged template function from Drizzle ORM to create the query. https://orm.drizzle.team/docs/sql
*
* @example
* ```ts
* class App {
* repository = $repository({ ... });
* async getAdults() {
* const users = repository.table; // Drizzle table object
* await repository.query(sql`SELECT * FROM ${users} WHERE ${users.age} > ${18}`);
* // or better
* await repository.query((users) => sql`SELECT * FROM ${users} WHERE ${users.age} > ${18}`);
* }
* }
* ```
*/
query<R extends TObject = T>(query: SQLLike | ((table: PgTableWithColumns<SchemaToTableConfig<T>>, db: PgDatabase<any>) => SQLLike), schema?: R): Promise<Static<R>[]>;
/**
* Map raw database fields to entity fields. (handles column name differences)
*/
protected mapRawFieldsToEntity(row: Record<string, unknown>): any;
/**
* Get a Drizzle column from the table by its name.
*/
protected col(name: keyof StaticEncode<T>): PgColumn;
/**
* Run a transaction.
*/
transaction<T>(transaction: (tx: PgTransaction<any, Record<string, any>, any>) => Promise<T>, config?: PgTransactionConfig): Promise<T>;
/**
* Start a SELECT query on the table.
*/
protected select(opts?: StatementOptions): drizzle_orm_pg_core0.PgSelectBase<string, Record<string, PgColumn<drizzle_orm0.ColumnBaseConfig<drizzle_orm0.ColumnDataType, string>, {}, {}>>, "single", Record<string, "not-null">, false, never, {
[x: string]: unknown;
}[], {
[x: string]: PgColumn<drizzle_orm0.ColumnBaseConfig<drizzle_orm0.ColumnDataType, string>, {}, {}>;
}>;
/**
* Start a SELECT DISTINCT query on the table.
*/
protected selectDistinct(opts?: StatementOptions, columns?: (keyof Static<T>)[]): drizzle_orm_pg_core0.PgSelectBase<string, Record<string, any>, "partial", Record<string, "not-null">, false, never, {
[x: string]: any;
}[], {
[x: string]: any;
}>;
/**
* Start an INSERT query on the table.
*/
protected insert(opts?: StatementOptions): drizzle_orm_pg_core0.PgInsertBuilder<PgTableWithColumns<SchemaToTableConfig<T>>, any, false>;
/**
* Start an UPDATE query on the table.
*/
protected update(opts?: StatementOptions): drizzle_orm_pg_core0.PgUpdateBuilder<PgTableWithColumns<SchemaToTableConfig<T>>, any>;
/**
* Start a DELETE query on the table.
*/
protected delete(opts?: StatementOptions): drizzle_orm_pg_core0.PgDeleteBase<PgTableWithColumns<SchemaToTableConfig<T>>, any, undefined, undefined, false, never>;
/**
* Create a Drizzle `select` query based on a JSON query object.
*
* > This method is the base for `find`, `findOne`, `findById`, and `paginate`.
*/
find<R extends PgRelationMap<T>>(query?: PgQueryRelations<T, R>, opts?: StatementOptions): Promise<PgStatic<T, R>[]>;
/**
* Find a single entity.
*/
findOne<R extends PgRelationMap<T>>(query: Pick<PgQueryRelations<T, R>, "with" | "where">, opts?: StatementOptions): Promise<PgStatic<T, R>>;
/**
* Find entities with pagination.
*
* It uses the same parameters as `find()`, but adds pagination metadata to the response.
*
* > Pagination can also run a count query to get the total number of elements (enable with `count: true`).
*/
paginate<R extends PgRelationMap<T>>(pagination?: PageQuery, query?: PgQueryRelations<T, R>, opts?: StatementOptions & {
count?: boolean;
}): Promise<Page<PgStatic<T, R>>>;
/**
* Find an entity by ID.
*
* This is a convenience method for `findOne` with a where clause on the primary key.
* If you need more complex queries, use `findOne` instead.
*/
findById(id: string | number, opts?: StatementOptions): Promise<Static<T>>;
/**
* Helper to create a type-safe query object.
*/
createQuery(): PgQuery<T>;
/**
* Helper to create a type-safe where clause.
*/
createQueryWhere(): PgQueryWhere<T>;
/**
* Create an entity.
*
* @param data The entity to create.
* @param opts The options for creating the entity.
* @returns The ID of the created entity.
*/
create(data: Static<TObjectInsert<T>>, opts?: StatementOptions): Promise<Static<T>>;
/**
* Create many entities.
*
* @param values The entities to create.
* @param opts The statement options.
* @returns The created entities.
*/
createMany(values: Array<Static<TObjectInsert<T>>>, opts?: StatementOptions): Promise<Static<T>[]>;
/**
* Find an entity and update it.
*/
updateOne(where: PgQueryWhereOrSQL<T>, data: Partial<Static<TObjectUpdate<T>>>, opts?: StatementOptions): Promise<Static<T>>;
/**
* Save a given entity.
*
* @example
* ```ts
* const entity = await repository.findById(1);
* entity.name = "New Name"; // update a field
* delete entity.description; // delete a field
* await repository.save(entity);
* ```
*
* Difference with `updateById/updateOne`:
*
* - requires the entity to be fetched first (whole object is expected)
* - check pg.version() if present -> optimistic locking
* - validate entity against schema
* - undefined values will be set to null, not ignored!
*
* @see {@link PostgresTypeProvider#version}
* @see {@link PgVersionMismatchError}
*/
save(entity: Static<T>, opts?: StatementOptions): Promise<void>;
/**
* Find an entity by ID and update it.
*/
updateById(id: string | number, data: Partial<Static<TObjectUpdate<T>>>, opts?: StatementOptions): Promise<Static<T>>;
/**
* Find many entities and update all of them.
*/
updateMany(where: PgQueryWhereOrSQL<T>, data: Partial<Static<TObjectUpdate<T>>>, opts?: StatementOptions): Promise<Array<number | string>>;
/**
* Find many and delete all of them.
* @returns Array of deleted entity IDs
*/
deleteMany(where?: PgQueryWhereOrSQL<T>, opts?: StatementOptions): Promise<Array<number | string>>;
/**
* Delete all entities.
* @returns Array of deleted entity IDs
*/
clear(opts?: StatementOptions): Promise<Array<number | string>>;
/**
* Delete the given entity.
*
* You must fetch the entity first in order to delete it.
* @returns Array containing the deleted entity ID
*/
destroy(entity: Static<T>, opts?: StatementOptions): Promise<Array<number | string>>;
/**
* Find an entity and delete it.
* @returns Array of deleted entity IDs (should contain at most one ID)
*/
deleteOne(where?: PgQueryWhereOrSQL<T>, opts?: StatementOptions): Promise<Array<number | string>>;
/**
* Find an entity by ID and delete it.
* @returns Array containing the deleted entity ID
* @throws PgEntityNotFoundError if the entity is not found
*/
deleteById(id: string | number, opts?: StatementOptions): Promise<Array<number | string>>;
/**
* Count entities.
*/
count(where?: PgQueryWhereOrSQL<T>, opts?: StatementOptions): Promise<number>;
protected conflictMessagePattern: string;
protected handleError(error: unknown, message: string): PgError;
protected withDeletedAt(where: PgQueryWhereOrSQL<T>, opts?: {
force?: boolean;
}): PgQueryWhereOrSQL<T>;
protected deletedAt(): PgAttrField | undefined;
/**
* Convert something to valid Pg Insert Value.
*/
protected cast(data: any, insert: boolean): PgInsertValue<PgTableWithColumns<SchemaToTableConfig<T>>>;
/**
* Transform a row from the database into a clean entity.
*
* - Validate against schema
* - Replace all null values by undefined
* - Fix date-time and date fields to ISO strings
* - Cast BigInt to string
*/
protected clean<T extends TObject>(row: Record<string, unknown>, schema: T): Static<T>;
/**
* Clean a row with joins recursively
*/
protected cleanWithJoins<T extends TObject>(row: Record<string, unknown>, schema: T, joins: PgJoin[], parentPath?: string): Static<T>;
/**
* Convert a where clause to SQL.
*/
protected toSQL(where: PgQueryWhereOrSQL<T>, joins?: PgJoin[]): SQL | undefined;
/**
* Get the where clause for an ID.
*
* @param id The ID to get the where clause for.
* @returns The where clause for the ID.
*/
protected getWhereId(id: string | number): PgQueryWhere<T>;
/**
* Find a primary key in the schema.
*/
protected getPrimaryKey(schema: TObject): {
key: string;
col: PgColumn<drizzle_orm0.ColumnBaseConfig<drizzle_orm0.ColumnDataType, string>, {}, {}>;
type: TSchema;
};
protected $provider(): DatabaseProvider;
}
/**
* The options for a statement.
*/
interface StatementOptions {
/**
* Transaction to use. A usage sketch follows this interface.
*/
tx?: PgTransaction<any, Record<string, any>>;
/**
* Lock strength.
*/
for?: LockStrength | {
config: LockConfig;
strength: LockStrength;
};
/**
* If true, ignore soft delete.
*/
force?: boolean;
/**
* Force the current time.
*/
now?: DateTime;
}
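/*
 * A sketch of `RepositoryDescriptor#transaction` combined with the `tx`
 * statement option (repository and fields are illustrative; `userId` is assumed
 * to be in scope):
 *
 * ```ts
 * await userRepository.transaction(async (tx) => {
 *   const user = await userRepository.findById(userId, { tx });
 *   // `save` re-validates the whole entity and applies optimistic locking
 *   // when a version field is present.
 *   await userRepository.save({ ...user, name: "New Name" }, { tx });
 *   // Any unhandled error inside the callback rolls the transaction back.
 * });
 * ```
 */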
//#endregion
//#region src/descriptors/$transaction.d.ts
/**
* Creates a transaction descriptor for database operations requiring atomicity and consistency.
*
* This descriptor provides a convenient way to wrap database operations in PostgreSQL
* transactions, ensuring ACID properties and automatic retry logic for version conflicts.
* It integrates seamlessly with the repository pattern and provides built-in handling
* for optimistic locking scenarios with automatic retry on version mismatches.
*
* **Important Notes**:
* - All operations within the transaction handler are atomic
* - Automatic retry on `PgVersionMismatchError` for optimistic locking
* - Pass `{ tx }` option to all repository operations within the transaction
* - Transactions are automatically rolled back on any unhandled error
* - Use appropriate isolation levels based on your consistency requirements
*/
declare const $transaction: <T extends any[], R>(opts: TransactionDescriptorOptions<T, R>) => _alepha_retry0.RetryDescriptorFn<(...args: T) => Promise<R>>;
interface TransactionDescriptorOptions<T extends any[], R> {
/**
* Transaction handler function that contains all database operations to be executed atomically.
*
* This function:
* - Receives a transaction object as the first parameter
* - Should pass the transaction to all repository operations via `{ tx }` option
* - All operations within are automatically rolled back if any error occurs
* - Has access to the full Alepha dependency injection container
* - Will be automatically retried if a `PgVersionMismatchError` occurs
*
* **Transaction Guidelines**:
* - Keep transactions as short as possible to minimize lock contention
* - Always pass the `tx` parameter to repository operations
* - Handle expected business errors gracefully
* - Log im