// Package: alepha — a convention-driven TypeScript framework for building robust,
// end-to-end type-safe applications, from serverless APIs to full-stack React apps.
// (Bundled type declarations for the postgres module; ~1,585 lines / 103 kB.)
import * as _alepha_core1 from "alepha";
import { Alepha, AlephaError, Descriptor, KIND, Service, Static, TArray, TBigInt, TBoolean, TInteger, TKeysToIndexer, TNull, TNumber, TNumberOptions, TObject, TObjectOptions, TOptional, TOptionalAdd, TPick, TRecord, TSchema as TSchema$1, TString, TStringOptions, TUnion } from "alepha";
import * as drizzle_orm6 from "drizzle-orm";
import { BuildColumns, BuildExtraConfigColumns, SQL, SQLWrapper, TableConfig, sql } from "drizzle-orm";
import * as pg$1 from "drizzle-orm/pg-core";
import { AnyPgColumn, LockConfig, LockStrength, PgColumn, PgColumnBuilderBase, PgDatabase, PgInsertValue, PgSequenceOptions, PgTableExtraConfigValue, PgTableWithColumns, PgTransaction, PgTransactionConfig, SelectedFields, TableConfig as TableConfig$1, UpdateDeleteAction } from "drizzle-orm/pg-core";
import { DateTime, DateTimeProvider } from "alepha/datetime";
import * as _alepha_logger1 from "alepha/logger";
import * as _alepha_lock0 from "alepha/lock";
import { PostgresJsDatabase } from "drizzle-orm/postgres-js";
import postgres from "postgres";
import * as _alepha_retry0 from "alepha/retry";
import * as typebox1 from "typebox";
import { PgTransactionConfig as PgTransactionConfig$1 } from "drizzle-orm/pg-core/session";
import * as DrizzleKit from "drizzle-kit/api";
import { MigrationConfig } from "drizzle-orm/migrator";
import { UpdateDeleteAction as UpdateDeleteAction$1 } from "drizzle-orm/pg-core/foreign-keys";
export * from "drizzle-orm/pg-core";
//#region src/constants/PG_SCHEMA.d.ts
/** Unique brand symbol — NOTE(review): presumably keys the Postgres schema metadata attached to entities; confirm against usage. */
declare const PG_SCHEMA: unique symbol;
//#endregion
//#region src/constants/PG_SYMBOLS.d.ts
/** Brands a field as having a database-side default value (made optional on insert — see `TObjectInsert`). */
declare const PG_DEFAULT: unique symbol;
/** Brands a field as the table's primary key. */
declare const PG_PRIMARY_KEY: unique symbol;
/** Brands a field as the auto-populated creation timestamp (`pg.createdAt()`). */
declare const PG_CREATED_AT: unique symbol;
/** Brands a field as the auto-populated last-update timestamp (`pg.updatedAt()`). */
declare const PG_UPDATED_AT: unique symbol;
/** Brands a field as the soft-delete timestamp (`pg.deletedAt()`). */
declare const PG_DELETED_AT: unique symbol;
/** Brands a field as the optimistic-locking version counter (`pg.version()`). */
declare const PG_VERSION: unique symbol;
/** Brands a field as a generated identity column; carries {@link PgIdentityOptions}. */
declare const PG_IDENTITY: unique symbol;
/** Brands a field as a foreign-key reference; carries {@link PgRefOptions}. */
declare const PG_REF: unique symbol;
/**
 * @deprecated Use `PG_IDENTITY` instead.
 */
declare const PG_SERIAL: unique symbol;
// Convenience aliases for the brand symbols' types.
type PgDefault = typeof PG_DEFAULT;
type PgRef = typeof PG_REF;
type PgPrimaryKey = typeof PG_PRIMARY_KEY;
/**
 * Maps each attribute symbol to the option payload it carries when attached
 * to a schema field (most attributes carry no options).
 */
type PgSymbols = {
  [PG_DEFAULT]: {};
  [PG_PRIMARY_KEY]: {};
  [PG_CREATED_AT]: {};
  [PG_UPDATED_AT]: {};
  [PG_DELETED_AT]: {};
  [PG_VERSION]: {};
  [PG_IDENTITY]: PgIdentityOptions;
  [PG_REF]: PgRefOptions;
  /**
   * @deprecated Use `PG_IDENTITY` instead.
   */
  [PG_SERIAL]: {};
};
/** Union of all Postgres attribute brand symbols. */
type PgSymbolKeys = keyof PgSymbols;
/**
 * Options for a `GENERATED ... AS IDENTITY` column: the generation mode plus
 * Drizzle's sequence configuration and an optional sequence name.
 */
type PgIdentityOptions = {
  mode: "always" | "byDefault";
} & PgSequenceOptions & {
  name?: string;
};
/** Options describing a foreign-key reference to another table's column. */
interface PgRefOptions {
  /** Thunk returning the referenced column (deferred — presumably to allow circular table definitions; confirm). */
  ref: () => AnyPgColumn;
  /** Referential actions applied when the referenced row is updated/deleted. */
  actions?: {
    onUpdate?: UpdateDeleteAction;
    onDelete?: UpdateDeleteAction;
  };
}
//#endregion
//#region src/schemas/insertSchema.d.ts
/**
* Transforms a TObject schema for insert operations.
* All default properties at the root level are made optional.
*
* @example
* Before: { name: string; age: number(default=0); }
* After: { name: string; age?: number; }
*/
type TObjectInsert<T extends TObject> = TObject<{ [K in keyof T["properties"]]: T["properties"][K] extends {
  [PG_DEFAULT]: any;
} | {
  "~optional": true;
} ? TOptional<T["properties"][K]> : T["properties"][K] }>;
/**
 * Builds the insert-time variant of an entity schema: properties branded with
 * `PG_DEFAULT` (or already optional) become optional, so callers may omit them.
 */
declare const insertSchema: <T extends TObject>(obj: T) => TObjectInsert<T>;
//#endregion
//#region src/schemas/updateSchema.d.ts
/**
 * Transforms a TObject schema for update operations.
 * All optional properties at the root level are made nullable (i.e., `T | null`).
 * This allows an API endpoint to explicitly accept `null` to clear an optional field in the database.
 * Required properties are passed through unchanged.
 *
 * @example
 * Before: { name?: string; age: number; }
 * After: { name?: string | null; age: number; }
 */
type TObjectUpdate<T extends TObject> = TObject<{ [K in keyof T["properties"]]: T["properties"][K] extends TOptional<infer U> ? TOptional<TUnion<[U, TNull]>> : T["properties"][K] }>;
//#endregion
//#region src/helpers/schemaToPgColumns.d.ts
/**
 * Convert a Typebox Schema to Drizzle ORM Postgres columns.
 *
 * @param schema The entity's TObject schema; each property becomes one column builder.
 * @returns A map from property name to Drizzle PG column builder.
 */
declare const schemaToPgColumns: <T extends TObject>(schema: T) => FromSchema<T>;
/**
* Map a Typebox field to a PG column.
*
* @param name The key of the field.
* @param value The value of the field.
* @returns The PG column.
*/
declare const mapFieldToColumn: (name: string, value: TSchema$1) => pg$1.PgSerialBuilderInitial<string> | pg$1.PgIntegerBuilderInitial<string> | drizzle_orm6.IsIdentity<pg$1.PgBigInt64BuilderInitial<"">, "byDefault"> | drizzle_orm6.IsIdentity<pg$1.PgBigInt64BuilderInitial<"">, "always"> | pg$1.PgBigInt53BuilderInitial<string> | pg$1.PgNumericBuilderInitial<string> | pg$1.PgTimestampBuilderInitial<string> | pg$1.PgUUIDBuilderInitial<string> | pg$1.PgCustomColumnBuilder<{
name: string;
dataType: "custom";
columnType: "PgCustomColumn";
data: Buffer<ArrayBufferLike>;
driverParam: unknown;
enumValues: undefined;
}> | pg$1.PgTimestampStringBuilderInitial<string> | pg$1.PgDateStringBuilderInitial<string> | pg$1.PgTextBuilderInitial<string, [string, ...string[]]> | pg$1.PgBooleanBuilderInitial<string> | drizzle_orm6.$Type<pg$1.PgCustomColumnBuilder<{
name: string;
dataType: "custom";
columnType: "PgCustomColumn";
data: {
[x: string]: unknown;
[x: number]: unknown;
[x: symbol]: unknown;
};
driverParam: string;
enumValues: undefined;
}>, {
[x: string]: unknown;
[x: number]: unknown;
[x: symbol]: unknown;
}> | drizzle_orm6.$Type<pg$1.PgCustomColumnBuilder<{
name: string;
dataType: "custom";
columnType: "PgCustomColumn";
data: unknown[];
driverParam: string;
enumValues: undefined;
}>, unknown[]> | pg$1.PgArrayBuilder<{
name: string;
dataType: "array";
columnType: "PgArray";
data: string[];
driverParam: string | string[];
enumValues: [string, ...string[]];
size: undefined;
baseBuilder: {
name: string;
dataType: "string";
columnType: "PgText";
data: string;
enumValues: [string, ...string[]];
driverParam: string;
};
}, {
name: string;
dataType: "string";
columnType: "PgText";
data: string;
enumValues: [string, ...string[]];
driverParam: string;
}> | pg$1.PgArrayBuilder<{
name: string;
dataType: "array";
columnType: "PgArray";
data: number[];
driverParam: string | (string | number)[];
enumValues: undefined;
size: undefined;
baseBuilder: {
name: string;
dataType: "number";
columnType: "PgInteger";
data: number;
driverParam: number | string;
enumValues: undefined;
};
}, {
name: string;
dataType: "number";
columnType: "PgInteger";
data: number;
driverParam: number | string;
enumValues: undefined;
}> | pg$1.PgArrayBuilder<{
name: string;
dataType: "array";
columnType: "PgArray";
data: string[];
driverParam: string | string[];
enumValues: undefined;
size: undefined;
baseBuilder: {
name: string;
dataType: "string";
columnType: "PgNumeric";
data: string;
driverParam: string;
enumValues: undefined;
};
}, {
name: string;
dataType: "string";
columnType: "PgNumeric";
data: string;
driverParam: string;
enumValues: undefined;
}> | pg$1.PgArrayBuilder<{
name: string;
dataType: "array";
columnType: "PgArray";
data: boolean[];
driverParam: string | boolean[];
enumValues: undefined;
size: undefined;
baseBuilder: {
name: string;
dataType: "boolean";
columnType: "PgBoolean";
data: boolean;
driverParam: boolean;
enumValues: undefined;
};
}, {
name: string;
dataType: "boolean";
columnType: "PgBoolean";
data: boolean;
driverParam: boolean;
enumValues: undefined;
}>;
/**
 * Map a string-typed Typebox field to a PG column builder. The concrete builder
 * depends on the field — per the return union: uuid, binary (Buffer custom type),
 * timestamp, date, or plain text. NOTE(review): dispatch is presumably driven by
 * the string's `format` option; confirm in the implementation.
 *
 * @param key The key of the field.
 * @param value The value of the field.
 */
declare const mapStringToColumn: (key: string, value: TSchema$1) => pg$1.PgUUIDBuilderInitial<string> | pg$1.PgCustomColumnBuilder<{
  name: string;
  dataType: "custom";
  columnType: "PgCustomColumn";
  data: Buffer<ArrayBufferLike>;
  driverParam: unknown;
  enumValues: undefined;
}> | pg$1.PgTimestampStringBuilderInitial<string> | pg$1.PgDateStringBuilderInitial<string> | pg$1.PgTextBuilderInitial<string, [string, ...string[]]>;
/** Converts a camelCase identifier to snake_case — presumably for DB column names; confirm. */
declare const camelToSnakeCase: (str: string) => string;
/**
 * Convert a schema to columns: one Drizzle PG column builder per schema property.
 */
type FromSchema<T extends TObject> = { [key in keyof T["properties"]]: PgColumnBuilderBase };
/**
 * A table with columns and schema.
 * Augments a Drizzle table with accessors for the original TypeBox schema and
 * its derived insert/update variants.
 */
type PgTableWithColumnsAndSchema<T extends TableConfig, R extends TObject> = PgTableWithColumns<T> & {
  /** The underlying Drizzle table. */
  get $table(): PgTableWithColumns<T>;
  /** The original TypeBox schema the table was defined with. */
  get $schema(): R;
  /** Insert variant of the schema: defaulted properties become optional. */
  get $insertSchema(): TObjectInsert<R>;
  /** Update variant of the schema: optional properties become nullable. */
  get $updateSchema(): TObjectUpdate<R>;
};
//#endregion
//#region src/descriptors/$entity.d.ts
/**
* Creates a database entity descriptor that defines table structure using TypeBox schemas.
*
* This descriptor provides a type-safe way to define database tables using JSON Schema
* syntax while generating the necessary database metadata for migrations and operations.
* It integrates with Drizzle ORM under the hood and works seamlessly with the $repository
* descriptor for complete database functionality.
*
* **Key Features**
*
* - **Type-Safe Schema Definition**: Uses TypeBox for full TypeScript type inference
* - **Automatic Table Generation**: Creates Drizzle ORM table structures automatically
* - **Index Management**: Supports single-column, multi-column, and unique indexes
* - **Constraint Support**: Foreign keys, unique constraints, and check constraints
* - **Audit Fields**: Built-in support for created_at, updated_at, deleted_at, and version fields
* - **Schema Validation**: Automatic insert/update schema generation with validation
*
* **Important Note**:
* This descriptor only defines the table structure - it does not create the physical
* database table. Use it with $repository to perform actual database operations,
* and run migrations to create the tables in your database.
*
* **Use Cases**
*
* Essential for defining database schema in type-safe applications:
* - User management and authentication tables
* - Business domain entities (products, orders, customers)
* - Audit and logging tables
* - Junction tables for many-to-many relationships
* - Configuration and settings tables
*
* @example
* **Basic entity with indexes:**
* ```ts
* import { $entity } from "alepha/postgres";
* import { pg, t } from "alepha";
*
* const User = $entity({
* name: "users",
* schema: t.object({
* id: pg.primaryKey(t.uuid()),
* email: t.string({ format: "email" }),
* username: t.string({ minLength: 3, maxLength: 30 }),
* firstName: t.string(),
* lastName: t.string(),
* isActive: t.boolean({ default: true }),
* createdAt: pg.createdAt(),
* updatedAt: pg.updatedAt(),
* deletedAt: pg.deletedAt()
* }),
* indexes: [
* "email", // Simple index on email
* "username", // Simple index on username
* { column: "email", unique: true }, // Unique constraint on email
* { columns: ["firstName", "lastName"] } // Composite index
* ]
* });
* ```
*
* @example
* **E-commerce product entity with relationships:**
* ```ts
* const Product = $entity({
* name: "products",
* schema: t.object({
* id: pg.primaryKey(t.uuid()),
* sku: t.string({ minLength: 3 }),
* name: t.string({ minLength: 1, maxLength: 200 }),
* description: t.optional(t.string()),
* price: t.number({ minimum: 0 }),
* categoryId: t.string({ format: "uuid" }),
* inStock: t.boolean({ default: true }),
* stockQuantity: t.integer({ minimum: 0, default: 0 }),
* tags: t.optional(t.array(t.string())), // PostgreSQL array column
* metadata: t.optional(t.record(t.string(), t.any())), // JSONB column
* version: pg.version(),
* createdAt: pg.createdAt(),
* updatedAt: pg.updatedAt()
* }),
* indexes: [
* { column: "sku", unique: true }, // Unique SKU
* "categoryId", // Foreign key index
* "inStock", // Filter frequently by stock status
* { columns: ["categoryId", "inStock"] }, // Composite for category + stock queries
* "createdAt" // For date-based queries
* ],
* foreignKeys: [
* {
* name: "fk_product_category",
* columns: ["categoryId"],
* foreignColumns: [Category.id] // Reference to Category entity
* }
* ]
* });
* ```
*
* @example
* **Audit log entity with constraints:**
* ```ts
* const AuditLog = $entity({
* name: "audit_logs",
* schema: t.object({
* id: pg.primaryKey(t.uuid()),
* tableName: t.string(),
* recordId: t.string(),
* action: t.enum(["CREATE", "UPDATE", "DELETE"]),
* userId: t.optional(t.string({ format: "uuid" })),
* oldValues: t.optional(t.record(t.string(), t.any())),
* newValues: t.optional(t.record(t.string(), t.any())),
* timestamp: pg.createdAt(),
* ipAddress: t.optional(t.string()),
* userAgent: t.optional(t.string())
* }),
* indexes: [
* "tableName",
* "recordId",
* "userId",
* "action",
* { columns: ["tableName", "recordId"] }, // Find all changes to a record
* { columns: ["userId", "timestamp"] }, // User activity timeline
* "timestamp" // Time-based queries
* ],
* constraints: [
* {
* name: "valid_action_values",
* columns: ["action"],
* check: sql`action IN ('CREATE', 'UPDATE', 'DELETE')`
* }
* ]
* });
* ```
*
* @example
* **Many-to-many junction table:**
* ```ts
* const UserRole = $entity({
* name: "user_roles",
* schema: t.object({
* id: pg.primaryKey(t.uuid()),
* userId: t.string({ format: "uuid" }),
* roleId: t.string({ format: "uuid" }),
* assignedBy: t.string({ format: "uuid" }),
* assignedAt: pg.createdAt(),
* expiresAt: t.optional(t.datetime())
* }),
* indexes: [
* "userId",
* "roleId",
* "assignedBy",
* { columns: ["userId", "roleId"], unique: true }, // Prevent duplicate assignments
* "expiresAt" // For cleanup of expired roles
* ],
* foreignKeys: [
* {
* columns: ["userId"],
* foreignColumns: [User.id]
* },
* {
* columns: ["roleId"],
* foreignColumns: [Role.id]
* },
* {
* columns: ["assignedBy"],
* foreignColumns: [User.id]
* }
* ]
* });
* ```
*
* @example
* **Entity with custom Drizzle configuration:**
* ```ts
* const Order = $entity({
* name: "orders",
* schema: t.object({
* id: pg.primaryKey(t.uuid()),
* orderNumber: t.string(),
* customerId: t.string({ format: "uuid" }),
* status: t.enum(["pending", "processing", "shipped", "delivered"]),
* totalAmount: t.number({ minimum: 0 }),
* currency: t.string({ default: "USD" }),
* notes: t.optional(t.string()),
* createdAt: pg.createdAt(),
* updatedAt: pg.updatedAt(),
* version: pg.version()
* }),
* indexes: [
* { column: "orderNumber", unique: true },
* "customerId",
* "status",
* "createdAt",
* { columns: ["customerId", "status"] }
* ],
* // Advanced Drizzle ORM configuration
* config: (table) => [
* // Custom index with specific options
* index("idx_orders_amount_status")
* .on(table.totalAmount, table.status)
* .where(sql`status != 'cancelled'`), // Partial index
*
* // Full-text search index (PostgreSQL specific)
* index("idx_orders_search")
* .using("gin", table.notes)
* ]
* });
* ```
*
* @stability 2
*/
declare const $entity: {
  <TTableName extends string, TSchema extends TObject, TColumnsMap extends FromSchema<TSchema>>(options: EntityDescriptorOptions<TTableName, TSchema>): PgTableWithColumnsAndSchema<PgTableConfig<TTableName, TSchema, TColumnsMap>, TSchema>;
  /** Descriptor kind marker — NOTE(review): presumably read by Alepha's descriptor registry; confirm. */
  [KIND]: string;
};
interface EntityDescriptorOptions<TTableName extends string, T extends TObject, Keys = keyof Static<T>> {
/**
* The database table name that will be created for this entity.
*
* This name:
* - Must be unique within your database schema
* - Should follow your database naming conventions (typically snake_case)
* - Will be used in generated SQL queries and migrations
* - Should be descriptive of the entity's purpose
*
* **Naming Guidelines**:
* - Use plural nouns for table names ("users", "products", "orders")
* - Use snake_case for multi-word names ("user_profiles", "order_items")
* - Keep names concise but descriptive
* - Avoid SQL reserved words
*
* @example "users"
* @example "product_categories"
* @example "user_roles"
* @example "audit_logs"
*/
name: TTableName;
/**
* TypeBox schema defining the table structure and column types.
*
* This schema:
* - Defines all table columns with their types and constraints
* - Provides full TypeScript type inference for the entity
* - Supports validation rules and default values
* - Enables automatic insert/update schema generation
* - Must include exactly one primary key field marked with `pg.primaryKey()`
*
* **Supported PostgreSQL Types**:
* - `pg.primaryKey(t.uuid())` - UUID primary key
* - `t.string()` - VARCHAR column
* - `t.integer()`, `t.number()` - Numeric columns
* - `t.boolean()` - Boolean column
* - `t.array(t.string())` - PostgreSQL array column
* - `t.record(t.string(), t.any())` - JSONB column
* - `pg.createdAt()`, `pg.updatedAt()`, `pg.deletedAt()` - Audit timestamps
* - `pg.version()` - Optimistic locking version field
*
* **Schema Best Practices**:
* - Always include a primary key
* - Use appropriate TypeBox constraints (minLength, format, etc.)
* - Add audit fields for trackability
* - Use optional fields for nullable columns
* - Include foreign key columns for relationships
*
* @example
* ```ts
* t.object({
* id: pg.primaryKey(t.uuid()),
* email: t.string({ format: "email" }),
* firstName: t.string({ minLength: 1, maxLength: 100 }),
* lastName: t.string({ minLength: 1, maxLength: 100 }),
* age: t.optional(t.integer({ minimum: 0, maximum: 150 })),
* isActive: t.boolean({ default: true }),
* preferences: t.optional(t.record(t.string(), t.any())),
* tags: t.optional(t.array(t.string())),
* createdAt: pg.createdAt(),
* updatedAt: pg.updatedAt(),
* version: pg.version()
* })
* ```
*/
schema: T;
/**
* Database indexes to create for query optimization.
*
* Indexes improve query performance but consume disk space and slow down writes.
* Choose indexes based on your actual query patterns and performance requirements.
*
* **Index Types**:
* - **Simple string**: Creates a single-column index
* - **Single column object**: Creates index on one column with options
* - **Multi-column object**: Creates composite index on multiple columns
*
* **Index Guidelines**:
* - Index frequently queried columns (WHERE, ORDER BY, JOIN conditions)
* - Create unique indexes for business constraints
* - Use composite indexes for multi-column queries
* - Index foreign key columns for join performance
* - Monitor index usage and remove unused indexes
*
* **Performance Considerations**:
* - Each index increases storage requirements
* - Indexes slow down INSERT/UPDATE/DELETE operations
* - PostgreSQL can use multiple indexes in complex queries
* - Partial indexes can be more efficient for filtered queries
*
* @example ["email", "createdAt", { column: "username", unique: true }]
* @example [{ columns: ["userId", "status"], name: "idx_user_status" }]
* @example ["categoryId", { columns: ["price", "inStock"] }]
*/
indexes?: (Keys | {
/**
* Single column to index.
*/
column: Keys;
/**
* Whether this should be a unique index (enforces uniqueness constraint).
*/
unique?: boolean;
/**
* Custom name for the index. If not provided, generates name automatically.
*/
name?: string;
} | {
/**
* Multiple columns for composite index (order matters for query optimization).
*/
columns: Keys[];
/**
* Whether this should be a unique index (enforces uniqueness constraint).
*/
unique?: boolean;
/**
* Custom name for the index. If not provided, generates name automatically.
*/
name?: string;
})[];
/**
* Foreign key constraints to maintain referential integrity.
*
* Foreign keys ensure that values in specified columns must exist in the referenced table.
* They prevent orphaned records and maintain database consistency.
*
* **Foreign Key Benefits**:
* - Prevents invalid references to non-existent records
* - Maintains data integrity automatically
* - Provides clear schema documentation of relationships
* - Enables cascade operations (DELETE, UPDATE)
*
* **Considerations**:
* - Foreign keys can impact performance on large tables
* - They prevent deletion of referenced records
* - Consider cascade options for related data cleanup
*
* @example
* ```ts
* foreignKeys: [
* {
* name: "fk_user_role",
* columns: ["roleId"],
* foreignColumns: [Role.id]
* },
* {
* columns: ["createdBy"],
* foreignColumns: [User.id]
* }
* ]
* ```
*/
foreignKeys?: Array<{
/**
* Optional name for the foreign key constraint.
*/
name?: string;
/**
* Local columns that reference the foreign table.
*/
columns: Array<keyof Static<T>>;
/**
* Referenced columns in the foreign table.
*/
foreignColumns: Array<AnyPgColumn>;
}>;
/**
* Additional table constraints for data validation.
*
* Constraints enforce business rules at the database level, providing
* an additional layer of data integrity beyond application validation.
*
* **Constraint Types**:
* - **Unique constraints**: Prevent duplicate values across columns
* - **Check constraints**: Enforce custom validation rules with SQL expressions
*
* **Use Cases**:
* - Enforce unique combinations of columns
* - Validate value ranges or patterns
* - Ensure consistent data states
* - Implement business rule validation
*
* @example
* ```ts
* constraints: [
* {
* name: "unique_user_email",
* columns: ["email"],
* unique: true
* },
* {
* name: "valid_age_range",
* columns: ["age"],
* check: sql`age >= 0 AND age <= 150`
* },
* {
* name: "unique_user_username_per_tenant",
* columns: ["tenantId", "username"],
* unique: true
* }
* ]
* ```
*/
constraints?: Array<{
/**
* Columns involved in this constraint.
*/
columns: Array<keyof Static<T>>;
/**
* Optional name for the constraint.
*/
name?: string;
/**
* Whether this is a unique constraint.
*/
unique?: boolean | {};
/**
* SQL expression for check constraint validation.
*/
check?: SQL;
}>;
/**
* Advanced Drizzle ORM configuration for complex table setups.
*
* This allows you to use advanced Drizzle ORM features that aren't covered
* by the simplified options above. Use this for:
* - Custom index types (GIN, GIST, etc.)
* - Partial indexes with WHERE clauses
* - Advanced constraint configurations
* - PostgreSQL-specific features
*
* **When to Use**:
* - Need PostgreSQL-specific index types
* - Require partial indexes for performance
* - Want fine-grained control over table creation
* - Using advanced PostgreSQL features
*
* See Drizzle ORM documentation for complete configuration options.
*
* @param self - The table columns available for configuration
* @returns Array of Drizzle table configuration objects
*
* @example
* ```ts
* config: (table) => [
* // Partial index for active users only
* index("idx_active_users_email")
* .on(table.email)
* .where(sql`is_active = true`),
*
* // GIN index for full-text search
* index("idx_content_search")
* .using("gin", table.searchVector),
*
* // Unique constraint with custom options
* uniqueIndex("idx_unique_slug_per_tenant")
* .on(table.tenantId, table.slug)
* ]
* ```
*/
config?: (self: BuildExtraConfigColumns<string, FromSchema<T>, "pg">) => PgTableExtraConfigValue[];
}
/** Shorthand for an entity table derived from a TypeBox schema `T`. */
type Entity<T extends TObject> = PgTableWithColumnsAndSchema<PgTableConfig<string, T, FromSchema<T>>, T>;
/** Drizzle `TableConfig` built from an entity name, TypeBox schema, and column map. */
type PgTableConfig<TTableName extends string, TSchema extends TObject, TColumnsMap extends FromSchema<TSchema>> = {
  name: TTableName;
  // NOTE(review): presumably the PG schema (namespace); typed `any` upstream — confirm.
  schema: any;
  columns: BuildColumns<TTableName, TColumnsMap, "pg">;
  dialect: "pg";
};
//#endregion
//#region src/errors/PgError.d.ts
/**
 * Error type raised by the Postgres module. Extends the framework-wide
 * `AlephaError` so callers can handle database failures uniformly; the
 * original failure can be attached via `cause`.
 */
declare class PgError extends AlephaError {
  name: string;
  constructor(message: string, cause?: unknown);
}
//#endregion
//#region src/helpers/pgAttr.d.ts
/**
 * Type representation: a TypeBox schema branded with one Postgres attribute
 * symbol and its option payload (see {@link PgSymbols}). The runtime shape of
 * the schema is unchanged — the brand exists only at the type level.
 */
type PgAttr<T extends TSchema$1, TAttr extends PgSymbolKeys> = T & { [K in TAttr]: PgSymbols[K] };
/** A schema field together with its attached PG attribute data. */
interface PgAttrField {
  /** Property name within the entity schema. */
  key: string;
  /** The field's TypeBox schema. */
  type: TSchema$1;
  /** Attribute payload; shape depends on the symbol (typed `any` upstream). */
  data: any;
  /** Nested fields, if any — NOTE(review): purpose not visible from this file; confirm. */
  nested?: any[];
}
//#endregion
//#region src/interfaces/FilterOperators.d.ts
interface FilterOperators<TValue> {
/**
* Test that two values are equal.
*
* Remember that the SQL standard dictates that
* two NULL values are not equal, so if you want to test
* whether a value is null, you may want to use
* `isNull` instead.
*
* ## Examples
*
* ```ts
* // Select cars made by Ford
* db.select().from(cars)
* .where(eq(cars.make, 'Ford'))
* ```
*
* @see isNull for a way to test equality to NULL.
*/
eq?: TValue;
/**
* Test that two values are not equal.
*
* Remember that the SQL standard dictates that
* two NULL values are not equal, so if you want to test
* whether a value is not null, you may want to use
* `isNotNull` instead.
*
* ## Examples
*
* ```ts
* // Select cars not made by Ford
* db.select().from(cars)
* .where(ne(cars.make, 'Ford'))
* ```
*
* @see isNotNull for a way to test whether a value is not null.
*/
ne?: TValue;
/**
* Test that the first expression passed is greater than
* the second expression.
*
* ## Examples
*
* ```ts
* // Select cars made after 2000.
* db.select().from(cars)
* .where(gt(cars.year, 2000))
* ```
*
* @see gte for greater-than-or-equal
*/
gt?: TValue;
/**
* Test that the first expression passed is greater than
* or equal to the second expression. Use `gt` to
* test whether an expression is strictly greater
* than another.
*
* ## Examples
*
* ```ts
* // Select cars made on or after 2000.
* db.select().from(cars)
* .where(gte(cars.year, 2000))
* ```
*
* @see gt for a strictly greater-than condition
*/
gte?: TValue;
/**
* Test that the first expression passed is less than
* the second expression.
*
* ## Examples
*
* ```ts
* // Select cars made before 2000.
* db.select().from(cars)
* .where(lt(cars.year, 2000))
* ```
*
* @see lte for greater-than-or-equal
*/
lt?: TValue;
/**
* Test that the first expression passed is less than
* or equal to the second expression.
*
* ## Examples
*
* ```ts
* // Select cars made before 2000.
* db.select().from(cars)
* .where(lte(cars.year, 2000))
* ```
*
* @see lt for a strictly less-than condition
*/
lte?: TValue;
/**
* Test whether the first parameter, a column or expression,
* has a value from a list passed as the second argument.
*
* ## Throws
*
* The argument passed in the second array can't be empty:
* if an empty is provided, this method will throw.
*
* ## Examples
*
* ```ts
* // Select cars made by Ford or GM.
* db.select().from(cars)
* .where(inArray(cars.make, ['Ford', 'GM']))
* ```
*
* @see notInArray for the inverse of this test
*/
inArray?: TValue[];
/**
* Test whether the first parameter, a column or expression,
* has a value that is not present in a list passed as the
* second argument.
*
* ## Throws
*
* The argument passed in the second array can't be empty:
* if an empty is provided, this method will throw.
*
* ## Examples
*
* ```ts
* // Select cars made by any company except Ford or GM.
* db.select().from(cars)
* .where(notInArray(cars.make, ['Ford', 'GM']))
* ```
*
* @see inArray for the inverse of this test
*/
notInArray?: TValue[];
/**
* Test whether an expression is not NULL. By the SQL standard,
* NULL is neither equal nor not equal to itself, so
* it's recommended to use `isNull` and `notIsNull` for
* comparisons to NULL.
*
* ## Examples
*
* ```ts
* // Select cars that have been discontinued.
* db.select().from(cars)
* .where(isNotNull(cars.discontinuedAt))
* ```
*
* @see isNull for the inverse of this test
*/
isNotNull?: true;
/**
* Test whether an expression is NULL. By the SQL standard,
* NULL is neither equal nor not equal to itself, so
* it's recommended to use `isNull` and `notIsNull` for
* comparisons to NULL.
*
* ## Examples
*
* ```ts
* // Select cars that have no discontinuedAt date.
* db.select().from(cars)
* .where(isNull(cars.discontinuedAt))
* ```
*
* @see isNotNull for the inverse of this test
*/
isNull?: true;
/**
* Test whether an expression is between two values. This
* is an easier way to express range tests, which would be
* expressed mathematically as `x <= a <= y` but in SQL
* would have to be like `a >= x AND a <= y`.
*
* Between is inclusive of the endpoints: if `column`
* is equal to `min` or `max`, it will be TRUE.
*
* ## Examples
*
* ```ts
* // Select cars made between 1990 and 2000
* db.select().from(cars)
* .where(between(cars.year, 1990, 2000))
* ```
*
* @see notBetween for the inverse of this test
*/
between?: [number, number];
/**
* Test whether an expression is not between two values.
*
* This, like `between`, includes its endpoints, so if
* the `column` is equal to `min` or `max`, in this case
* it will evaluate to FALSE.
*
* ## Examples
*
* ```ts
* // Exclude cars made in the 1970s
* db.select().from(cars)
* .where(notBetween(cars.year, 1970, 1979))
* ```
*
* @see between for the inverse of this test
*/
notBetween?: [number, number];
/**
* Compare a column to a pattern, which can include `%` and `_`
* characters to match multiple variations. Including `%`
* in the pattern matches zero or more characters, and including
* `_` will match a single character.
*
* ## Examples
*
* ```ts
* // Select all cars with 'Turbo' in their names.
* db.select().from(cars)
* .where(like(cars.name, '%Turbo%'))
* ```
*
* @see ilike for a case-insensitive version of this condition
*/
like?: string;
/**
* The inverse of like - this tests that a given column
* does not match a pattern, which can include `%` and `_`
* characters to match multiple variations. Including `%`
* in the pattern matches zero or more characters, and including
* `_` will match a single character.
*
* ## Examples
*
* ```ts
* // Select all cars that don't have "ROver" in their name.
* db.select().from(cars)
* .where(notLike(cars.name, '%Rover%'))
* ```
*
* @see like for the inverse condition
* @see notIlike for a case-insensitive version of this condition
*/
notLike?: string;
/**
* Case-insensitively compare a column to a pattern,
* which can include `%` and `_`
* characters to match multiple variations. Including `%`
* in the pattern matches zero or more characters, and including
* `_` will match a single character.
*
* Unlike like, this performs a case-insensitive comparison.
*
* ## Examples
*
* ```ts
* // Select all cars with 'Turbo' in their names.
* db.select().from(cars)
* .where(ilike(cars.name, '%Turbo%'))
* ```
*
* @see like for a case-sensitive version of this condition
*/
ilike?: string;
/**
* The inverse of ilike - this case-insensitively tests that a given column
* does not match a pattern, which can include `%` and `_`
* characters to match multiple variations. Including `%`
* in the pattern matches zero or more characters, and including
* `_` will match a single character.
*
* ## Examples
*
* ```ts
* // Select all cars that don't have "Rover" in their name.
* db.select().from(cars)
* .where(notLike(cars.name, '%Rover%'))
* ```
*
* @see ilike for the inverse condition
* @see notLike for a case-sensitive version of this condition
*/
notIlike?: string;
/**
* Test that a column or expression contains all elements of
* the list passed as the second argument.
*
* ## Throws
*
* The argument passed in the second array can't be empty:
* if an empty is provided, this method will throw.
*
* ## Examples
*
* ```ts
* // Select posts where its tags contain "Typescript" and "ORM".
* db.select().from(posts)
* .where(arrayContains(posts.tags, ['Typescript', 'ORM']))
* ```
*
* @see arrayContained to find if an array contains all elements of a column or expression
* @see arrayOverlaps to find if a column or expression contains any elements of an array
*/
arrayContains?: TValue;
/**
* Test that the list passed as the second argument contains
* all elements of a column or expression.
*
* ## Throws
*
* The argument passed in the second array can't be empty:
* if an empty is provided, this method will throw.
*
* ## Examples
*
* ```ts
* // Select posts where its tags contain "Typescript", "ORM" or both,
* // but filtering posts that have additional tags.
* db.select().from(posts)
* .where(arrayContained(posts.tags, ['Typescript', 'ORM']))
* ```
*
* @see arrayContains to find if a column or expression contains all elements of an array
* @see arrayOverlaps to find if a column or expression contains any elements of an array
*/
arrayContained?: TValue;
/**
* Test that a column or expression contains any elements of
* the list passed as the second argument.
*
* ## Throws
*
* The argument passed in the second array can't be empty:
* if an empty is provided, this method will throw.
*
* ## Examples
*
* ```ts
* // Select posts where its tags contain "Typescript", "ORM" or both.
* db.select().from(posts)
* .where(arrayOverlaps(posts.tags, ['Typescript', 'ORM']))
* ```
*
* @see arrayContains to find if a column or expression contains all elements of an array
* @see arrayContained to find if an array contains all elements of a column or expression
*/
arrayOverlaps?: TValue;
}
//#endregion
//#region src/interfaces/PgQueryWhere.d.ts
/**
 * A where clause accepted by repository queries: either a structured
 * {@link PgQueryWhere} object or a raw Drizzle `SQLWrapper` expression.
 */
type PgQueryWhereOrSQL<T extends object> = PgQueryWhere<T> | SQLWrapper;
/**
 * Structured, type-safe where clause used by repository queries.
 *
 * Each entity property may be matched by direct equality
 * (`{ name: "Ada" }`) or through {@link FilterOperators}
 * (`{ price: { gte: 10 } }`). The logical combinators below
 * (`and`, `or`, `not`, `exists`) allow nesting of conditions.
 */
type PgQueryWhere<T extends object> = { [Key in keyof T]?: FilterOperators<T[Key]> | T[Key] } & {
/**
* Combine a list of conditions with the `and` operator. Conditions
* that are equal to `undefined` are automatically ignored.
*
* ## Examples
*
* ```ts
* db.select().from(cars)
* .where(
* and(
* eq(cars.make, 'Volvo'),
* eq(cars.year, 1950),
* )
* )
* ```
*/
and?: Array<PgQueryWhereOrSQL<T>>;
/**
* Combine a list of conditions with the `or` operator. Conditions
* that are equal to `undefined` are automatically ignored.
*
* ## Examples
*
* ```ts
* db.select().from(cars)
* .where(
* or(
* eq(cars.make, 'GM'),
* eq(cars.make, 'Ford'),
* )
* )
* ```
*/
or?: Array<PgQueryWhereOrSQL<T>>;
/**
* Negate the meaning of an expression using the `not` keyword.
*
* ## Examples
*
* ```ts
* // Select cars _not_ made by GM or Ford.
* db.select().from(cars)
* .where(not(inArray(cars.make, ['GM', 'Ford'])))
* ```
*/
not?: PgQueryWhereOrSQL<T>;
/**
* Test whether a subquery evaluates to have any rows.
*
* ## Examples
*
* ```ts
* // Users whose `homeCity` column has a match in a cities
* // table.
* db
* .select()
* .from(users)
* .where(
* exists(db.select()
* .from(cities)
* .where(eq(users.homeCity, cities.id))),
* );
* ```
*
* @see notExists for the inverse of this test
*/
exists?: SQLWrapper;
};
//#endregion
//#region src/interfaces/PgQuery.d.ts
/**
 * Declarative query options accepted by repository read operations.
 */
interface PgQuery<T extends TObject> {
/** When true, emits a `SELECT DISTINCT` query. */
distinct?: boolean;
/** Filter conditions — a structured {@link PgQueryWhere} object or raw Drizzle SQL. */
where?: PgQueryWhereOrSQL<Static<T>>;
/** Maximum number of rows to return. */
limit?: number;
/** Number of rows to skip before returning results. */
offset?: number;
/** Per-column sort directions. */
sort?: { [key in keyof Static<T>]?: "asc" | "desc" };
/** Columns to group by. */
groupBy?: (keyof Static<T>)[];
}
/**
 * Result row schema when only a subset of columns is selected:
 * the entity schema `T` narrowed to the keys listed in `Select`.
 */
type PgQueryResult<T extends TObject, Select extends (keyof Static<T>)[]> = TPick<T, TKeysToIndexer<Select>>;
//#endregion
//#region src/providers/drivers/PostgresProvider.d.ts
/**
 * A SQL statement accepted by `PostgresProvider.execute`: either a raw
 * query string or any Drizzle `SQLWrapper` expression.
 */
type SQLLike = string | SQLWrapper;
/**
 * Low-level database driver abstraction for the Alepha Postgres module.
 *
 * Concrete implementations expose the underlying Drizzle database handle,
 * schema/dialect metadata, and a raw SQL execution entry point.
 */
declare abstract class PostgresProvider {
/** Alepha application container, injected by the framework. */
protected readonly alepha: Alepha;
/** The underlying Drizzle database handle. */
abstract get db(): PgDatabase<any>;
/** Name of the PostgreSQL schema this provider operates on. */
abstract get schema(): string;
/** SQL dialect identifier of the concrete driver. */
abstract get dialect(): string;
/**
 * Execute a raw SQL query.
 *
 * @param query - Raw SQL string or a Drizzle `SQLWrapper` expression.
 * @param schema - Optional TypeBox schema; when provided, result rows are
 *   typed as `Static<T>` (whether rows are validated at runtime is up to the
 *   implementation — NOTE(review): cannot confirm from this declaration).
 * @returns The result rows.
 */
abstract execute<T extends TObject | undefined = undefined>(query: SQLLike, schema?: T): Promise<Array<T extends TObject ? Static<T> : any>>;
}
//#endregion
//#region src/schemas/pageQuerySchema.d.ts
/**
 * TypeBox schema describing pagination query parameters:
 * `page` number, page `size`, and a `sort` expression — all optional.
 */
declare const pageQuerySchema: typebox1.TObject<{
page: typebox1.TOptional<typebox1.TInteger>;
size: typebox1.TOptional<typebox1.TInteger>;
sort: typebox1.TOptional<typebox1.TString>;
}>;
/** Static (plain object) type of {@link pageQuerySchema}. */
type PageQuery = Static<typeof pageQuerySchema>;
//#endregion
//#region src/schemas/pageSchema.d.ts
/**
 * Create a pagination schema for the given object schema.
 *
 * The returned schema wraps the item schema in a {@link TPage} envelope:
 * `content` (the items), `can` (next/previous navigation flags) and
 * `page` (number, size and optional total element count).
 *
 * @param objectSchema - Schema of a single page item.
 * @param options - Extra TypeBox options applied to the envelope object.
 * @returns A `TPage` schema wrapping `objectSchema`.
 *
 * @example
 * const userSchema = t.object({ id: t.int(), name: t.string() });
 * const pagedUserSchema = pageSchema(userSchema);
 *
 * @see {@link $repository#paginate}
 */
declare const pageSchema: <T extends TObject | TRecord>(objectSchema: T, options?: TObjectOptions) => TPage<T>;
/**
 * TypeBox schema shape of the paginated result envelope produced by
 * {@link pageSchema}.
 */
type TPage<T extends TObject | TRecord> = TObject<{
content: TArray<T>;
can: TObject<{
next: TBoolean;
previous: TBoolean;
}>;
page: TObject<{
number: TInteger;
size: TInteger;
totalElements: TOptionalAdd<TInteger>;
}>;
}>;
/**
 * Plain (static) shape of a paginated result.
 */
type Page<T> = {
/** Items of the current page. */
content: T[];
/** Navigation flags for the surrounding pages. */
can: {
next: boolean;
previous: boolean;
};
/** Metadata about the current page. */
page: {
number: number;
size: number;
/** Total element count; optional, present only when counting was requested. */
totalElements?: number;
};
};
//#endregion
//#region src/descriptors/$repository.d.ts
/**
* Creates a repository descriptor for database operations on a defined entity.
*
* This descriptor provides a comprehensive, type-safe interface for performing all
* database operations on entities defined with $entity. It offers a rich set of
* CRUD operations, advanced querying capabilities, pagination, transactions, and
* built-in support for audit trails and soft deletes.
*
* **Key Features**
*
* - **Complete CRUD Operations**: Create, read, update, delete with full type safety
* - **Advanced Querying**: Complex WHERE conditions, sorting, pagination, and aggregations
* - **Transaction Support**: Database transactions for consistency and atomicity
* - **Soft Delete Support**: Built-in soft delete functionality with `pg.deletedAt()` fields
* - **Optimistic Locking**: Version-based conflict resolution with `pg.version()` fields
* - **Audit Trail Integration**: Automatic handling of `createdAt`, `updatedAt` timestamps
* - **Raw SQL Support**: Execute custom SQL queries when needed
* - **Pagination**: Built-in pagination with metadata and navigation
*
* **Important Requirements**
* - Must be used with an entity created by $entity
* - Entity schema must include exactly one primary key field
* - Database tables must be created via migrations before use
*
* **Use Cases**
*
* Essential for all database-driven applications:
* - User management and authentication systems
* - E-commerce product and order management
* - Content management and blogging platforms
* - Financial and accounting applications
* - Any application requiring persistent data storage
*
* @example
* **Basic repository with CRUD operations:**
* ```ts
* import { $entity, $repository } from "alepha/postgres";
* import { pg, t } from "alepha";
*
* // First, define the entity
* const User = $entity({
* name: "users",
* schema: t.object({
* id: pg.primaryKey(t.uuid()),
* email: t.string({ format: "email" }),
* firstName: t.string(),
* lastName: t.string(),
* isActive: t.boolean({ default: true }),
* createdAt: pg.createdAt(),
* updatedAt: pg.updatedAt()
* }),
* indexes: [{ column: "email", unique: true }]
* });
*
* class UserService {
* users = $repository({ table: User });
*
* async createUser(userData: { email: string; firstName: string; lastName: string }) {
* return await this.users.create({
* id: generateUUID(),
* email: userData.email,
* firstName: userData.firstName,
* lastName: userData.lastName,
* isActive: true
* });
* }
*
* async getUserByEmail(email: string) {
* return await this.users.findOne({ email });
* }
*
* async updateUser(id: string, updates: { firstName?: string; lastName?: string }) {
* return await this.users.updateById(id, updates);
* }
*
* async deactivateUser(id: string) {
* return await this.users.updateById(id, { isActive: false });
* }
* }
* ```
*
* @example
* **Advanced querying and filtering:**
* ```ts
* const Product = $entity({
* name: "products",
* schema: t.object({
* id: pg.primaryKey(t.uuid()),
* name: t.string(),
* price: t.number({ minimum: 0 }),
* categoryId: t.string({ format: "uuid" }),
* inStock: t.boolean(),
* tags: t.optional(t.array(t.string())),
* createdAt: pg.createdAt(),
* updatedAt: pg.updatedAt()
* }),
* indexes: ["categoryId", "inStock", "price"]
* });
*
* class ProductService {
* products = $repository({ table: Product });
*
* async searchProducts(filters: {
* categoryId?: string;
* minPrice?: number;
* maxPrice?: number;
* inStock?: boolean;
* searchTerm?: string;
* }, page: number = 0, size: number = 20) {
* const query = this.products.createQuery({
* where: {
* and: [
* filters.categoryId ? { categoryId: filters.categoryId } : {},
* filters.inStock !== undefined ? { inStock: filters.inStock } : {},
* filters.minPrice ? { price: { gte: filters.minPrice } } : {},
* filters.maxPrice ? { price: { lte: filters.maxPrice } } : {},
* filters.searchTerm ? { name: { ilike: `%${filters.searchTerm}%` } } : {}
* ]
* },
* sort: { createdAt: "desc" }
* });
*
* return await this.products.paginate({ page, size }, query, { count: true });
* }
*
* async getTopSellingProducts(limit: number = 10) {
* // Custom SQL query for complex analytics
* return await this.products.query(
* (table, db) => db
* .select({
* id: table.id,
* name: table.name,
* price: table.price,
* salesCount: sql<number>`COALESCE(sales.count, 0)`
* })
* .from(table)
* .leftJoin(
* sql`(
* SELECT product_id, COUNT(*) as count
* FROM order_items
* WHERE created_at > NOW() - INTERVAL '30 days'
* GROUP BY product_id
* ) sales`,
* sql`sales.product_id = ${table.id}`
* )
* .orderBy(sql`sales.count DESC NULLS LAST`)
* .limit(limit)
* );
* }
* }
* ```
*
* @example
* **Transaction handling and data consistency:**
* ```ts
* class OrderService {
* orders = $repository({ table: Order });
* orderItems = $repository({ table: OrderItem });
* products = $repository({ table: Product });
*
* async createOrderWithItems(orderData: {
* customerId: string;
* items: Array<{ productId: string; quantity: number; price: number }>;
* }) {
* return await this.orders.transaction(async (tx) => {
* // Create the order
* const order = await this.orders.create({
* id: generateUUID(),
* customerId: orderData.customerId,
* status: 'pending',
* totalAmount: orderData.items.reduce((sum, item) => sum + (item.price * item.quantity), 0)
* }, { tx });
*
* // Create order items and update product inventory
* for (const itemData of orderData.items) {
* await this.orderItems.create({
* id: generateUUID(),
* orderId: order.id,
* productId: itemData.productId,
* quantity: itemData.quantity,
* unitPrice: itemData.price
* }, { tx });
*
* // Update product inventory using optimistic locking
* const product = await this.products.findById(itemData.productId, { tx });
* if (product.stockQuantity < itemData.quantity) {
* throw new Error(`Insufficient stock for product ${itemData.productId}`);
* }
*
* await this.products.save({
* ...product,
* stockQuantity: product.stockQuantity - itemData.quantity
* }, { tx });
* }
*
* return order;
* });
* }
* }
* ```
*
* @example
* **Soft delete and audit trail:**
* ```ts
* const Document = $entity({
* name: "documents",
* schema: t.object({
* id: pg.primaryKey(t.uuid()),
* title: t.string(),
* content: t.string(),
* authorId: t.string({ format: "uuid" }),
* version: pg.version(),
* createdAt: pg.createdAt(),
* updatedAt: pg.updatedAt(),
* deletedAt: pg.deletedAt() // Enables soft delete
* })
* });
*
* class DocumentService {
* documents = $repository({ table: Document });
*
* async updateDocument(id: string, updates: { title?: string; content?: string }) {
* // This uses optimistic locking via the version field
* const document = await this.documents.findById(id);
* return await this.documents.save({
* ...document,
* ...updates // updatedAt will be set automatically
* });
* }
*
* async softDeleteDocument(id: string) {
* // Soft delete - sets deletedAt timestamp
* await this.documents.deleteById(id);
* }
*
* async permanentDeleteDocument(id: string) {
* // Hard delete - actually removes from database
* await this.documents.deleteById(id, { force: true });
* }
*
* async getActiveDocuments() {
* // Automatically excludes soft-deleted records
* return await this.documents.find({
* where: { authorId: { isNotNull: true } },
* sort: { updatedAt: "desc" }
* });
* }
*
* async getAllDocumentsIncludingDeleted() {
* // Include soft-deleted records
* return await this.documents.find({}, { force: true });
* }
* }
* ```
*
* @example
* **Complex filtering and aggregation:**
* ```ts
* class AnalyticsService {
* users = $repository({ table: User });
* orders = $repository({ table: Order });
*
* async getUserStatistics(filters: {
* startDate?: string;
* endDate?: string;
* isActive?: boolean;
* }) {
* const whereConditions = [];
*
* if (filters.startDate) {
* whereConditions.push({ createdAt: { gte: filters.startDate } });
* }
* if (filters.endDate) {
* whereConditions.push({ createdAt: { lte: filters.endDate } });
* }
* if (filters.isActive !== undefined) {
* whereConditions.push({ isActive: filters.isActive });
* }
*
* const totalUsers = await this.users.count({
* and: whereConditions
* });
*
* const activeUsers = await this.users.count({
* and: [...whereConditions, { isActive: true }]
* });
*
* // Complex aggregation query
* const recentActivity = await this.users.query(
* sql`
* SELECT
* DATE_TRUNC('day', created_at) as date,
* COUNT(*) as new_users,
* COUNT(*) FILTER (WHERE is_active = true) as active_users
* FROM users
* WHERE created_at >= NOW() - INTERVAL '30 days'
* GROUP BY DATE_TRUNC('day', created_at)
* ORDER BY date DESC
* `
* );
*
* return {
* totalUsers,
* activeUsers,
* inactiveUsers: totalUsers - activeUsers,
* recentActivity
* };
* }
* }
* ```
*
* @stability 3
*/
declare const $repository: {
/**
 * Build a repository descriptor for the given entity table (or full
 * repository options object). See the descriptor documentation above.
 */
<EntityTableConfig extends TableConfig, EntitySchema extends TObject>(optionsOrTable: RepositoryDescriptorOptions<EntityTableConfig, EntitySchema> | PgTableWithColumnsAndSchema<EntityTableConfig, EntitySchema>): RepositoryDescriptor<EntityTableConfig, EntitySchema>;
/** Marker linking this descriptor factory to its descriptor class. */
[KIND]: typeof RepositoryDescriptor;
};
interface RepositoryDescriptorOptions<EntityTableConfig extends TableConfig, EntitySchema extends TObject> {
/**
* The entity table definition created with $entity.
*
* This table:
* - Must be created using the $entity descriptor
* - Defines the schema, indexes, and constraints for the repository
* - Provides type information for all repository operations
* - Must include exactly one primary key field
*
* The repository will automatically:
* - Generate typed CRUD operations based on the entity schema
* - Handle audit fields like createdAt, upd