alepha
Alepha is a convention-driven TypeScript framework for building robust, end-to-end type-safe applications, from serverless APIs to full-stack React apps.
import * as _alepha_core1 from "alepha";
import { Alepha, Descriptor, KIND, Service, Static, TSchema } from "alepha";
import * as _alepha_logger0 from "alepha/logger";
import { DateTimeProvider } from "alepha/datetime";
//#region src/providers/QueueProvider.d.ts
/**
* Minimalist Queue interface.
*
 * Will probably be enhanced in the future to support more advanced features. For now, it's enough!
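 *
 * @example
 * A minimal sketch of a custom provider. The `redis` client shape shown here is
 * hypothetical and only illustrates the contract:
 * ```ts
 * class RedisLikeQueueProvider implements QueueProvider {
 *   constructor(
 *     private readonly redis: {
 *       lpush(key: string, value: string): Promise<number>;
 *       rpop(key: string): Promise<string | null>;
 *     },
 *   ) {}
 *
 *   async push(queue: string, message: string): Promise<void> {
 *     await this.redis.lpush(queue, message);
 *   }
 *
 *   async pop(queue: string): Promise<string | undefined> {
 *     return (await this.redis.rpop(queue)) ?? undefined;
 *   }
 * }
 * ```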
*/
declare abstract class QueueProvider {
/**
* Push a message to the queue.
*
* @param queue Name of the queue to push the message to.
* @param message String message to be pushed to the queue. Buffer messages are not supported for now.
*/
abstract push(queue: string, message: string): Promise<void>;
/**
* Pop a message from the queue.
*
* @param queue Name of the queue to pop the message from.
*
* @returns The message popped or `undefined` if the queue is empty.
*/
abstract pop(queue: string): Promise<string | undefined>;
}
//#endregion
//#region src/providers/MemoryQueueProvider.d.ts
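/**
 * In-memory {@link QueueProvider} implementation.
 *
 * Messages are kept in a plain in-process map of arrays, so they are lost when the
 * application stops. Intended for development and testing; use a persistent provider
 * in production.
 */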
declare class MemoryQueueProvider implements QueueProvider {
protected readonly log: _alepha_logger0.Logger;
protected queueList: Record<string, string[]>;
push(queue: string, ...messages: string[]): Promise<void>;
pop(queue: string): Promise<string | undefined>;
}
//#endregion
//#region src/providers/WorkerProvider.d.ts
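/**
 * Environment variables used to tune the queue worker polling loop.
 *
 * `QUEUE_WORKER_INTERVAL` is the base delay between polls, `QUEUE_WORKER_MAX_INTERVAL`
 * caps how far that delay may grow while queues stay empty, and
 * `QUEUE_WORKER_CONCURRENCY` sets how many worker loops run in parallel.
 */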
declare const envSchema: _alepha_core1.TObject<{
/**
* The interval in milliseconds to wait before checking for new messages.
*/
QUEUE_WORKER_INTERVAL: _alepha_core1.TInteger;
/**
* The maximum interval in milliseconds to wait before checking for new messages.
*/
QUEUE_WORKER_MAX_INTERVAL: _alepha_core1.TInteger;
/**
* The number of workers to run concurrently. Defaults to 1.
 * Only useful if your handlers perform a lot of I/O.
*/
QUEUE_WORKER_CONCURRENCY: _alepha_core1.TInteger;
}>;
declare module "alepha" {
interface Env extends Partial<Static<typeof envSchema>> {}
}
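/**
 * Background worker that polls registered {@link Consumer}s for new messages.
 *
 * Workers are started on the application `start` hook and stopped gracefully on `stop`.
 * While queues stay empty, the polling interval grows from `QUEUE_WORKER_INTERVAL`
 * towards `QUEUE_WORKER_MAX_INTERVAL`; calling {@link WorkerProvider.wakeUp} forces the
 * workers back to work without waiting for the next poll.
 */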
declare class WorkerProvider {
protected readonly log: _alepha_logger0.Logger;
protected readonly env: {
QUEUE_WORKER_INTERVAL: number;
QUEUE_WORKER_MAX_INTERVAL: number;
QUEUE_WORKER_CONCURRENCY: number;
};
protected readonly alepha: Alepha;
protected readonly queueProvider: QueueProvider;
protected readonly dateTimeProvider: DateTimeProvider;
protected workerPromises: Array<Promise<void>>;
protected workersRunning: number;
protected abortController: AbortController;
protected workerIntervals: Record<number, number>;
protected consumers: Array<Consumer>;
get isRunning(): boolean;
protected readonly start: _alepha_core1.HookDescriptor<"start">;
/**
* Start the workers.
 * This method creates an endless loop that checks for new messages!
*/
protected startWorkers(): void;
protected readonly stop: _alepha_core1.HookDescriptor<"stop">;
/**
* Wait for the next message, where `n` is the worker number.
*
 * This method waits for a certain amount of time, increasing the wait interval each time no message is found.
*/
protected waitForNextMessage(n: number): Promise<void>;
/**
* Get the next message.
*/
protected getNextMessage(): Promise<undefined | NextMessage>;
/**
* Process a message from a queue.
*/
protected processMessage(response: {
message: any;
consumer: Consumer;
}): Promise<void>;
/**
* Stop the workers.
*
* This method will stop the workers and wait for them to finish processing.
*/
protected stopWorkers(): Promise<void>;
/**
* Force the workers to get back to work.
*/
wakeUp(): void;
}
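/**
 * Pairs a queue descriptor with the handler that consumes its messages.
 * Consumers are registered with the {@link WorkerProvider} for background processing.
 */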
interface Consumer<T extends TSchema = TSchema> {
queue: QueueDescriptor<T>;
handler: (message: QueueMessage<T>) => Promise<void>;
}
interface NextMessage {
consumer: Consumer;
message: string;
}
//#endregion
//#region src/descriptors/$queue.d.ts
/**
* Creates a queue descriptor for asynchronous message processing with background workers.
*
* The $queue descriptor enables powerful asynchronous communication patterns in your application.
* It provides type-safe message queuing with automatic worker processing, making it perfect for
* decoupling components and handling background tasks efficiently.
*
* **Background Processing**
* - Automatic worker threads for non-blocking message processing
* - Built-in retry mechanisms and error handling
* - Dead letter queues for failed message handling
* - Graceful shutdown and worker lifecycle management
*
* **Type Safety**
* - Full TypeScript support with schema validation using TypeBox
* - Type-safe message payloads with automatic inference
* - Runtime validation of all queued messages
* - Compile-time errors for invalid message structures
*
* **Storage Flexibility**
* - Memory provider for development and testing
* - Redis provider for production scalability and persistence
* - Custom provider support for specialized backends
* - Automatic failover and connection pooling
*
* **Performance & Scalability**
* - Batch processing support for high-throughput scenarios
* - Horizontal scaling with distributed queue backends
* - Configurable concurrency and worker pools
* - Efficient serialization and message routing
*
* **Reliability**
* - Message persistence across application restarts
* - Automatic retry with exponential backoff
* - Dead letter handling for permanently failed messages
* - Comprehensive logging and monitoring integration
*
* @example Basic notification queue
* ```typescript
* const emailQueue = $queue({
* name: "email-notifications",
* schema: t.object({
* to: t.string(),
* subject: t.string(),
* body: t.string(),
* priority: t.optional(t.enum(["high", "normal"]))
* }),
* handler: async (message) => {
* await emailService.send(message.payload);
* console.log(`Email sent to ${message.payload.to}`);
* }
* });
*
* // Push messages for background processing
* await emailQueue.push({
* to: "user@example.com",
* subject: "Welcome!",
* body: "Welcome to our platform",
* priority: "high"
* });
* ```
*
* @example Batch processing with Redis
* ```typescript
* const imageQueue = $queue({
* name: "image-processing",
* provider: RedisQueueProvider,
* schema: t.object({
* imageId: t.string(),
* operations: t.array(t.enum(["resize", "compress", "thumbnail"]))
* }),
* handler: async (message) => {
* for (const op of message.payload.operations) {
* await processImage(message.payload.imageId, op);
* }
* }
* });
*
* // Batch processing multiple images
* await imageQueue.push(
* { imageId: "img1", operations: ["resize", "thumbnail"] },
* { imageId: "img2", operations: ["compress"] },
* { imageId: "img3", operations: ["resize", "compress", "thumbnail"] }
* );
* ```
*
* @example Development with memory provider
* ```typescript
* const taskQueue = $queue({
* name: "dev-tasks",
* provider: "memory",
* schema: t.object({
* taskType: t.enum(["cleanup", "backup", "report"]),
* data: t.record(t.string(), t.any())
* }),
* handler: async (message) => {
* switch (message.payload.taskType) {
* case "cleanup":
* await performCleanup(message.payload.data);
* break;
* case "backup":
* await createBackup(message.payload.data);
* break;
* case "report":
* await generateReport(message.payload.data);
* break;
* }
* }
* });
* ```
*/
declare const $queue: {
<T extends TSchema>(options: QueueDescriptorOptions<T>): QueueDescriptor<T>;
[KIND]: typeof QueueDescriptor;
};
interface QueueDescriptorOptions<T extends TSchema> {
/**
* Unique name for the queue.
*
* This name is used for:
* - Queue identification across the system
* - Storage backend key generation
* - Logging and monitoring
* - Worker assignment and routing
*
* If not provided, defaults to the property key where the queue is declared.
*
* @example "email-notifications"
* @example "image-processing"
* @example "order-fulfillment"
*/
name?: string;
/**
* Human-readable description of the queue's purpose.
*
* Used for:
* - Documentation generation
* - Monitoring dashboards
* - Development team communication
* - Queue management interfaces
*
* @example "Process user registration emails and welcome sequences"
* @example "Handle image uploads, resizing, and thumbnail generation"
* @example "Manage order processing, payment, and shipping workflows"
*/
description?: string;
/**
* Queue storage provider configuration.
*
* Options:
* - **"memory"**: In-memory queue (default for development, lost on restart)
* - **Service<QueueProvider>**: Custom provider class (e.g., RedisQueueProvider)
* - **undefined**: Uses the default queue provider from dependency injection
*
* **Provider Selection Guidelines**:
* - Development: Use "memory" for fast, simple testing
* - Production: Use Redis or database-backed providers for persistence
* - High-throughput: Use specialized providers with connection pooling
* - Distributed systems: Use Redis or message brokers for scalability
*
* @default Uses injected QueueProvider
* @example "memory"
* @example RedisQueueProvider
* @example DatabaseQueueProvider
*/
provider?: "memory" | Service<QueueProvider>;
/**
* TypeBox schema defining the structure of messages in this queue.
*
* This schema:
* - Validates all messages pushed to the queue
* - Provides full TypeScript type inference
* - Ensures type safety between producers and consumers
* - Enables automatic serialization/deserialization
*
* **Schema Design Best Practices**:
* - Keep schemas simple and focused on the specific task
* - Use optional fields for data that might not always be available
* - Include version fields for schema evolution
* - Use union types for different message types in the same queue
*
* @example
* ```ts
* t.object({
* userId: t.string(),
* action: t.enum(["create", "update"]),
* data: t.record(t.string(), t.any()),
* timestamp: t.optional(t.number())
* })
* ```
*/
schema: T;
/**
* Message handler function that processes queue messages.
*
* This function:
* - Runs in background worker threads for non-blocking processing
* - Receives type-safe message payloads based on the schema
* - Should be idempotent to handle potential retries
* - Can throw errors to trigger retry mechanisms
* - Has access to the full Alepha dependency injection container
*
* **Handler Best Practices**:
* - Keep handlers focused on a single responsibility
* - Use proper error handling and logging
* - Make operations idempotent when possible
* - Validate critical business logic within handlers
* - Consider using transactions for data consistency
*
* @param message - The queue message with validated payload
* @returns Promise that resolves when processing is complete
*
* @example
* ```ts
* handler: async (message) => {
* const { userId, email, template } = message.payload;
*
* try {
* await this.emailService.send({
* to: email,
* template,
* data: { userId }
* });
*
* await this.userService.markEmailSent(userId, template);
* } catch (error) {
* // Log error and let the queue system handle retries
* this.logger.error(`Failed to send email to ${email}`, error);
* throw error;
* }
* }
* ```
*/
handler?: (message: QueueMessage<T>) => Promise<void>;
}
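/**
 * Runtime descriptor created by {@link $queue}.
 *
 * Exposes {@link QueueDescriptor.push | push} to enqueue one or more payloads, which are
 * validated against the queue schema before being handed to the configured provider.
 */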
declare class QueueDescriptor<T extends TSchema> extends Descriptor<QueueDescriptorOptions<T>> {
protected readonly log: _alepha_logger0.Logger;
protected readonly workerProvider: WorkerProvider;
readonly provider: QueueProvider | MemoryQueueProvider;
push(...payloads: Array<Static<T>>): Promise<void>;
get name(): string;
protected $provider(): QueueProvider | MemoryQueueProvider;
}
interface QueueMessageSchema {
payload: TSchema;
}
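/**
 * A validated message as delivered to queue and consumer handlers.
 */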
interface QueueMessage<T extends TSchema> {
payload: Static<T>;
}
//#endregion
//#region src/descriptors/$consumer.d.ts
/**
* Creates a consumer descriptor to process messages from a specific queue.
*
* This descriptor creates a dedicated message consumer that connects to a queue and processes
* its messages using a custom handler function. Consumers provide a clean way to separate
* message production from consumption, enabling scalable architectures where multiple
* consumers can process messages from the same queue.
*
* **Key Features**
*
* - **Queue Integration**: Seamlessly connects to any $queue descriptor
* - **Type Safety**: Full TypeScript support inherited from the connected queue's schema
* - **Dedicated Processing**: Isolated message processing logic separate from the queue
* - **Worker Management**: Automatic integration with the worker system for background processing
* - **Error Handling**: Built-in error handling and retry mechanisms from the queue system
* - **Scalability**: Multiple consumers can process the same queue for horizontal scaling
*
* **Use Cases**
*
* Perfect for creating specialized message processors:
* - Dedicated email sending services
* - Image processing workers
* - Data synchronization tasks
* - Event handlers for specific domains
* - Microservice message consumers
* - Background job processors
*
* @example
* **Basic consumer setup:**
* ```ts
* import { $queue, $consumer } from "alepha/queue";
* import { t } from "alepha";
*
* class EmailService {
* // Define the queue
* emailQueue = $queue({
* name: "emails",
* schema: t.object({
* to: t.string(),
* subject: t.string(),
* body: t.string(),
* template: t.optional(t.string())
* })
* });
*
* // Create a dedicated consumer for this queue
* emailConsumer = $consumer({
* queue: this.emailQueue,
* handler: async (message) => {
* const { to, subject, body, template } = message.payload;
*
* if (template) {
* await this.sendTemplatedEmail(to, template, { subject, body });
* } else {
* await this.sendPlainEmail(to, subject, body);
* }
*
* console.log(`Email sent to ${to}: ${subject}`);
* }
* });
*
* async sendWelcomeEmail(userEmail: string) {
* // Push to queue - consumer will automatically process it
* await this.emailQueue.push({
* to: userEmail,
* subject: "Welcome!",
* body: "Thanks for joining our platform.",
* template: "welcome"
* });
* }
* }
* ```
*
* @example
* **Multiple specialized consumers for different message types:**
* ```ts
* class NotificationService {
* notificationQueue = $queue({
* name: "notifications",
* schema: t.object({
* type: t.enum(["email", "sms", "push"]),
* recipient: t.string(),
* message: t.string(),
* metadata: t.optional(t.record(t.string(), t.any()))
* })
* });
*
* // Email-specific consumer
* emailConsumer = $consumer({
* queue: this.notificationQueue,
* handler: async (message) => {
* if (message.payload.type === "email") {
* await this.emailProvider.send({
* to: message.payload.recipient,
* subject: message.payload.metadata?.subject || "Notification",
* body: message.payload.message
* });
* }
* }
* });
*
* // SMS-specific consumer
* smsConsumer = $consumer({
* queue: this.notificationQueue,
* handler: async (message) => {
* if (message.payload.type === "sms") {
* await this.smsProvider.send({
* to: message.payload.recipient,
* message: message.payload.message
* });
* }
* }
* });
*
* // Push notification consumer
* pushConsumer = $consumer({
* queue: this.notificationQueue,
* handler: async (message) => {
* if (message.payload.type === "push") {
* await this.pushProvider.send({
* deviceToken: message.payload.recipient,
* title: message.payload.metadata?.title || "Notification",
* body: message.payload.message
* });
* }
* }
* });
* }
* ```
*
* @example
* **Consumer with advanced error handling and logging:**
* ```ts
* class OrderProcessor {
* orderQueue = $queue({
* name: "order-processing",
* schema: t.object({
* orderId: t.string(),
* customerId: t.string(),
* items: t.array(t.object({
* productId: t.string(),
* quantity: t.number(),
* price: t.number()
* }))
* })
* });
*
* orderConsumer = $consumer({
* queue: this.orderQueue,
* handler: async (message) => {
* const { orderId, customerId, items } = message.payload;
*
* try {
* // Log processing start
* this.logger.info(`Processing order ${orderId} for customer ${customerId}`);
*
* // Validate inventory
* await this.validateInventory(items);
*
* // Process payment
* const paymentResult = await this.processPayment(orderId, items);
* if (!paymentResult.success) {
* throw new Error(`Payment failed: ${paymentResult.error}`);
* }
*
* // Update inventory
* await this.updateInventory(items);
*
* // Create shipment
* await this.createShipment(orderId, customerId);
*
* // Send confirmation
* await this.sendOrderConfirmation(customerId, orderId);
*
* this.logger.info(`Order ${orderId} processed successfully`);
*
* } catch (error) {
* // Log detailed error information
* this.logger.error(`Failed to process order ${orderId}`, {
* error: error.message,
* orderId,
* customerId,
* itemCount: items.length
* });
*
* // Re-throw to trigger queue retry mechanism
* throw error;
* }
* }
* });
* }
* ```
*
* @example
* **Consumer for batch processing with performance optimization:**
* ```ts
* class DataProcessor {
* dataQueue = $queue({
* name: "data-processing",
* schema: t.object({
* batchId: t.string(),
* records: t.array(t.object({
* id: t.string(),
* data: t.record(t.string(), t.any())
* })),
* processingOptions: t.object({
* validateData: t.boolean(),
* generateReport: t.boolean(),
* notifyCompletion: t.boolean()
* })
* })
* });
*
* dataConsumer = $consumer({
* queue: this.dataQueue,
* handler: async (message) => {
* const { batchId, records, processingOptions } = message.payload;
* const startTime = Date.now();
*
* this.logger.info(`Starting batch processing for ${batchId} with ${records.length} records`);
*
* try {
* // Process records in chunks for better performance
* const chunkSize = 100;
* const chunks = this.chunkArray(records, chunkSize);
*
* for (let i = 0; i < chunks.length; i++) {
* const chunk = chunks[i];
*
* if (processingOptions.validateData) {
* await this.validateChunk(chunk);
* }
*
* await this.processChunk(chunk);
*
* // Log progress
* const progress = ((i + 1) / chunks.length) * 100;
* this.logger.debug(`Batch ${batchId} progress: ${progress.toFixed(1)}%`);
* }
*
* if (processingOptions.generateReport) {
* await this.generateProcessingReport(batchId, records.length);
* }
*
* if (processingOptions.notifyCompletion) {
* await this.notifyBatchCompletion(batchId);
* }
*
* const duration = Date.now() - startTime;
* this.logger.info(`Batch ${batchId} completed in ${duration}ms`);
*
* } catch (error) {
* const duration = Date.now() - startTime;
* this.logger.error(`Batch ${batchId} failed after ${duration}ms`, error);
* throw error;
* }
* }
* });
* }
* ```
*/
declare const $consumer: {
<T extends TSchema>(options: ConsumerDescriptorOptions<T>): ConsumerDescriptor<T>;
[KIND]: typeof ConsumerDescriptor;
};
interface ConsumerDescriptorOptions<T extends TSchema> {
/**
* The queue descriptor that this consumer will process messages from.
*
* This establishes the connection between the consumer and its source queue:
* - The consumer inherits the queue's message schema for type safety
* - Messages pushed to the queue will be automatically routed to this consumer
* - Multiple consumers can be attached to the same queue for parallel processing
* - The consumer will use the queue's provider and configuration settings
*
* **Queue Integration Benefits**:
* - Type safety: Consumer handler gets fully typed message payloads
* - Schema validation: Messages are validated before reaching the consumer
* - Error handling: Failed messages can be retried or moved to dead letter queues
* - Monitoring: Queue metrics include consumer processing statistics
*
* @example
* ```ts
* // First, define a queue
* emailQueue = $queue({
* name: "emails",
* schema: t.object({ to: t.string(), subject: t.string() })
* });
*
* // Then, create a consumer for that queue
* emailConsumer = $consumer({
* queue: this.emailQueue, // Reference the queue descriptor
* handler: async (message) => { } // process email
* });
* ```
*/
queue: QueueDescriptor<T>;
/**
* Message handler function that processes individual messages from the queue.
*
* This function:
* - Receives fully typed and validated message payloads from the connected queue
* - Runs in the background worker system for non-blocking operation
* - Should implement the core business logic for processing this message type
* - Can throw errors to trigger the queue's retry mechanisms
* - Has access to the full Alepha dependency injection container
* - Should be idempotent to handle potential duplicate deliveries
*
* **Handler Design Guidelines**:
* - Keep handlers focused on a single responsibility
* - Use proper error handling and meaningful error messages
* - Log important processing steps for debugging and monitoring
* - Consider transaction boundaries for data consistency
* - Make operations idempotent when possible
* - Validate business rules within the handler logic
*
* **Error Handling Strategy**:
* - Throw errors for temporary failures that should be retried
* - Log and handle permanent failures gracefully
* - Use specific error types to control retry behavior
* - Consider implementing circuit breakers for external service calls
*
* @param message - The queue message containing the validated payload
* @param message.payload - The typed message data based on the queue's schema
* @returns Promise that resolves when processing is complete
*
* @example
* ```ts
* handler: async (message) => {
* const { userId, action, data } = message.payload;
*
* try {
* // Log processing start
* this.logger.info(`Processing ${action} for user ${userId}`);
*
* // Validate business rules
* if (!await this.userService.exists(userId)) {
* throw new Error(`User ${userId} not found`);
* }
*
* // Perform the main processing logic
* switch (action) {
* case "create":
* await this.processCreation(userId, data);
* break;
* case "update":
* await this.processUpdate(userId, data);
* break;
* default:
* throw new Error(`Unknown action: ${action}`);
* }
*
* // Log successful completion
* this.logger.info(`Successfully processed ${action} for user ${userId}`);
*
* } catch (error) {
* // Log error with context
* this.logger.error(`Failed to process ${action} for user ${userId}`, {
* error: error.message,
* userId,
* action,
* data
* });
*
* // Re-throw to trigger queue retry mechanism
* throw error;
* }
* }
* ```
*/
handler: (message: {
payload: Static<T>;
}) => Promise<void>;
}
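/**
 * Runtime descriptor created by {@link $consumer}; it registers the handler with the
 * worker system as a consumer of the configured queue.
 */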
declare class ConsumerDescriptor<T extends TSchema> extends Descriptor<ConsumerDescriptorOptions<T>> {}
//#endregion
//#region src/index.d.ts
/**
* Provides asynchronous message queuing and processing capabilities through declarative queue descriptors.
*
* The queue module enables reliable background job processing and message passing using the `$queue` descriptor
* on class properties. It supports schema validation, automatic retries, and multiple queue backends for
* building scalable, decoupled applications with robust error handling.
*
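 * @example
 * A minimal end-to-end sketch using only the APIs documented in this module
 * (names are illustrative; application bootstrapping and module registration are omitted):
 * ```ts
 * import { t } from "alepha";
 * import { $queue } from "alepha/queue";
 *
 * class ReportService {
 *   reportQueue = $queue({
 *     name: "reports",
 *     schema: t.object({ reportId: t.string() }),
 *     handler: async (message) => {
 *       // runs in the background worker
 *       console.log(`Generating report ${message.payload.reportId}`);
 *     },
 *   });
 *
 *   async requestReport(reportId: string) {
 *     await this.reportQueue.push({ reportId });
 *   }
 * }
 * ```
 *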
* @see {@link $queue}
* @see {@link $consumer}
* @module alepha.queue
*/
declare const AlephaQueue: _alepha_core1.Service<_alepha_core1.Module>;
//#endregion
export { $consumer, $queue, AlephaQueue, ConsumerDescriptor, ConsumerDescriptorOptions, MemoryQueueProvider, QueueDescriptor, QueueDescriptorOptions, QueueMessage, QueueMessageSchema, QueueProvider };
//# sourceMappingURL=index.d.ts.map