firestore-queue

A powerful, scalable queue system built on Google Firestore, with time-based indexing, auto-configuration, and connection reuse.

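The file below (data-entry-examples.js) walks through eight different write patterns. Before the full walkthrough, here is a minimal sketch of a single enqueue, based on the APIs used in that file. It assumes the package re-exports FirestoreWriter from its root entry point (the example file itself imports from internal paths such as '../writers'), so treat the import specifier and option names as illustrative rather than authoritative.

// Minimal sketch, assuming a root-level export of FirestoreWriter.
import { FirestoreWriter } from 'firestore-queue';

async function main() {
  const writer = new FirestoreWriter({
    projectId: 'your-project-id',  // GCP project hosting Firestore
    queueName: 'data_entry_queue', // logical queue (Firestore collection) to write into
  });

  // Enqueue one message; the fields mirror those used throughout the examples below.
  const result = await writer.write({
    type: 'user_signup',
    payload: { userId: 'user123', email: 'user@example.com', timestamp: Date.now() },
    priority: 2,
    tags: ['user', 'signup'],
  });
  console.log(result.success ? 'queued' : `failed: ${result.error}`);

  await writer.close();
}

main().catch(console.error);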
#!/usr/bin/env ts-node
"use strict";
/**
 * Fire Queue Data Entry Examples
 * Demonstrates multiple ways to write data into the queue
 */
Object.defineProperty(exports, "__esModule", { value: true });
exports.dataEntryExamples = dataEntryExamples;
const writers_1 = require("../writers");
const factory_1 = require("../utils/factory");
async function dataEntryExamples() {
    console.log('📝 Fire Queue Data Entry Examples');
    console.log('=================================\n');
    const projectId = 'your-project-id';
    const queueName = 'data_entry_queue';
    // Example 1: Direct Firestore Writing
    console.log('1️⃣ Direct Firestore Writing');
    console.log('----------------------------');
    const firestoreWriter = new writers_1.FirestoreWriter({
        projectId,
        queueName,
        validatePayload: true,
        enableBatching: true,
        batchSize: 25,
        batchTimeoutMs: 3000,
        enableMetrics: true,
        metricsCallback: (metrics) => {
            console.log(` 📊 Metrics: ${metrics.successfulWrites}/${metrics.totalWrites} successful`);
        },
    });
    // Write individual messages
    await firestoreWriter.write({
        type: 'user_signup',
        payload: { userId: 'user123', email: 'user@example.com', timestamp: Date.now() },
        priority: 2,
        tags: ['user', 'signup'],
    });
    // Write batch of messages
    const signupMessages = Array.from({ length: 10 }, (_, i) => ({
        type: 'user_signup',
        payload: { userId: `user${i + 100}`, email: `user${i + 100}@example.com`, timestamp: Date.now() },
        priority: 3,
        tags: ['user', 'signup', 'batch'],
    }));
    const batchResult = await firestoreWriter.writeBatch(signupMessages);
    console.log(` ✅ Batch write: ${batchResult.successfulWrites}/${batchResult.totalMessages} successful`);
    // Example 2: HTTP Writing with Server
    console.log('\n2️⃣ HTTP Writing');
    console.log('----------------');
    // Start HTTP server
    const httpServer = new writers_1.HTTPQueueServer(3001);
    const queue = (0, factory_1.createQuickQueue)(projectId, { queueName });
    await queue.initialize();
    httpServer.registerQueue(queueName, queue);
    await httpServer.start();
    // Create HTTP client
    const httpWriter = new writers_1.HTTPWriter({
        endpoint: 'http://localhost:3001',
        queueName,
        projectId,
        timeout: 10000,
        retryAttempts: 2,
    });
    // Write via HTTP
    const httpResult = await httpWriter.write({
        type: 'api_request',
        payload: { endpoint: '/users', method: 'POST', data: { name: 'John Doe' } },
        priority: 1,
        tags: ['api', 'external'],
    });
    console.log(` ✅ HTTP write: ${httpResult.success ? 'success' : 'failed'} (${httpResult.latencyMs}ms)`);
    // Example 3: Bulk Writing for High Volume
    console.log('\n3️⃣ Bulk Writing');
    console.log('----------------');
    const bulkWriter = new writers_1.BulkWriter({
        projectId,
        queueName,
        batchSize: 100,
        flushIntervalMs: 2000,
        maxBufferSize: 1000,
        enableDeduplication: true,
    });
    // Add messages one by one (they get batched automatically)
    for (let i = 0; i < 50; i++) {
        await bulkWriter.add({
            type: 'analytics_event',
            payload: {
                event: 'page_view',
                userId: `user${i}`,
                page: `/page${i % 5}`,
                timestamp: Date.now(),
            },
            priority: 5,
            tags: ['analytics'],
        });
    }
    // Add from CSV data
    const csvData = `
userId,event,page,timestamp
user001,click,/home,${Date.now()}
user002,view,/about,${Date.now()}
user003,scroll,/contact,${Date.now()}
`.trim();
    await bulkWriter.addFromCSV(csvData, {
        'userId': 'payload.userId',
        'event': 'payload.event',
        'page': 'payload.page',
        'timestamp': 'payload.timestamp',
    }, {
        type: 'csv_analytics',
        priority: 4,
        tags: ['csv', 'analytics'],
    });
    // Add from JSON Lines
    const jsonlData = `
{"type":"log_entry","payload":{"level":"info","message":"User logged in"},"priority":3}
{"type":"log_entry","payload":{"level":"warn","message":"Rate limit exceeded"},"priority":2}
{"type":"log_entry","payload":{"level":"error","message":"Database connection failed"},"priority":1}
`.trim();
    await bulkWriter.addFromJSONLines(jsonlData);
    // Force flush and show stats
    await bulkWriter.flush();
    const stats = bulkWriter.getStats();
    console.log(` 📊 Bulk stats: ${stats.totalQueued} queued, ${stats.batchesProcessed} batches`);
    // Example 4: Cloud Function Code Generation
    console.log('\n4️⃣ Cloud Function Generation');
    console.log('-----------------------------');
    const cloudFunctionWriter = new writers_1.CloudFunctionWriter({
        projectId,
        queueName,
        functionName: 'autoQueueWriter',
        triggerType: 'firestore',
        region: 'us-central1',
    });
    const functionCode = cloudFunctionWriter.generateCloudFunction();
    console.log(' 📄 Generated Cloud Function code:');
    console.log(functionCode.substring(0, 300) + '...');
    const packageJson = cloudFunctionWriter.generatePackageJson();
    console.log('\n 📦 Generated package.json:');
    console.log(JSON.stringify(packageJson, null, 2));
    // Example 5: Real-time Stream Processing
    console.log('\n5️⃣ Stream Processing');
    console.log('--------------------');
    // Simulate a data stream
    const { Readable } = require('stream');
    const dataStream = new Readable({
        objectMode: true,
        read() {
            // Generate streaming data
            for (let i = 0; i < 5; i++) {
                this.push(JSON.stringify({
                    type: 'stream_data',
                    payload: {
                        sensorId: `sensor${i}`,
                        value: Math.random() * 100,
                        timestamp: Date.now(),
                    },
                    priority: 3,
                    tags: ['sensor', 'stream'],
                }));
            }
            this.push(null); // End stream
        }
    });
    const streamBulkWriter = new writers_1.BulkWriter({
        projectId,
        queueName: 'stream_queue',
        batchSize: 10,
        flushIntervalMs: 1000,
    });
    await streamBulkWriter.addFromStream(dataStream);
    console.log(' ✅ Stream processing completed');
    // Example 6: Configuration-driven Writing
    console.log('\n6️⃣ Configuration-driven Setup');
    console.log('------------------------------');
    // Example configuration file
    const writerConfigs = [
        {
            type: 'firestore',
            name: 'primary_writer',
            config: {
                projectId,
                queueName: 'primary_queue',
                enableBatching: true,
                batchSize: 50,
            },
        },
        {
            type: 'http',
            name: 'backup_writer',
            config: {
                endpoint: 'https://backup-queue-api.com',
                queueName: 'backup_queue',
                projectId,
                authToken: 'your-api-token',
            },
        },
    ];
    for (const writerConfig of writerConfigs) {
        console.log(` 🔧 Setting up ${writerConfig.name} (${writerConfig.type})`);
        if (writerConfig.type === 'firestore') {
            const writer = new writers_1.FirestoreWriter(writerConfig.config);
            await writer.write({
                type: 'config_test',
                payload: { message: `Hello from ${writerConfig.name}` },
                priority: 3,
            });
        }
    }
    // Example 7: Error Handling and Retries
    console.log('\n7️⃣ Error Handling');
    console.log('------------------');
    const robustWriter = new writers_1.FirestoreWriter({
        projectId,
        queueName,
        retryAttempts: 3,
        retryDelayMs: 500,
        deadLetterQueue: 'failed_messages',
    });
    // Test with invalid message
    const invalidResult = await robustWriter.write({
        type: '', // Invalid: empty type
        payload: { test: true },
        priority: 3,
    });
    console.log(` ⚠️ Invalid message result: ${invalidResult.success ? 'passed' : 'failed'} - ${invalidResult.error}`);
    // Example 8: Performance Monitoring
    console.log('\n8️⃣ Performance Monitoring');
    console.log('--------------------------');
    const monitoredWriter = new writers_1.FirestoreWriter({
        projectId,
        queueName,
        enableMetrics: true,
        metricsCallback: (metrics) => {
            if (metrics.totalWrites % 10 === 0) {
                console.log(` 📈 Performance: ${metrics.averageLatencyMs.toFixed(1)}ms avg latency`);
            }
        },
    });
    // Write messages and monitor performance
    const performanceTest = Array.from({ length: 20 }, (_, i) => monitoredWriter.write({
        type: 'performance_test',
        payload: { iteration: i, timestamp: Date.now() },
        priority: 5,
    }));
    await Promise.all(performanceTest);
    const finalMetrics = monitoredWriter.getMetrics();
    console.log(` 📊 Final metrics: ${finalMetrics.successfulWrites} writes, ${finalMetrics.averageLatencyMs.toFixed(1)}ms avg`);
    // Cleanup
    console.log('\n🧹 Cleaning up...');
    await Promise.all([
        firestoreWriter.close(),
        bulkWriter.close(),
        streamBulkWriter.close(),
        robustWriter.close(),
        monitoredWriter.close(),
        httpServer.stop(),
        queue.shutdown(),
    ]);
    console.log('✅ Data entry examples completed!');
}
// Run examples
if (require.main === module) {
    dataEntryExamples().catch(console.error);
}
//# sourceMappingURL=data-entry-examples.js.map