digitaltwin-core
Version:
Minimalist framework to collect and handle data in a Digital Twin project
JavaScript
import { Worker } from 'bullmq';
import { extractAndStoreArchive } from '../utils/zip_utils.js';
import fs from 'fs/promises';
/**
 * Background worker for processing large file uploads (tileset extraction).
 * Prevents HTTP timeouts by queuing jobs and processing them asynchronously.
 *
 * Flow:
 * 1. Read the ZIP from the temp file
 * 2. Extract and upload all files to storage (OVH S3)
 * 3. Update the database with tileset_url and base_path
 * 4. Clean up the temp file
 */
export class UploadProcessor {
    constructor(storage, db) {
        this.worker = null;
        this.storage = storage;
        this.db = db;
    }
    start(connection) {
        // Process up to 2 jobs in parallel, rate-limited to 5 jobs per minute
        this.worker = new Worker('dt-uploads', async (job) => this.processJob(job), {
            connection,
            concurrency: 2,
            limiter: { max: 5, duration: 60000 }
        });
        this.worker.on('completed', job => console.log(`[UploadProcessor] Job ${job.id} completed`));
        this.worker.on('failed', (job, err) => console.error(`[UploadProcessor] Job ${job?.id} failed:`, err.message));
    }
    async stop() {
        if (this.worker) {
            await this.worker.close();
            this.worker = null;
        }
    }
    async processJob(job) {
        if (job.data.type === 'tileset') {
            await this.processTilesetUpload(job);
        }
        else {
            throw new Error(`Unknown upload job type: ${job.data.type}`);
        }
    }
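    /**
     * Expected job.data shape for 'tileset' jobs. The field names match the
     * destructuring below; the values here are purely illustrative:
     *   { type: 'tileset', recordId: 42, tempFilePath: '/tmp/upload.zip', componentName: 'tilesets' }
     */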
    async processTilesetUpload(job) {
        const { recordId, tempFilePath, componentName } = job.data;
        let basePath = null;
        try {
            await this.updateRecordStatus(recordId, componentName, 'processing');
            await job.updateProgress(10);
            // Read ZIP file
            const zipBuffer = await fs.readFile(tempFilePath).catch(err => {
                throw new Error(`Failed to read temp file: ${err.message}`);
            });
            await job.updateProgress(20);
            // Generate unique base path
            basePath = `${componentName}/${Date.now()}`;
            // Extract and upload all files to storage
            const extractResult = await extractAndStoreArchive(zipBuffer, this.storage, basePath);
            await job.updateProgress(80);
            // Validate tileset.json exists
            if (!extractResult.root_file) {
                // Clean up uploaded files
                await this.storage.deleteByPrefix(basePath).catch(() => { });
                throw new Error('Invalid tileset: no tileset.json found in the ZIP archive');
            }
            // Build the public URL for tileset.json
            const tilesetPath = `${basePath}/${extractResult.root_file}`;
            const tilesetUrl = this.storage.getPublicUrl(tilesetPath);
            // Update database record (url = basePath for deletion)
            await this.db.updateById(componentName, recordId, {
                url: basePath,
                tileset_url: tilesetUrl,
                upload_status: 'completed'
            });
            await job.updateProgress(90);
            // Clean up temp file
            await fs.unlink(tempFilePath).catch(() => { });
            await job.updateProgress(100);
            console.log(`[UploadProcessor] Tileset ${recordId} uploaded: ${extractResult.file_count} files`);
        }
        catch (error) {
            // Update record as failed (don't delete - keep for debugging)
            const errorMessage = error instanceof Error ? error.message : 'Unknown error';
            await this.db
                .updateById(componentName, recordId, {
                    upload_status: 'failed',
                    upload_error: errorMessage
                })
                .catch(() => { });
            // Clean up: uploaded files and temp file
            if (basePath) {
                await this.storage.deleteByPrefix(basePath).catch(() => { });
            }
            await fs.unlink(tempFilePath).catch(() => { });
            throw error;
        }
    }
    async updateRecordStatus(id, tableName, status) {
        await this.db.updateById(tableName, id, { upload_status: status });
    }
}
//# sourceMappingURL=upload_processor.js.map
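Below is a minimal usage sketch, not taken from the package itself: the import path, the job name, the payload values, and the storage/db stand-ins are assumptions for illustration only. It shows both sides of the flow described above: a producer (for example an HTTP upload handler) enqueuing a job on the 'dt-uploads' queue after writing the ZIP to a temp file, and the UploadProcessor consuming it.

import { Queue } from 'bullmq';
import { UploadProcessor } from 'digitaltwin-core'; // hypothetical import path; adjust to the package's actual export

// Redis connection options shared by the queue (producer) and the worker (consumer).
const connection = { host: '127.0.0.1', port: 6379 };

// Stand-ins for the real storage / db dependencies. Only the methods called in
// upload_processor.js are sketched; extractAndStoreArchive will also call the
// storage's upload methods, which are omitted here.
const storage = {
    getPublicUrl: (path) => `https://storage.example.com/${path}`,           // illustrative URL
    deleteByPrefix: async (prefix) => { /* delete every object under prefix */ }
};
const db = {
    updateById: async (table, id, fields) => { /* persist upload_status, tileset_url, ... */ }
};

// Consumer side: start the background worker on the 'dt-uploads' queue.
const processor = new UploadProcessor(storage, db);
processor.start(connection);

// Producer side: after the HTTP handler has written the uploaded ZIP to disk,
// enqueue a job instead of extracting inside the request (illustrative values).
const queue = new Queue('dt-uploads', { connection });
await queue.add('tileset-upload', {
    type: 'tileset',
    recordId: 42,
    tempFilePath: '/tmp/upload-42.zip',
    componentName: 'tilesets'
});

// Graceful shutdown, e.g. on SIGTERM.
await processor.stop();
await queue.close();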