// Package: @mbc-cqrs-serverless/import — compiled JavaScript output
// (zip-import.queue.event.handler.js)
// tslib-style helper (emitted by the TypeScript compiler): re-exports property
// `k` of module `m` onto object `o` under the name `k2` (defaults to `k`).
// When Object.create exists (ES5+), it installs a live getter so the re-export
// tracks later mutations of the source binding; otherwise it falls back to a
// one-time value copy.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
// Synthesize a fresh getter when the source has no own descriptor, or when the
// existing one would not faithfully proxy the binding (an accessor on a
// non-ES-module, or a writable/configurable data property).
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
// Legacy engines: plain value copy (loses live-binding semantics).
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
// tslib-style helper: attaches a module namespace's "default" export.
// On ES5+ engines it installs a proper (enumerable, non-writable) property
// descriptor; on legacy engines it falls back to plain assignment.
var __setModuleDefault = (this && this.__setModuleDefault) || (function (target, value) {
    if (Object.create) {
        Object.defineProperty(target, "default", { enumerable: true, value: value });
    } else {
        target["default"] = value;
    }
});
// tslib-style helper: applies an array of decorators to a class (fewer than 3
// args) or to a member/descriptor (4 args). Decorators run right-to-left.
// `r` accumulates the result — the (possibly replaced) constructor for class
// decorators, or the property descriptor for member decorators; a decorator's
// truthy return value replaces the current accumulator.
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
// Prefer a native/polyfilled Reflect.decorate when one is available.
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
// For member decorators, re-install the (possibly replaced) descriptor.
return c > 3 && r && Object.defineProperty(target, key, r), r;
};
// tslib-style helper: wraps a CommonJS module in an ES-module-shaped namespace.
// A genuine ES module (__esModule flag set) is returned unchanged; otherwise
// every own key except "default" is re-exported onto a fresh object via
// __createBinding, and the original module itself becomes the namespace's
// "default" export via __setModuleDefault.
var __importStar = (this && this.__importStar) || (function () {
// The first call resolves the key-enumeration strategy (native
// getOwnPropertyNames vs. a for-in own-property fallback) and memoizes it
// by overwriting `ownKeys` with the chosen implementation.
var ownKeys = function(o) {
ownKeys = Object.getOwnPropertyNames || function (o) {
var ar = [];
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
return ar;
};
return ownKeys(o);
};
return function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
__setModuleDefault(result, mod);
return result;
};
})();
// tslib-style helper: emits decorator design-time metadata. It is a no-op
// (returns undefined) unless the reflect-metadata polyfill has installed
// Reflect.metadata.
var __metadata = (this && this.__metadata) || function (k, v) {
    var hasReflectMetadata = typeof Reflect === "object" && typeof Reflect.metadata === "function";
    return hasReflectMetadata ? Reflect.metadata(k, v) : undefined;
};
// Forward declaration: holds a self-reference to the decorated class (assigned
// at the __decorate call below) so instance code can read the class name even
// after decorators replace the constructor.
var ZipImportQueueEventHandler_1;
// CommonJS interop: mark this file as a transpiled ES module and pre-declare
// the named export binding before the class definition executes.
Object.defineProperty(exports, "__esModule", { value: true });
exports.ZipImportQueueEventHandler = void 0;
const client_s3_1 = require("@aws-sdk/client-s3");
const lib_storage_1 = require("@aws-sdk/lib-storage");
const core_1 = require("@mbc-cqrs-serverless/core");
const common_1 = require("@nestjs/common");
const config_1 = require("@nestjs/config");
const JSZip = __importStar(require("jszip"));
const stream_1 = require("stream");
const constant_1 = require("../constant");
const enum_1 = require("../enum");
const import_service_1 = require("../import.service");
const import_queue_event_1 = require("./import.queue.event");
/**
 * Queue handler for "master" ZIP import jobs.
 *
 * When an ImportQueueEvent carries an import entity whose pk marks it as a
 * ZIP job, this handler downloads the archive from S3, extracts every CSV it
 * contains back into S3 under a job-scoped temp prefix, records the sorted
 * list of extracted keys on the master job, and finally starts the Step
 * Functions orchestrator that processes those files in order.
 */
let ZipImportQueueEventHandler = ZipImportQueueEventHandler_1 = class ZipImportQueueEventHandler {
    constructor(configService, sfnService, importService, s3Service) {
        this.configService = configService;
        this.sfnService = sfnService;
        this.importService = importService;
        this.s3Service = s3Service;
        this.logger = new common_1.Logger(ZipImportQueueEventHandler_1.name);
        // ARN of the orchestrator state machine, injected via environment config.
        this.zipOrchestratorArn = this.configService.get('SFN_IMPORT_ZIP_ORCHESTRATOR_ARN');
    }
    async execute(event) {
        const { importEntity, importKey } = event.importEvent;
        // Only master ZIP jobs are handled here; other import events are ignored.
        if (!importEntity.pk.startsWith(constant_1.ZIP_IMPORT_PK_PREFIX)) {
            return;
        }
        const zipJobAttributes = importEntity.attributes;
        this.logger.log(`Received master ZIP job from queue: ${importEntity.id} for file ${zipJobAttributes.key}`);
        try {
            await this.importService.updateStatus(importKey, enum_1.ImportStatusEnum.PROCESSING, { result: { step: 'Unzipping archive' } });
            const s3ReadStream = await this.getS3Stream(zipJobAttributes);
            const extractedFileKeys = await this.unzipAndUpload(s3ReadStream, zipJobAttributes, importEntity.id);
            if (extractedFileKeys.length === 0) {
                throw new Error('No CSV files found in the ZIP archive.');
            }
            // Alphabetical order defines the sequential processing order downstream.
            extractedFileKeys.sort();
            // Persist the extracted file list on the master job record.
            await this.importService.updateImportJob(importKey, {
                set: {
                    attributes: {
                        ...zipJobAttributes,
                        extractedFileKeys,
                    },
                },
            });
            // Hand the sorted list to the orchestrator; sub-workflows also need
            // the original bucket/tenant attributes passed through.
            await this.sfnService.startExecution(this.zipOrchestratorArn, {
                masterJobKey: importKey,
                sortedS3Keys: extractedFileKeys,
                parameters: {
                    bucket: zipJobAttributes.bucket,
                    tenantCode: zipJobAttributes.tenantCode,
                },
            }, `${zipJobAttributes.tenantCode}-zip-import-${Date.now()}`);
            this.logger.log(`Started ZIP Orchestrator Step Function for master job ${importEntity.id}`);
        }
        catch (error) {
            this.logger.error(`Failed to process ZIP job ${importEntity.id}`, error);
            // Record the failure on the job before re-throwing so the queue
            // infrastructure can retry or dead-letter the message.
            await this.importService.updateStatus(importKey, enum_1.ImportStatusEnum.FAILED, {
                error: {
                    message: `Failed during unzip and preparation: ${error.message}`,
                },
            });
            throw error;
        }
    }
    /**
     * Fetches the ZIP object from S3 and returns its body as a Node stream.
     * Throws when the SDK hands back anything other than a Readable.
     */
    async getS3Stream(attributes) {
        const command = new client_s3_1.GetObjectCommand({
            Bucket: attributes.bucket,
            Key: attributes.key,
        });
        const { Body: s3Stream } = await this.s3Service.client.send(command);
        if (!(s3Stream instanceof stream_1.Readable)) {
            throw new Error('Failed to get a readable stream from S3 object.');
        }
        return s3Stream;
    }
    /**
     * Drains a Readable stream into a single Buffer. JSZip needs the complete
     * archive in memory before it can parse the central directory.
     */
    streamToBuffer(stream) {
        return new Promise((resolve, reject) => {
            const chunks = [];
            stream.on('error', reject);
            stream.on('data', (chunk) => chunks.push(chunk));
            stream.on('end', () => resolve(Buffer.concat(chunks)));
        });
    }
    /**
     * Extracts every CSV entry from the ZIP archive and uploads each one to S3
     * under a temp prefix scoped to this job. Returns the uploaded keys
     * (unsorted). Implemented with `jszip`, which is tolerant of mildly
     * malformed archives.
     */
    async unzipAndUpload(zipStream, attributes, jobId) {
        // Sanitize the job id so it is safe to embed in an S3 key.
        const safeJobId = jobId.replace(/[^a-zA-Z0-9]/g, '_');
        const tempS3Prefix = `unzipped/${attributes.tenantCode}/${safeJobId}`;
        // JSZip operates on a complete buffer rather than a stream, so the whole
        // archive is buffered in memory first.
        const zip = await JSZip.loadAsync(await this.streamToBuffer(zipStream));
        const extractedFileKeys = [];
        const uploadPromises = [];
        for (const entry of Object.values(zip.files)) {
            // Only CSV file entries are interesting; directories are skipped.
            if (entry.dir || !entry.name.toLowerCase().endsWith('.csv')) {
                continue;
            }
            this.logger.debug(`Extracting ${entry.name} ...`);
            // Decompress this entry fully into memory.
            const contentBuffer = await entry.async('nodebuffer');
            const s3UploadKey = `${tempS3Prefix}/${entry.name}`;
            extractedFileKeys.push(s3UploadKey);
            // Start the upload immediately; all uploads are awaited together below.
            uploadPromises.push(new lib_storage_1.Upload({
                client: this.s3Service.client,
                params: {
                    Bucket: attributes.bucket,
                    Key: s3UploadKey,
                    Body: contentBuffer,
                },
            }).done());
        }
        await Promise.all(uploadPromises);
        this.logger.log(`Finished unzipping with JSZip. Uploaded ${extractedFileKeys.length} CSV files.`);
        return extractedFileKeys;
    }
};
exports.ZipImportQueueEventHandler = ZipImportQueueEventHandler;
// Apply the @EventHandler(ImportQueueEvent) decorator and emit constructor
// parameter metadata ("design:paramtypes") so Nest's dependency injection can
// resolve ConfigService, StepFunctionService, ImportService and S3Service.
// The decorated class is written back to the local binding, the self-reference
// alias (ZipImportQueueEventHandler_1) and the module export.
exports.ZipImportQueueEventHandler = ZipImportQueueEventHandler = ZipImportQueueEventHandler_1 = __decorate([
(0, core_1.EventHandler)(import_queue_event_1.ImportQueueEvent),
__metadata("design:paramtypes", [config_1.ConfigService,
core_1.StepFunctionService,
import_service_1.ImportService,
core_1.S3Service])
], ZipImportQueueEventHandler);
//# sourceMappingURL=zip-import.queue.event.handler.js.map