UNPKG

media-exporter-processor

Version: (not captured in this extract)

Media processing API with thumbnail generation and cloud storage

188 lines 7.33 kB
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.UploadService = void 0;
const client_s3_1 = require("@aws-sdk/client-s3");
const s3_request_presigner_1 = require("@aws-sdk/s3-request-presigner");
/**
 * UploadService — thin wrapper around the AWS SDK v3 S3 client for storing
 * media files (videos/images) and their thumbnails in an S3-compatible
 * bucket (endpoint-configurable; comments suggest Cloudflare R2).
 *
 * Key layout:
 *   originals:  [{prefix}/]original/{timestamp}-{filename}
 *   thumbnails: [{prefix}/]thumbnails/{originalBaseKey}-{size}x{size}.webp
 */
class UploadService {
    /**
     * @param {object} config
     * @param {string} [config.region]   - S3 region; defaults to "auto".
     * @param {string} [config.endpoint] - Custom endpoint (e.g. an R2 endpoint).
     * @param {string} config.accessKeyId
     * @param {string} config.secretAccessKey
     * @param {string} config.bucket     - Target bucket name.
     */
    constructor(config) {
        this.config = config;
        this.s3Client = new client_s3_1.S3Client({
            region: config.region || "auto",
            endpoint: config.endpoint,
            credentials: {
                accessKeyId: config.accessKeyId,
                secretAccessKey: config.secretAccessKey,
            },
            // Path-style addressing (bucket in the URL path rather than the
            // hostname) — required by many S3-compatible stores such as R2.
            forcePathStyle: true,
        });
    }
    /**
     * Upload a video buffer under `[{prefix}/]original/{timestamp}-{filename}`.
     *
     * @param {Buffer} videoBuffer - Raw video bytes (stored as video/mp4).
     * @param {string} filename    - Original filename, embedded in the key.
     * @param {string} [prefix]    - Optional key-path prefix.
     * @param {Object<string,string>} [metadata] - Optional S3 object metadata.
     * @returns {Promise<{key: string, url: string, size: number}>}
     * @throws Re-throws any S3 send error after logging it.
     */
    async uploadVideo(videoBuffer, filename, prefix, metadata) {
        // BUGFIX: the key previously ended in the literal text "$(unknown)" and
        // ignored the `filename` argument entirely — restore `${filename}` so
        // keys are readable and unique beyond millisecond resolution.
        const baseKey = `${Date.now()}-${filename}`;
        const key = prefix ? `${prefix}/original/${baseKey}` : `original/${baseKey}`;
        try {
            const command = new client_s3_1.PutObjectCommand({
                Bucket: this.config.bucket,
                Key: key,
                Body: videoBuffer,
                // NOTE(review): content type is hard-coded; assumes all videos
                // are mp4 — confirm against callers.
                ContentType: "video/mp4",
                ...(metadata && { Metadata: metadata }),
            });
            await this.s3Client.send(command);
            return {
                key,
                url: await this.getPublicUrl(key),
                size: videoBuffer.length,
            };
        }
        catch (error) {
            console.error(`Error uploading video to R2:`, error);
            throw error;
        }
    }
    /**
     * Upload a set of thumbnails (in parallel) alongside an uploaded original.
     *
     * @param {{size: number, buffer: Buffer}[]} thumbnails - WebP buffers keyed by square size.
     * @param {string} videoKey - Key of the original object; its base name seeds the thumbnail keys.
     * @param {string} [prefix] - Optional key-path prefix.
     * @returns {Promise<Object<number, {key: string, url: string, size: number}>>}
     *          Map from thumbnail size to its upload result.
     * @throws Re-throws the first failed upload (Promise.all is fail-fast).
     */
    async uploadThumbnails(thumbnails, videoKey, prefix) {
        const results = {};
        // Upload thumbnails in parallel
        const uploadPromises = thumbnails.map(async (thumbnail) => {
            // Extract the base filename from videoKey (remove prefix and original/ directory)
            const baseVideoKey = videoKey.includes("/original/")
                ? videoKey.split("/original/")[1]
                : videoKey.replace("original/", "");
            const thumbnailKey = `${baseVideoKey}-${thumbnail.size}x${thumbnail.size}.webp`;
            const key = prefix ? `${prefix}/thumbnails/${thumbnailKey}` : `thumbnails/${thumbnailKey}`;
            try {
                const command = new client_s3_1.PutObjectCommand({
                    Bucket: this.config.bucket,
                    Key: key,
                    Body: thumbnail.buffer,
                    ContentType: "image/webp",
                });
                await this.s3Client.send(command);
                results[thumbnail.size] = {
                    key,
                    url: await this.getPublicUrl(key),
                    size: thumbnail.buffer.length,
                };
            }
            catch (error) {
                console.error(`Error uploading thumbnail ${thumbnail.size}x${thumbnail.size} to R2:`, error);
                throw error;
            }
        });
        await Promise.all(uploadPromises);
        return results;
    }
    /**
     * Upload an image buffer under `[{prefix}/]original/{timestamp}-{filename}`.
     *
     * @param {Buffer} imageBuffer - Raw image bytes (stored as image/jpeg).
     * @param {string} filename    - Original filename, embedded in the key.
     * @param {string} [prefix]    - Optional key-path prefix.
     * @param {Object<string,string>} [metadata] - Optional S3 object metadata.
     * @returns {Promise<{key: string, url: string, size: number}>}
     * @throws Re-throws any S3 send error after logging it.
     */
    async uploadImage(imageBuffer, filename, prefix, metadata) {
        // BUGFIX: same "$(unknown)" artifact as uploadVideo — use the filename.
        const baseKey = `${Date.now()}-${filename}`;
        const key = prefix ? `${prefix}/original/${baseKey}` : `original/${baseKey}`;
        try {
            const command = new client_s3_1.PutObjectCommand({
                Bucket: this.config.bucket,
                Key: key,
                Body: imageBuffer,
                // NOTE(review): content type is hard-coded; assumes JPEG input —
                // confirm against callers.
                ContentType: "image/jpeg",
                ...(metadata && { Metadata: metadata }),
            });
            await this.s3Client.send(command);
            return {
                key,
                url: await this.getPublicUrl(key),
                size: imageBuffer.length,
            };
        }
        catch (error) {
            console.error(`Error uploading image to R2:`, error);
            throw error;
        }
    }
    /**
     * Upload a video and then its thumbnails (sequential: thumbnail keys are
     * derived from the video's key).
     *
     * @returns {Promise<{video: object, thumbnails: object, duration: number}>}
     */
    async uploadVideoWithThumbnails(videoBuffer, filename, duration, thumbnails, prefix, metadata) {
        // Upload video first
        const videoResult = await this.uploadVideo(videoBuffer, filename, prefix, metadata);
        // Then upload thumbnails
        const thumbnailResults = await this.uploadThumbnails(thumbnails, videoResult.key, prefix);
        return {
            video: videoResult,
            thumbnails: thumbnailResults,
            duration,
        };
    }
    /**
     * Upload an image and then its thumbnails (sequential: thumbnail keys are
     * derived from the image's key).
     *
     * @returns {Promise<{image: object, thumbnails: object}>}
     */
    async uploadImageWithThumbnails(imageBuffer, filename, thumbnails, prefix, metadata) {
        // Upload image first
        const imageResult = await this.uploadImage(imageBuffer, filename, prefix, metadata);
        // Then upload thumbnails
        const thumbnailResults = await this.uploadThumbnails(thumbnails, imageResult.key, prefix);
        return {
            image: imageResult,
            thumbnails: thumbnailResults,
        };
    }
    /**
     * Delete a single object from the bucket.
     * @param {string} key - Object key to delete.
     */
    async deleteFile(key) {
        const command = new client_s3_1.DeleteObjectCommand({
            Bucket: this.config.bucket,
            Key: key,
        });
        await this.s3Client.send(command);
    }
    /**
     * Delete a video and all of its thumbnails. Uses Promise.allSettled so one
     * failed delete does not abort the rest; individual failures are not
     * reported to the caller.
     *
     * @param {string} videoKey
     * @param {string[]} thumbnailKeys
     */
    async deleteVideoWithThumbnails(videoKey, thumbnailKeys) {
        const deletePromises = [
            this.deleteFile(videoKey),
            ...thumbnailKeys.map((key) => this.deleteFile(key)),
        ];
        await Promise.allSettled(deletePromises);
    }
    /**
     * Construct a public (unsigned) URL for a key.
     *
     * For public buckets, construct the URL directly; for private buckets use
     * getPresignedUrl instead.
     *
     * NOTE(review): this builds a virtual-hosted-style URL
     * (`https://{bucket}.{endpoint}/{key}`) while the client is configured with
     * forcePathStyle — verify the target provider serves public objects at
     * this hostname.
     *
     * @param {string} key
     * @returns {Promise<string>}
     */
    async getPublicUrl(key) {
        const endpoint = this.config.endpoint || "https://s3.amazonaws.com";
        const bucket = this.config.bucket;
        // Remove protocol from endpoint for URL construction
        const cleanEndpoint = endpoint.replace(/^https?:\/\//, "");
        return `https://${bucket}.${cleanEndpoint}/${key}`;
    }
    /**
     * Generate a presigned GET URL for a key.
     * @param {string} key
     * @param {number} [expiresIn=3600] - Validity in seconds.
     * @returns {Promise<string>}
     */
    async getPresignedUrl(key, expiresIn = 3600) {
        const command = new client_s3_1.GetObjectCommand({
            Bucket: this.config.bucket,
            Key: key,
        });
        return await (0, s3_request_presigner_1.getSignedUrl)(this.s3Client, command, { expiresIn });
    }
    /**
     * Generate a presigned PUT URL so a client can upload directly.
     * @param {string} key
     * @param {string} contentType - Content-Type the uploader must send.
     * @param {number} [expiresIn=3600] - Validity in seconds.
     * @returns {Promise<string>}
     */
    async getPresignedUploadUrl(key, contentType, expiresIn = 3600) {
        const command = new client_s3_1.PutObjectCommand({
            Bucket: this.config.bucket,
            Key: key,
            ContentType: contentType,
        });
        return await (0, s3_request_presigner_1.getSignedUrl)(this.s3Client, command, { expiresIn });
    }
    /**
     * Download a file from S3 and return it as a Buffer.
     *
     * @param {string} key
     * @returns {Promise<Buffer>}
     * @throws {Error} If the response has no Body for the key.
     */
    async downloadFile(key) {
        const command = new client_s3_1.GetObjectCommand({
            Bucket: this.config.bucket,
            Key: key,
        });
        const response = await this.s3Client.send(command);
        if (!response.Body) {
            throw new Error(`No file found in S3 for key: ${key}`);
        }
        // response.Body is a ReadableStream or Buffer depending on environment
        if (Buffer.isBuffer(response.Body)) {
            return response.Body;
        }
        // Node.js: response.Body is a Readable — drain it into one Buffer.
        const chunks = [];
        for await (const chunk of response.Body) {
            chunks.push(chunk);
        }
        return Buffer.concat(chunks);
    }
}
exports.UploadService = UploadService;
//# sourceMappingURL=UploadService.js.map