UNPKG

@bugsplat/symbol-upload

Version:
122 lines 5.49 kB
"use strict";
// Worker module for @bugsplat/symbol-upload: partitions symbol files across
// upload workers and streams each file (gzipped or zipped) to BugSplat.
var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; };
Object.defineProperty(exports, "__esModule", { value: true });
exports.UploadWorker = void 0;
exports.createWorkersFromSymbolFiles = createWorkersFromSymbolFiles;
const fs_1 = require("fs");
const promises_1 = require("node:fs/promises");
const node_path_1 = require("node:path");
// FIX: `crypto` was referenced as an implicit global (only a stable Node global
// from v19); require it explicitly so older supported Node versions work.
const node_crypto_1 = require("node:crypto");
const pretty_bytes_1 = __importDefault(require("pretty-bytes"));
const promise_retry_1 = __importDefault(require("promise-retry"));
const tmp_1 = require("./tmp");
/**
 * Creates UploadWorkers covering all `symbolFiles`.
 *
 * @param {object} workerPool - workerpool instance used for gzip/zip jobs.
 * @param {number} workerCount - maximum number of workers to create.
 * @param {Array} symbolFiles - symbol file infos ({ dbgId, moduleName, path }).
 * @param {Array} clients - [symbolsClient, versionsClient] spread into each worker.
 * @returns {UploadWorker[]} one worker per file when workers >= files,
 *   otherwise `workerCount` workers each owning a contiguous chunk.
 */
function createWorkersFromSymbolFiles(workerPool, workerCount, symbolFiles, clients) {
    const numberOfSymbols = symbolFiles.length;
    // Enough workers for one file each — no chunking needed.
    if (workerCount >= numberOfSymbols) {
        return symbolFiles.map((symbolFile, i) => new UploadWorker(i + 1, [symbolFile], workerPool, ...clients));
    }
    const symbolFilesChunks = splitToChunks(symbolFiles, workerCount);
    return symbolFilesChunks.map((chunk, i) => new UploadWorker(i + 1, chunk, workerPool, ...clients));
}
/**
 * Uploads a set of symbol files sequentially, compressing each one first and
 * retrying transient failures.
 */
class UploadWorker {
    /**
     * @param {number} id - 1-based worker id used in log messages.
     * @param {Array} symbolFileInfos - files this worker is responsible for.
     * @param {object} pool - workerpool used to run 'createGzipFile'/'createZipFile'.
     * @param {object} symbolsClient - client for dbgId (native) symbol uploads.
     * @param {object} versionsClient - client for zipped version uploads.
     */
    constructor(id, symbolFileInfos, pool, symbolsClient, versionsClient) {
        this.id = id;
        this.symbolFileInfos = symbolFileInfos;
        this.pool = pool;
        this.symbolsClient = symbolsClient;
        this.versionsClient = versionsClient;
        // Bound as instance properties so tests can stub filesystem access.
        this.createReadStream = fs_1.createReadStream;
        this.retryPromise = promise_retry_1.default;
        this.stat = promises_1.stat;
        this.toWeb = fs_1.ReadStream.toWeb;
    }
    /**
     * Uploads every symbol file assigned to this worker, one at a time.
     * @returns {Promise<Array<{name: string, size: number}>>} per-file results.
     */
    async upload(database, application, version) {
        console.log(`Worker ${this.id} uploading ${this.symbolFileInfos.length} symbol files...`);
        const results = [];
        // Sequential on purpose: parallel uploads within a worker would multiply
        // concurrent streams beyond what the worker pool is sized for.
        for (const symbolFileInfo of this.symbolFileInfos) {
            results.push(await this.uploadSingle(database, application, version, symbolFileInfo));
        }
        return results;
    }
    /**
     * Compresses (if needed) and uploads a single symbol file, retrying on
     * transient errors. Auth and max-size errors abort immediately.
     * @returns {Promise<{name: string, size: number}>} uploaded name and compressed size.
     */
    async uploadSingle(database, application, version, symbolFileInfo) {
        const { dbgId, moduleName, path } = symbolFileInfo;
        const fileName = (0, node_path_1.basename)(path);
        const uncompressedSize = await this.stat(path).then(stats => stats.size);
        const uuid = node_crypto_1.randomUUID();
        const isZip = (0, node_path_1.extname)(path).toLowerCase().includes('.zip');
        let client = this.versionsClient;
        let name = (0, node_path_1.basename)(path);
        let tmpFileName = '';
        if (dbgId && !isZip) {
            // Native symbol with a debug id: gzip and send via the symbols client.
            tmpFileName = (0, node_path_1.join)(tmp_1.tmpDir, `${fileName}-${dbgId}-${uuid}.gz`);
            client = this.symbolsClient;
            await this.pool.exec('createGzipFile', [path, tmpFileName]);
        }
        else if (!isZip) {
            // No debug id: zip and send via the versions client.
            name = `${name}.zip`;
            tmpFileName = (0, node_path_1.join)(tmp_1.tmpDir, `${fileName}-${dbgId}-${uuid}.zip`);
            await this.pool.exec('createZipFile', [path, tmpFileName]);
        }
        else {
            // Already a zip archive: upload as-is.
            tmpFileName = path;
        }
        const { mtime: lastModified } = await this.stat(path);
        const { size } = await this.stat(tmpFileName);
        const startTime = new Date();
        console.log(`Worker ${this.id} uploading ${name}...`);
        await this.retryPromise(async (retry) => {
            // A fresh stream per attempt — a consumed stream cannot be replayed.
            const symFileReadStream = this.createReadStream(tmpFileName);
            const file = this.toWeb(symFileReadStream);
            const symbolFile = { name, size, file, uncompressedSize, dbgId, lastModified, moduleName };
            return client.postSymbols(database, application, version, [symbolFile])
                .catch((error) => {
                // Don't try and cancel the web stream, it's locked by the tee operation in the symbols client.
                // Cancelling the file stream should be safe and seems like a good thing to do...
                symFileReadStream.destroy();
                if (isAuthenticationError(error)) {
                    console.error(`Worker ${this.id} failed to upload ${name}: ${error.message}!`);
                    throw error;
                }
                if (isMaxSizeExceededError(error)) {
                    console.error(`Worker ${this.id} failed to upload ${name}: ${error.message}!`);
                    throw error;
                }
                console.error(`Worker ${this.id} failed to upload ${name} with error: ${error.message}! Retrying...`);
                retry(error);
            });
        });
        const endTime = new Date();
        const seconds = (endTime.getTime() - startTime.getTime()) / 1000;
        // FIX: `size / seconds || 0` only guarded NaN; a sub-millisecond upload
        // gives seconds === 0 → Infinity, and pretty-bytes throws on non-finite input.
        const rate = seconds > 0 ? size / seconds : 0;
        console.log(`Worker ${this.id} uploaded ${name}! (${(0, pretty_bytes_1.default)(size)} @ ${(0, pretty_bytes_1.default)(rate)}/sec)`);
        // NOTE(review): tmpFileName (.gz/.zip) is not deleted here — presumably
        // ./tmp cleans up tmpDir on exit; confirm, otherwise temp files accumulate.
        return { name, size };
    }
}
exports.UploadWorker = UploadWorker;
/**
 * Splits `array` into `parts` contiguous chunks, preserving order.
 * FIX: chunk size is recomputed from the REMAINING length each iteration;
 * the old `Math.ceil(array.length / parts)` produced empty trailing chunks
 * (e.g. 5 files / 4 workers → [2,2,1,0]), creating workers with no files.
 */
function splitToChunks(array, parts) {
    const copy = [...array];
    const result = [];
    for (let i = parts; i > 0; i--) {
        result.push(copy.splice(0, Math.ceil(copy.length / i)));
    }
    return result;
}
// True when the client marked the error as an authentication failure (not retryable).
function isAuthenticationError(error) {
    return error.isAuthenticationError;
}
// True when the server rejected the file for exceeding size limits (not retryable).
// FIX: guard against errors without a `message` so the catch handler can't throw.
function isMaxSizeExceededError(error) {
    const message = error?.message ?? '';
    return message.includes('Symbol file max size') || message.includes('Symbol table max size');
}
//# sourceMappingURL=worker.js.map