file-chunkify
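// Bundler-generated runtime helpers (esbuild-style) that emulate async
// functions and async generators in the compiled output below.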
var __knownSymbol = (name, symbol) => (symbol = Symbol[name]) ? symbol : Symbol.for("Symbol." + name);
var __async = (__this, __arguments, generator) => {
return new Promise((resolve, reject) => {
var fulfilled = (value) => {
try {
step(generator.next(value));
} catch (e) {
reject(e);
}
};
var rejected = (value) => {
try {
step(generator.throw(value));
} catch (e) {
reject(e);
}
};
var step = (x) => x.done ? resolve(x.value) : Promise.resolve(x.value).then(fulfilled, rejected);
step((generator = generator.apply(__this, __arguments)).next());
});
};
var __await = function(promise, isYieldStar) {
this[0] = promise;
this[1] = isYieldStar;
};
var __asyncGenerator = (__this, __arguments, generator) => {
var resume = (k, v, yes, no) => {
try {
var x = generator[k](v), isAwait = (v = x.value) instanceof __await, done = x.done;
Promise.resolve(isAwait ? v[0] : v).then((y) => isAwait ? resume(k === "return" ? k : "next", v[1] ? { done: y.done, value: y.value } : y, yes, no) : yes({ value: y, done })).catch((e) => resume("throw", e, yes, no));
} catch (e) {
no(e);
}
}, method = (k) => it[k] = (x) => new Promise((yes, no) => resume(k, x, yes, no)), it = {};
return generator = generator.apply(__this, __arguments), it[__knownSymbol("asyncIterator")] = () => it, method("next"), method("throw"), method("return"), it;
};
// src/common.ts
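// Internal logger: writes to the console only when options.debugMode is set,
// routing errors to console.error and everything else to console.log.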
function logMessage(message, isError = false, options) {
if (options == null ? void 0 : options.debugMode) {
isError ? console.error(message) : console.log(message);
}
}
// src/split-file.ts
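// Builds a pseudo-unique file name prefix from a timestamp and a random
// number (not an RFC 4122 UUID, despite the name).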
function generateUUID() {
return `${Date.now()}-${Math.floor(Math.random() * 1e9)}`;
}
function validateParams({ file, chunkSize }) {
if (!(file instanceof File)) {
throw new Error("Invalid file: Expected a valid File object.");
}
if (!chunkSize || chunkSize <= 0) {
throw new Error("Invalid chunk size: Must be a positive number.");
}
}
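// Async generator: slices `file` into `chunkSize`-byte pieces and yields each
// piece as a File, along with its index, the total chunk count, the generated
// file name, and a rounded progress percentage.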
function splitFile(_0) {
return __asyncGenerator(this, arguments, function* ({
file,
chunkSize = 5 * 1024 * 1024,
options
}) {
var _a, _b;
try {
validateParams({ file, chunkSize });
const totalChunks = Math.ceil(file.size / chunkSize);
if (totalChunks === 0) {
throw new Error("File size is too small to be split.");
}
const fileExtension = ((_b = (_a = file == null ? void 0 : file.name) == null ? void 0 : _a.split(".")) == null ? void 0 : _b.pop()) || "bin";
const fileName = `${generateUUID()}.${fileExtension}`;
let chunkNumber = 0;
let start = 0;
let end = chunkSize;
while (start < file.size) {
const chunk = file.slice(start, end);
yield {
chunk: new File([chunk], fileName),
chunkNumber,
totalChunks,
fileName,
progress: Math.ceil((chunkNumber + 1) / totalChunks * 100)
};
chunkNumber++;
start = end;
end = start + chunkSize;
}
} catch (err) {
logMessage(
`Error during file splitting: ${err.message}`,
true,
options
);
throw err;
}
});
}
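// Example (sketch): consuming splitFile in the browser and uploading each
// chunk. The "/upload" endpoint and the FormData field names below are
// assumptions for illustration, not part of this library.
//
//   import { splitFile } from "file-chunkify";
//
//   async function uploadInChunks(file) {
//     for await (const part of splitFile({ file, options: { debugMode: true } })) {
//       const form = new FormData();
//       form.append("file", part.chunk);
//       form.append("chunkNumber", String(part.chunkNumber));
//       form.append("totalChunks", String(part.totalChunks));
//       await fetch("/upload", { method: "POST", body: form });
//       console.log(`Uploaded ${part.progress}%`);
//     }
//   }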
// src/merge-file.ts
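// Node-only half of the library: persists incoming chunks to disk and merges
// them into the final file once the last chunk has been received.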
import {
createWriteStream,
existsSync,
mkdirSync,
readFileSync,
unlinkSync,
writeFileSync
} from "fs";
import path from "path";
function validateParams2({
file,
chunkNumber,
totalChunks,
outputDir,
chunkDir
}) {
chunkNumber = Number(chunkNumber);
totalChunks = Number(totalChunks);
if (!file || !file.originalname || !file.buffer) {
throw new Error(
"Invalid file: Expected a valid file object with originalname and buffer."
);
}
if (isNaN(chunkNumber) || chunkNumber < 0) {
throw new Error(
`Invalid chunk number: Received ${chunkNumber}, expected a non-negative number.`
);
}
if (isNaN(totalChunks) || totalChunks <= 0 || chunkNumber >= totalChunks) {
throw new Error(
`Invalid totalChunks: Received ${totalChunks}, expected a positive number greater than chunkNumber.`
);
}
if (typeof outputDir !== "string" || typeof chunkDir !== "string") {
throw new Error(
"Invalid directory paths: outputDir and chunkDir must be valid strings."
);
}
}
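// Writes one uploaded chunk to `chunkDir`; when the last chunk arrives, merges
// all parts into `outputDir` and returns the merged file path in the result.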
function saveChunk(_0) {
return __async(this, arguments, function* ({
file,
chunkNumber,
totalChunks,
outputDir = "./uploads",
chunkDir = "./uploads/chunks",
options
}) {
try {
validateParams2({ file, chunkNumber, totalChunks, outputDir, chunkDir });
const extension = path.extname(file.originalname) || ".bin";
const fileName = path.basename(file.originalname, extension);
chunkNumber = Number(chunkNumber);
totalChunks = Number(totalChunks);
chunkDir = path.resolve(chunkDir);
outputDir = path.resolve(outputDir);
if (!existsSync(chunkDir)) mkdirSync(chunkDir, { recursive: true });
const chunkFilePath = path.join(
chunkDir,
`${fileName}.part_${chunkNumber}`
);
try {
writeFileSync(chunkFilePath, file.buffer);
logMessage(
`Chunk ${chunkNumber + 1}/${totalChunks} saved: ${chunkFilePath}`,
false,
options
);
} catch (err) {
logMessage(`Error saving chunk ${chunkNumber}: ${err}`, true, options);
throw new Error("Failed to save chunk");
}
if (chunkNumber === totalChunks - 1) {
const mergedFilePath = yield mergeChunks({
fileName,
totalChunks,
outputDir,
chunkDir,
extension,
options
});
return {
success: true,
message: "All chunks uploaded and merged successfully",
mergedFilePath
};
}
return {
success: true,
message: `Chunk ${chunkNumber} uploaded successfully`
};
} catch (err) {
logMessage(
`Error saving or merging chunk: ${err.message}`,
true,
options
);
return {
success: false,
message: err.message
};
}
});
}
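// Example (sketch): receiving chunks in an Express route using multer's
// in-memory storage, which yields the { originalname, buffer } shape that
// saveChunk expects. The route path, field name, and default directories are
// assumptions for illustration.
//
//   import express from "express";
//   import multer from "multer";
//   import { saveChunk } from "file-chunkify";
//
//   const upload = multer({ storage: multer.memoryStorage() });
//   const app = express();
//
//   app.post("/upload", upload.single("file"), async (req, res) => {
//     const result = await saveChunk({
//       file: req.file,
//       chunkNumber: req.body.chunkNumber,
//       totalChunks: req.body.totalChunks,
//       options: { debugMode: true }
//     });
//     res.status(result.success ? 200 : 400).json(result);
//   });
//
// mergeChunks (below) appends each saved part file to the final output in
// order and deletes the part once it has been written.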
function mergeChunks(_0) {
return __async(this, arguments, function* ({
fileName,
totalChunks,
outputDir,
chunkDir,
extension,
options
}) {
if (!existsSync(outputDir)) mkdirSync(outputDir, { recursive: true });
const finalFilePath = path.join(outputDir, `${fileName}${extension}`);
logMessage(`Merging chunks into: ${finalFilePath}`, false, options);
return new Promise((resolve, reject) => {
const writeStream = createWriteStream(finalFilePath);
let currentChunk = 0;
writeStream.on("error", (err) => {
logMessage(`Error writing to final file: ${err}`, true, options);
reject(err);
});
writeStream.on("finish", () => {
logMessage("Merging completed successfully!", false, options);
resolve(finalFilePath);
});
const mergeNextChunk = () => {
if (currentChunk < totalChunks) {
const chunkFilePath = path.join(
chunkDir,
`${fileName}.part_${currentChunk}`
);
try {
if (!existsSync(chunkFilePath))
throw new Error(`Missing chunk file: ${chunkFilePath}`);
const chunkBuffer = readFileSync(chunkFilePath);
writeStream.write(chunkBuffer, () => {
unlinkSync(chunkFilePath);
currentChunk++;
mergeNextChunk();
});
} catch (err) {
logMessage(`Error processing chunk: ${err}`, true, options);
writeStream.destroy();
reject(err);
}
} else {
writeStream.end();
}
};
mergeNextChunk();
});
});
}
export {
saveChunk,
splitFile
};