@azure/storage-file-datalake

"use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. Object.defineProperty(exports, "__esModule", { value: true }); exports.fsCreateReadStream = exports.fsStat = void 0; exports.streamToBuffer = streamToBuffer; exports.streamToBuffer2 = streamToBuffer2; const tslib_1 = require("tslib"); const node_fs_1 = tslib_1.__importDefault(require("node:fs")); const node_util_1 = tslib_1.__importDefault(require("node:util")); /** * Reads a readable stream into buffer. Fill the buffer from offset to end. * * @param stream - A Node.js Readable stream * @param buffer - Buffer to be filled, length must greater than or equal to offset * @param offset - From which position in the buffer to be filled, inclusive * @param end - To which position in the buffer to be filled, exclusive * @param encoding - Encoding of the Readable stream */ async function streamToBuffer(stream, buffer, offset, end, encoding) { let pos = 0; // Position in stream const count = end - offset; // Total amount of data needed in stream return new Promise((resolve, reject) => { stream.on("readable", () => { if (pos >= count) { resolve(); return; } let chunk = stream.read(); if (!chunk) { return; } if (typeof chunk === "string") { chunk = Buffer.from(chunk, encoding); } // How much data needed in this chunk const chunkLength = pos + chunk.length > count ? count - pos : chunk.length; buffer.fill(chunk.slice(0, chunkLength), offset + pos, offset + pos + chunkLength); pos += chunkLength; }); stream.on("end", () => { if (pos < count) { reject(new Error(`Stream drains before getting enough data needed. Data read: ${pos}, data need: ${count}`)); } resolve(); }); stream.on("error", reject); }); } /** * Reads a readable stream into buffer entirely. * * @param stream - A Node.js Readable stream * @param buffer - Buffer to be filled, length must greater than or equal to offset * @param encoding - Encoding of the Readable stream * @returns with the count of bytes read. * @throws `RangeError` If buffer size is not big enough. */ async function streamToBuffer2(stream, buffer, encoding) { let pos = 0; // Position in stream const bufferSize = buffer.length; return new Promise((resolve, reject) => { stream.on("readable", () => { let chunk = stream.read(); if (!chunk) { return; } if (typeof chunk === "string") { chunk = Buffer.from(chunk, encoding); } if (pos + chunk.length > bufferSize) { reject(new Error(`Stream exceeds buffer size. Buffer size: ${bufferSize}`)); return; } buffer.fill(chunk, pos, pos + chunk.length); pos += chunk.length; }); stream.on("end", () => { resolve(pos); }); stream.on("error", reject); }); } /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * * Promisified version of fs.stat(). */ exports.fsStat = node_util_1.default.promisify(node_fs_1.default.stat); exports.fsCreateReadStream = node_fs_1.default.createReadStream; //# sourceMappingURL=utils.js.map