// snyk-docker-plugin: Snyk CLI docker plugin
// stream-utils.js
Object.defineProperty(exports, "__esModule", { value: true });
exports.streamToJson = exports.streamToSha1 = exports.streamToSha256 = exports.streamToBuffer = exports.streamToString = void 0;
const crypto = require("crypto");
const types_1 = require("./types");
const HASH_ENCODING = "hex";
const MEGABYTE = 1 * 1024 * 1024;
// Collects a readable stream into a single string, utf8 by default.
// Note: the streamSize parameter is accepted but not used here; chunks are
// assumed to be Buffers (the stream must not be in object mode).
async function streamToString(stream, streamSize, encoding = "utf8") {
    const chunks = [];
    return new Promise((resolve, reject) => {
        stream.on("end", () => {
            resolve(Buffer.concat(chunks).toString(encoding));
        });
        stream.on("error", (error) => reject(error));
        stream.on("data", (chunk) => {
            chunks.push(chunk);
        });
    });
}
exports.streamToString = streamToString;
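// Usage sketch (not part of the original module; "fs" and the file path are
// illustrative assumptions):
//
//   const fs = require("fs");
//   const { streamToString } = require("./stream-utils");
//   streamToString(fs.createReadStream("Dockerfile")).then((contents) => {
//       console.log(contents);
//   });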
// Collects a readable stream into a single Buffer. Buffer.from copies each
// chunk, which also tolerates streams that emit strings instead of Buffers.
async function streamToBuffer(stream) {
    const chunks = [];
    return new Promise((resolve, reject) => {
        stream.on("end", () => {
            resolve(Buffer.concat(chunks));
        });
        stream.on("error", (error) => reject(error));
        stream.on("data", (chunk) => {
            chunks.push(Buffer.from(chunk));
        });
    });
}
exports.streamToBuffer = streamToBuffer;
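// Usage sketch (hypothetical caller): buffer a whole stream in memory before
// handing it to a parser that needs random access. Inside an async function:
//
//   const data = await streamToBuffer(someReadableStream); // -> Buffer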
// Pipes the stream through a crypto.Hash transform and resolves with the
// digest. Because of setEncoding, hash.read() already returns a hex string,
// so the trailing toString(HASH_ENCODING) is effectively a no-op.
async function streamToHash(stream, hashAlgorithm) {
    return new Promise((resolve, reject) => {
        const hash = crypto.createHash(hashAlgorithm);
        hash.setEncoding(HASH_ENCODING);
        stream.on("end", () => {
            hash.end();
            resolve(hash.read().toString(HASH_ENCODING));
        });
        stream.on("error", (error) => reject(error));
        stream.pipe(hash);
    });
}
async function streamToSha256(stream) {
    return streamToHash(stream, types_1.HashAlgorithm.Sha256);
}
exports.streamToSha256 = streamToSha256;
async function streamToSha1(stream) {
    return streamToHash(stream, types_1.HashAlgorithm.Sha1);
}
exports.streamToSha1 = streamToSha1;
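// Usage sketch (hypothetical; "fs" and the layer path are assumptions),
// e.g. computing an image layer digest. Inside an async function:
//
//   const fs = require("fs");
//   const digest = await streamToSha256(fs.createReadStream("layer.tar"));
//   console.log(`sha256:${digest}`);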
/**
 * Reads up to 2 megabytes from the stream and tries to JSON.parse the result.
 * Rejects if the stream emits an error, if more than 2 MB arrive, or if the
 * accumulated data cannot be parsed as JSON.
 */
async function streamToJson(stream) {
    return new Promise((resolve, reject) => {
        const chunks = [];
        let bytes = 0;
        stream.on("end", () => {
            try {
                resolve(JSON.parse(chunks.join("")));
            }
            catch (error) {
                reject(error);
            }
        });
        stream.on("error", (error) => reject(error));
        stream.on("data", (chunk) => {
            bytes += chunk.length;
            if (bytes <= 2 * MEGABYTE) {
                chunks.push(chunk.toString("utf8"));
            }
            else {
                // Rejecting here does not stop the stream; it keeps flowing,
                // but once the promise settles, the resolve() on "end" and any
                // further reject() calls are no-ops.
                reject(new Error("The stream is too large to parse as JSON"));
            }
        });
    });
}
exports.streamToJson = streamToJson;
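// Usage sketch (hypothetical; the manifest path is an assumption):
//
//   const fs = require("fs");
//   streamToJson(fs.createReadStream("manifest.json"))
//       .then((manifest) => console.log(manifest))
//       .catch((error) => console.error(error.message));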
//# sourceMappingURL=stream-utils.js.map