// snyk-docker-plugin — decompress-maybe.js (retrieved via UNPKG; 135 lines, 5.47 kB)
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.decompressMaybe = void 0;
const fzstd_1 = require("fzstd");
const stream_1 = require("stream");
const zlib_1 = require("zlib");
/**
 * Creates a transform stream that automatically detects and decompresses data
 * based on magic numbers.
 *
 * Supports three formats:
 * - gzip (magic: 1f 8b) - Streamed through Node.js built-in zlib
 * - zstd (magic: 28 b5 2f fd) - Streamed through fzstd library
 * - uncompressed - Passed through unchanged
 *
 * Both gzip and zstd use streaming decompression to avoid buffering entire
 * layers in memory. This is critical for handling large image layers
 * (multiple GB) without excessive memory usage.
 *
 * OCI images from containerd may use zstd compression, while older Docker
 * archives use gzip. Manifest and config files within OCI archives are
 * typically uncompressed JSON.
 *
 * Named after the gunzip-maybe library, which only handled gzip detection.
 *
 * @returns {import("stream").Transform} transform stream emitting the
 *   decompressed (or passed-through) bytes; errors from the decompressors
 *   destroy the stream or are reported via the write callback.
 */
function decompressMaybe() {
    // Format-sniffing state: input chunks are buffered until enough bytes
    // (at most 8) have arrived to identify the format by magic number.
    let headerRead = false;
    let compressionType = null;
    let gzipStream = null;
    let zstdStream = null;
    const buffer = [];
    // Feed data to zlib while honoring its write-side backpressure: only
    // signal the upstream source for more input once zlib has drained.
    // (Previously the write() return value was ignored, which let zlib's
    // internal write buffer grow without bound on fast producers.)
    const writeToGunzip = (data, callback) => {
        if (gzipStream.write(data)) {
            callback();
        }
        else {
            gzipStream.once("drain", () => callback());
        }
    };
    const transform = new stream_1.Transform({
        transform(chunk, _encoding, callback) {
            if (!headerRead) {
                buffer.push(chunk);
                const combined = Buffer.concat(buffer);
                // Check for gzip magic number (1f 8b)
                if (combined.length >= 2 &&
                    combined[0] === 0x1f &&
                    combined[1] === 0x8b) {
                    compressionType = "gzip";
                    headerRead = true;
                    // Setup gzip decompressor; decompressed output is forwarded
                    // to this transform's readable side, errors destroy it.
                    gzipStream = (0, zlib_1.createGunzip)();
                    gzipStream.on("data", (data) => transform.push(data));
                    gzipStream.on("error", (err) => transform.destroy(err));
                    // Write buffered data (backpressure-aware)
                    buffer.length = 0;
                    writeToGunzip(combined, callback);
                }
                // Check for zstd magic number (28 b5 2f fd)
                else if (combined.length >= 4 &&
                    combined[0] === 0x28 &&
                    combined[1] === 0xb5 &&
                    combined[2] === 0x2f &&
                    combined[3] === 0xfd) {
                    compressionType = "zstd";
                    headerRead = true;
                    // Setup zstd decompressor with streaming API; fzstd invokes
                    // the handler synchronously with each decompressed chunk.
                    zstdStream = new fzstd_1.Decompress((data) => {
                        transform.push(Buffer.from(data));
                    });
                    // Write buffered data
                    try {
                        zstdStream.push(new Uint8Array(combined), false);
                    }
                    catch (err) {
                        callback(new Error(`zstd decompression failed: ${err instanceof Error ? err.message : String(err)}`));
                        return;
                    }
                    buffer.length = 0;
                    callback();
                }
                // After 8 bytes with no known magic, assume uncompressed
                else if (combined.length >= 8) {
                    compressionType = "none";
                    headerRead = true;
                    // Push buffered data as-is
                    this.push(combined);
                    buffer.length = 0;
                    callback();
                }
                else {
                    // Need more data before the format can be identified
                    callback();
                }
            }
            else {
                // Header already read: route chunks to the chosen decompressor
                if (compressionType === "gzip" && gzipStream) {
                    writeToGunzip(chunk, callback);
                }
                else if (compressionType === "zstd" && zstdStream) {
                    try {
                        zstdStream.push(new Uint8Array(chunk), false);
                        callback();
                    }
                    catch (err) {
                        callback(new Error(`zstd decompression failed: ${err instanceof Error ? err.message : String(err)}`));
                    }
                }
                else {
                    // No compression: pass chunk through unchanged
                    callback(null, chunk);
                }
            }
        },
        // NOTE: deliberately NOT async. Transform._flush completes via the
        // callback; an async function additionally returns a promise, which
        // some Node versions treat as a second completion signal and could
        // finish the stream before the decompressor has fully flushed.
        flush(callback) {
            if (compressionType === "gzip" && gzipStream) {
                // Wait until zlib has emitted all remaining decompressed data
                gzipStream.once("end", () => callback());
                gzipStream.end();
            }
            else if (compressionType === "zstd" && zstdStream) {
                // Signal end of zstd stream (empty final frame)
                try {
                    zstdStream.push(new Uint8Array(0), true);
                    callback();
                }
                catch (err) {
                    callback(new Error(`zstd decompression failed: ${err instanceof Error ? err.message : String(err)}`));
                }
            }
            else if (!headerRead && buffer.length > 0) {
                // Stream ended before 8 bytes arrived; assume uncompressed
                this.push(Buffer.concat(buffer));
                callback();
            }
            else {
                callback();
            }
        },
    });
    return transform;
}
exports.decompressMaybe = decompressMaybe;
//# sourceMappingURL=decompress-maybe.js.map