askexperts
AskExperts SDK: build and use AI experts - ask them questions and pay with bitcoin on an open protocol
JavaScript
/**
* Compression utilities for NIP-174
* Supports both browser (using Compression Streams API) and Node.js (using zlib)
*/
import { debugCompression, debugError } from "../common/debug.js";
// Compression methods
export const COMPRESSION_NONE = "none";
export const COMPRESSION_GZIP = "gzip";
// Check if we're in a browser environment; Compression Streams availability is verified at use time
const isBrowser = typeof window !== "undefined";
// Node.js-specific functions; they are assigned real implementations
// only when running in a Node.js environment
let nodeCreateGzip = null;
let nodeCreateGunzip = null;
let nodeFlushGzip = null;
// Track initialization status
let nodeInitialized = false;
let nodeInitPromise = null;
// Only initialize Node.js functions if not in browser
// This code is isolated in a way that esbuild can eliminate it when targeting browser
if (!isBrowser) {
// We're using a function that will be called only at runtime in Node.js
// This prevents esbuild from trying to resolve the imports during bundling
const initNodeFunctions = async () => {
if (nodeInitialized)
return;
try {
// Dynamic imports that will only be executed at runtime in Node.js
const zlibModule = await import("zlib");
// Add streaming functions
nodeCreateGzip = () => zlibModule.createGzip();
nodeCreateGunzip = () => zlibModule.createGunzip();
nodeFlushGzip = async (stream) => {
return new Promise((ok) => stream.flush(zlibModule.constants.Z_SYNC_FLUSH, ok));
};
nodeInitialized = true;
debugCompression(`Node gzip compression initialized`);
}
catch (error) {
debugError("Failed to initialize Node.js compression functions:", error);
throw error;
}
};
// Initialize Node.js functions and store the promise
nodeInitPromise = initNodeFunctions();
}
/**
* Ensures that Node.js compression functions are initialized
* @returns Promise that resolves when initialization is complete
*/
async function ensureNodeInitialized() {
if (isBrowser || nodeInitialized)
return;
if (nodeInitPromise)
await nodeInitPromise;
}
/**
* Error thrown when compression or decompression result exceeds the maximum allowed size
*/
export class CompressionSizeLimitExceeded extends Error {
constructor(currentSize, maxSize) {
super(`Compression result size (${currentSize} bytes) exceeds the maximum allowed size (${maxSize} bytes)`);
this.name = "CompressionSizeLimitExceeded";
}
}
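/*
 * A handling sketch (hypothetical; `largeInput` is a placeholder string, and
 * getCompression()/COMPRESSION_GZIP are defined elsewhere in this module):
 *
 *   const compressor = await getCompression().startCompress(COMPRESSION_GZIP, false, 4096);
 *   try {
 *     await compressor.add(largeInput); // throws once the 4096-byte budget would be exceeded
 *   } catch (e) {
 *     if (e instanceof CompressionSizeLimitExceeded) {
 *       // Packet too big for the limit: split it, or raise maxResultSize
 *     }
 *   } finally {
 *     compressor[Symbol.dispose]();
 *   }
 */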
/**
* Common browser compression implementation using Compression Streams API
*/
class BrowserCompression {
constructor(stream, method, decompress, maxResultSize) {
this.output = [];
this.totalSize = 0;
this.decompress = decompress;
this.writer = stream.writable.getWriter();
this.reader = stream.readable.getReader();
this.maxResultSize = maxResultSize;
this.method = method;
}
async maxChunkSize() {
return this.maxResultSizeSafe();
}
maxResultSizeSafe() {
if (!this.maxResultSize)
return this.maxResultSize;
        // Reserve 1 KB of headroom for the gzip trailer, but never return less than 64 bytes
return Math.max(64, this.maxResultSize - 1024);
}
checkResultSize(packetSize) {
const maxResultSize = this.maxResultSizeSafe();
if (!maxResultSize)
return;
const potentialTotalSize = this.totalSize + packetSize;
if (potentialTotalSize <= maxResultSize)
return;
this.lastError = new CompressionSizeLimitExceeded(potentialTotalSize, maxResultSize);
}
async read() {
const newChunks = [];
let newSize = 0;
while (true) {
const { value, done } = await this.reader.read();
if (done)
break;
if (value) {
newChunks.push(value);
newSize += value.length;
}
}
return { newChunks, newSize };
}
async write(inputData) {
if (this.lastError)
throw this.lastError;
        // Check if adding this chunk would exceed the size limit,
        // assuming the compressed output will be <= the input size.
        // NOTE: ideally we would compress first and measure the output,
        // but a packet cannot be pulled back out of the compressor once
        // written, so measuring the input is the only option.
if (!this.decompress) {
this.checkResultSize(inputData.length);
if (this.lastError)
throw this.lastError;
}
// Send to compressor
await this.writer.write(inputData);
// Read any available output
const { newChunks, newSize } = await this.read();
// For decompression, we check after decompression
// and throw the results away if exceeded
if (this.decompress) {
this.checkResultSize(newSize);
if (this.lastError)
throw this.lastError;
}
// Add the chunks
this.output.push(...newChunks);
this.totalSize += newSize;
return this.totalSize;
}
async finalize() {
        // Fail immediately on any error except a size-limit rejection,
        // which only dropped the offending packet
if (this.lastError &&
!(this.lastError instanceof CompressionSizeLimitExceeded)) {
throw this.lastError;
}
// Write trailer, etc
await this.writer.close();
// Read any remaining output
const { newChunks, newSize } = await this.read();
// For decompression, we check after decompression
// and throw the results away if exceeded
if (this.decompress) {
this.checkResultSize(newSize);
if (this.lastError)
throw this.lastError;
}
// Add the chunks
this.output.push(...newChunks);
this.totalSize += newSize;
// Concatenate all chunks
const result = new Uint8Array(this.totalSize);
let offset = 0;
for (const chunk of this.output) {
result.set(chunk, offset);
offset += chunk.length;
}
return result;
}
dispose() {
// Release resources
this.writer.releaseLock();
this.reader.releaseLock();
this.output = [];
}
}
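/*
 * For reference, a minimal sketch of the Compression Streams primitive this
 * class wraps (assumes browser CompressionStream support; `input` is a
 * placeholder Uint8Array):
 *
 *   const cs = new CompressionStream("gzip");
 *   const writer = cs.writable.getWriter();
 *   const reader = cs.readable.getReader();
 *   await writer.write(input);
 *   await writer.close();
 *   for (let r = await reader.read(); !r.done; r = await reader.read()) {
 *     // r.value is a Uint8Array of gzip output
 *   }
 */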
/**
* Browser implementation of CompressionInstance using Compression Streams API
*/
class BrowserCompressionInstance {
constructor(stream, method, binary, maxResultSize) {
this.binary = binary;
this.compression = new BrowserCompression(stream, method, false, maxResultSize);
}
async maxChunkSize() {
return this.compression.maxChunkSize();
}
async add(chunk) {
if (typeof chunk === "string" && this.binary)
throw new Error("String input in binary mode");
if (typeof chunk !== "string" && !this.binary)
throw new Error("Uint8Array input in binary mode");
// Convert string to Uint8Array if needed
const inputData = typeof chunk === "string" ? new TextEncoder().encode(chunk) : chunk;
return this.compression.write(inputData);
}
async finish() {
return this.compression.finalize();
}
[Symbol.dispose]() {
this.compression.dispose();
}
}
/**
* Browser implementation of DecompressionInstance using Compression Streams API
*/
class BrowserDecompressionInstance {
constructor(stream, method, binary = false, maxResultSize) {
this.compression = new BrowserCompression(stream, method, true, maxResultSize);
this.binary = binary;
}
async maxChunkSize() {
return this.compression.maxChunkSize();
}
async add(chunk) {
// Convert string to Uint8Array if needed for internal processing
const inputData = typeof chunk === "string" ? new TextEncoder().encode(chunk) : chunk;
return this.compression.write(inputData);
}
async finish() {
const result = await this.compression.finalize();
// Return binary data or convert to string
if (this.binary) {
return result;
}
else {
return new TextDecoder().decode(result);
}
}
[Symbol.dispose]() {
this.compression.dispose();
}
}
/**
* Common Node.js compression implementation using zlib
*/
class NodeCompression {
constructor(stream, method, decompress, maxResultSize) {
this.chunks = [];
this.totalSize = 0;
this.streamFinished = false;
this.lastError = null;
this.bufferedSize = 0;
this.stream = stream;
this.decompress = decompress;
this.maxResultSize = maxResultSize;
this.method = method;
// Set up data handling
this.stream.on("data", this.onData.bind(this));
// Set up error handling
this.stream.on("error", (err) => {
debugCompression(`Node gzip error ${err}`);
// Store the error for later use
this.onError(err instanceof Error ? err : new Error(String(err)));
});
}
onError(error) {
this.lastError = error;
}
onData(chunk) {
// Ignore data after we've failed
if (this.lastError)
return;
// clear the buffered size
this.bufferedSize = 0;
// Current chunk
const data = new Uint8Array(chunk);
// Check result chunk after decompression
if (this.decompress) {
this.checkResultSize(data.length);
if (this.lastError)
return;
}
this.chunks.push(data);
this.totalSize += data.length;
}
async maxChunkSize() {
return this.maxResultSizeSafe();
}
maxResultSizeSafe() {
if (!this.maxResultSize)
return this.maxResultSize;
        // Reserve 1 KB of headroom for the gzip trailer, but never return less than 64 bytes
return Math.max(64, this.maxResultSize - 1024);
}
potentialTotalSize(packetSize) {
return this.totalSize + packetSize + this.bufferedSize;
}
isResultTooBig(packetSize) {
const maxResultSize = this.maxResultSizeSafe();
if (!maxResultSize)
return false;
return this.potentialTotalSize(packetSize) > maxResultSize;
}
checkResultSize(packetSize) {
if (!this.isResultTooBig(packetSize))
return;
const maxResultSize = this.maxResultSizeSafe();
const potentialTotalSize = this.potentialTotalSize(packetSize);
debugCompression(`Node gzip result size would be exceeded: ${potentialTotalSize} > ${maxResultSize}`);
this.lastError = new CompressionSizeLimitExceeded(potentialTotalSize, maxResultSize);
}
async write(inputData) {
// If we already have an error or the stream is finished, reject immediately
if (this.lastError) {
throw this.lastError;
}
if (this.streamFinished) {
throw new Error("Stream is already finished");
}
// Write data to the stream
debugCompression(`Node gzip writing ${inputData.length} bytes`);
        // For compression, we check the limit before compressing,
        // assuming the compressed output will be <= the input size.
        // NOTE: ideally we would compress first and measure the output,
        // but a packet cannot be pulled back out of the compressor once
        // written, so measuring the input is the only option.
if (!this.decompress) {
// Try to flush the current stream if we're approaching the limit
if (this.isResultTooBig(inputData.length) && this.bufferedSize) {
debugCompression(`Node gzip approaching result limit, flushing...`);
await nodeFlushGzip(this.stream);
debugCompression(`Node gzip flushed`);
}
this.checkResultSize(inputData.length);
if (this.lastError)
throw this.lastError;
}
// Write to compressor
const bufferHasSpace = this.stream.write(Buffer.from(inputData));
        // If the stream's internal buffer is full, wait for the
        // 'drain' event before accepting more input
if (!bufferHasSpace) {
// Wait until 'data' events
debugCompression("Node gzip draining...");
await new Promise((ok) => this.stream.once("drain", ok));
debugCompression("Node gzip drained");
// Error might have happened
if (this.lastError) {
throw this.lastError;
}
}
else {
// Account for this input that didn't produce
// output and thus wasn't checked
this.bufferedSize += inputData.length;
}
// Return updated size
return this.totalSize;
}
async finalize() {
        debugCompression(`Node gzip finalize, ${this.totalSize} bytes of output so far`);
// If we already have an error, reject immediately
if (this.lastError) {
if (this.lastError instanceof CompressionSizeLimitExceeded) {
                // If we only rejected the previous packet during
                // compression, allow finalizing the archive that was
                // already buffered
                if (!this.decompress)
                    this.lastError = null;
}
else {
throw this.lastError;
}
}
if (this.streamFinished) {
throw new Error("Already finalized");
}
// Mark as finished
this.streamFinished = true;
// Set up one-time end and error handlers before ending the stream
const endPromise = new Promise((ok) => this.stream.once("end", ok));
// End the stream after handlers are attached
this.stream.end();
// Wait until 'end' event is received
await endPromise;
        // An error might have happened; we throw even if it is a
        // result-size error. At this point it can only come from
        // decompression, so the client has to reject the whole
        // packet (finalize won't return a result)
if (this.lastError) {
throw this.lastError;
}
debugCompression(`Node gzip finalized, total size ${this.totalSize} bytes`);
// Return result
return this.concatenateChunks();
}
concatenateChunks() {
const result = new Uint8Array(this.totalSize);
let offset = 0;
for (const chunk of this.chunks) {
result.set(chunk, offset);
offset += chunk.length;
}
return result;
}
dispose() {
// Release resources
if (!this.streamFinished) {
this.stream.removeAllListeners();
this.stream.end();
}
this.stream.destroy();
this.chunks = [];
}
}
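/*
 * For reference, a minimal sketch of the zlib streaming pattern this class
 * wraps (Node.js only; `input` is a placeholder Uint8Array):
 *
 *   const zlib = await import("zlib");
 *   const gz = zlib.createGzip();
 *   const out = [];
 *   gz.on("data", (c) => out.push(new Uint8Array(c)));
 *   if (!gz.write(Buffer.from(input))) {
 *     // Honor backpressure before writing more
 *     await new Promise((ok) => gz.once("drain", ok));
 *   }
 *   gz.end();
 *   // Wait until the gzip trailer has been emitted
 *   await new Promise((ok) => gz.once("end", ok));
 */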
/**
* Node.js implementation of CompressionInstance using zlib
*/
class NodeCompressionInstance {
constructor(stream, method, binary, maxResultSize) {
this.binary = binary;
this.compression = new NodeCompression(stream, method, false, maxResultSize);
}
async maxChunkSize() {
return this.compression.maxChunkSize();
}
async add(chunk) {
if (typeof chunk === "string" && this.binary)
throw new Error("String input in binary mode");
if (typeof chunk !== "string" && !this.binary)
throw new Error("Uint8Array input in binary mode");
// Convert string to Uint8Array if needed
const inputData = typeof chunk === "string" ? new TextEncoder().encode(chunk) : chunk;
return this.compression.write(inputData);
}
async finish() {
return this.compression.finalize();
}
[Symbol.dispose]() {
this.compression.dispose();
}
}
/**
* Node.js implementation of DecompressionInstance using zlib
*/
class NodeDecompressionInstance {
constructor(stream, method, binary = false, maxResultSize) {
this.compression = new NodeCompression(stream, method, true, maxResultSize);
this.binary = binary;
this.method = method;
}
async maxChunkSize() {
return this.compression.maxChunkSize();
}
async add(chunk) {
// Convert string to Uint8Array if needed for internal processing
const inputData = typeof chunk === "string" ? new TextEncoder().encode(chunk) : chunk;
return this.compression.write(inputData);
}
async finish() {
const result = await this.compression.finalize();
// Return binary data or convert to string
if (this.binary) {
return result;
}
else {
return new TextDecoder().decode(result);
}
}
[Symbol.dispose]() {
this.compression.dispose();
}
}
/**
 * No-compression implementation:
 * passes through whatever it is given (string or bytes).
 * Operates on bytes internally because it needs to measure
 * sizes in bytes.
 */
class NoCompression {
constructor(binary, maxResultSize) {
this.chunks = [];
this.totalSize = 0;
this.binary = binary;
this.maxResultSize = maxResultSize;
}
async maxChunkSize() {
return this.maxResultSizeSafe();
}
    maxResultSizeSafe() {
        // No trailer to reserve headroom for; the raw limit applies as-is
        return this.maxResultSize;
    }
checkResultSize(packetSize) {
const maxResultSize = this.maxResultSizeSafe();
if (!maxResultSize)
return;
const potentialTotalSize = this.totalSize + packetSize;
if (potentialTotalSize <= maxResultSize)
return;
this.lastError = new CompressionSizeLimitExceeded(potentialTotalSize, maxResultSize);
}
async write(inputData) {
this.checkResultSize(inputData.length);
if (this.lastError)
throw this.lastError;
this.chunks.push(inputData);
this.totalSize += inputData.length;
return this.totalSize;
}
async finalize() {
        // Fail immediately on any error except a size-limit rejection,
        // which only dropped the offending packet
if (this.lastError &&
!(this.lastError instanceof CompressionSizeLimitExceeded)) {
throw this.lastError;
}
// Concatenate all chunks
const result = new Uint8Array(this.totalSize);
let offset = 0;
for (const chunk of this.chunks) {
result.set(chunk, offset);
offset += chunk.length;
}
return result;
}
dispose() {
// Release resources
this.chunks = [];
}
}
/**
 * Pass-through implementation of CompressionInstance and
 * DecompressionInstance (no compression); the same logic serves
 * both directions since each is a plain pass-through.
 */
class NoCompressionDecompressionInstance {
constructor(binary, maxResultSize) {
this.compression = new NoCompression(binary, maxResultSize);
this.binary = binary;
}
async maxChunkSize() {
return this.compression.maxChunkSize();
}
async add(chunk) {
if (typeof chunk === "string" && this.binary)
throw new Error("String input in binary mode");
if (typeof chunk !== "string" && !this.binary)
throw new Error("Uint8Array input in binary mode");
// Convert string to Uint8Array if needed for internal processing
const inputData = typeof chunk === "string" ? new TextEncoder().encode(chunk) : chunk;
return this.compression.write(inputData);
}
async finish() {
const result = await this.compression.finalize();
// Return binary data or convert to string
if (this.binary) {
return result;
}
else {
return new TextDecoder().decode(result);
}
}
[Symbol.dispose]() {
this.compression.dispose();
}
}
/**
* Default implementation of the Compression interface
* Provides built-in support for 'none' and 'gzip' compression methods
*/
export class DefaultCompression {
/**
* Starts a streaming compression process
*
* @param method - The compression method
     * @param binary - Whether input chunks are binary (Uint8Array) rather than strings
* @param maxResultSize - Optional maximum size (in bytes) for the compressed result.
* Note: The finish() method might produce additional data that exceeds this limit.
* It's recommended to add a margin of 1KB to your actual limit to accommodate this.
* @returns A CompressionInstance for handling the stream
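     *
     * @example
     * // A streaming sketch (run inside an async function; names are from this module):
     * const inst = await new DefaultCompression().startCompress(COMPRESSION_GZIP, false, 64 * 1024);
     * await inst.add("hello ");
     * await inst.add("world");
     * const gzipped = await inst.finish(); // Uint8Array of gzip data
     * inst[Symbol.dispose]();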
*/
async startCompress(method, binary = false, maxResultSize) {
// Ensure Node.js functions are initialized if needed
await ensureNodeInitialized();
if (method === COMPRESSION_NONE) {
return new NoCompressionDecompressionInstance(binary, maxResultSize);
}
else if (method === COMPRESSION_GZIP) {
if (isBrowser) {
if (typeof window.CompressionStream === "undefined")
throw new Error("Browser CompressionStream for gzip is not available");
return new BrowserCompressionInstance(new CompressionStream("gzip"), method, binary, maxResultSize);
}
else {
if (nodeCreateGzip === null) {
throw new Error("Node.js compression functions not initialized");
}
return new NodeCompressionInstance(nodeCreateGzip(), method, binary, maxResultSize);
}
}
else {
throw new Error(`Unsupported compression method: ${method}`);
}
}
/**
* Starts a streaming decompression process
*
* @param method - The compression method
* @param binary - Whether to return binary data instead of string
* @param maxResultSize - Optional maximum size (in bytes) for the decompressed result.
* Note: The finish() method might produce additional data that exceeds this limit.
* It's recommended to add a margin of 1KB to your actual limit to accommodate this.
* @returns A DecompressionInstance for handling the stream
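     *
     * @example
     * // A streaming sketch (`gzipped` is a placeholder Uint8Array of gzip data):
     * const inst = await new DefaultCompression().startDecompress(COMPRESSION_GZIP);
     * await inst.add(gzipped);
     * const text = await inst.finish(); // string, since binary defaults to false
     * inst[Symbol.dispose]();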
*/
async startDecompress(method, binary = false, maxResultSize) {
// Ensure Node.js functions are initialized if needed
await ensureNodeInitialized();
if (method === COMPRESSION_NONE) {
return new NoCompressionDecompressionInstance(binary, maxResultSize);
}
else if (method === COMPRESSION_GZIP) {
if (isBrowser) {
                if (typeof window.DecompressionStream === "undefined")
                    throw new Error("Browser DecompressionStream for gzip is not available");
return new BrowserDecompressionInstance(new DecompressionStream("gzip"), method, binary, maxResultSize);
}
else {
if (nodeCreateGunzip === null) {
throw new Error("Node.js decompression functions not initialized");
}
return new NodeDecompressionInstance(nodeCreateGunzip(), method, binary, maxResultSize);
}
}
else {
throw new Error(`Unsupported compression method: ${method}`);
}
}
/**
* Compresses data using the specified compression method
*
* @param data - The data to compress as string or Uint8Array
* @param method - The compression method
* @returns Promise resolving to compressed data as Uint8Array
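     *
     * @example
     * // One-shot compression (inside an async function):
     * const packed = await new DefaultCompression().compress("hello world", COMPRESSION_GZIP);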
*/
async compress(data, method) {
// Use the streaming API
const compressor = await this.startCompress(method);
try {
await compressor.add(data);
const compressed = await compressor.finish();
// Debug logging needs to handle both string and Uint8Array
if (typeof compressed === "string") {
debugCompression(`Compressed ${typeof data === "string" ? data.length : data.byteLength} bytes to ${compressed.length} bytes`);
}
else {
debugCompression(`Compressed ${typeof data === "string" ? data.length : data.byteLength} bytes to ${compressed.byteLength} bytes`);
}
return compressed;
}
finally {
compressor[Symbol.dispose]();
}
}
/**
* Returns a list of supported compression methods
*
* @returns Array of supported compression method names
*/
list() {
return [COMPRESSION_NONE, COMPRESSION_GZIP];
}
/**
* Decompresses data using the specified compression method
*
* @param data - The compressed data as Uint8Array
* @param method - The compression method
* @param binary - Whether to return binary data instead of string
* @returns Promise resolving to decompressed data as string or Uint8Array (if binary)
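     *
     * @example
     * // Round trip (inside an async function):
     * const c = new DefaultCompression();
     * const packed = await c.compress("hello", COMPRESSION_GZIP);
     * const text = await c.decompress(packed, COMPRESSION_GZIP); // "hello"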
*/
async decompress(data, method, binary = false) {
// Use the streaming API
const decompressor = await this.startDecompress(method, binary);
try {
await decompressor.add(data);
const decompressed = await decompressor.finish();
return decompressed;
}
finally {
decompressor[Symbol.dispose]();
}
}
}
let compression;
export function getCompression() {
if (!compression)
compression = new DefaultCompression();
return compression;
}
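/*
 * The shared singleton is the typical entry point:
 *
 *   const compression = getCompression();
 *   compression.list(); // ["none", "gzip"]
 */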
//# sourceMappingURL=compression.js.map