image-stitch
Stitch images together efficiently with multi-format support (PNG, JPEG, HEIC) and streaming, for Node.js and the web
JavaScript
import { unfilterScanline, filterScanline, getBytesPerPixel } from './png-filter.js';
import { getSamplesPerPixel } from './utils.js';
import { deinterlaceAdam7 } from './adam7.js';
import { createDecompressionStream } from './streaming-inflate.js';
import { StreamingDeflator } from './streaming-deflate.js';
/**
* Decompress data using Web Compression Streams API
* Works in both Node.js (18+) and modern browsers
*/
export async function decompressData(data) {
// Use Blob.stream() if available, otherwise create stream from buffer manually
let stream;
if (typeof Blob !== 'undefined' && typeof new Blob([data]).stream === 'function') {
stream = new Blob([data]).stream();
}
else {
stream = new ReadableStream({
start(controller) {
controller.enqueue(data);
controller.close();
}
});
}
const decompressedStream = stream.pipeThrough(createDecompressionStream('deflate'));
const chunks = [];
const reader = decompressedStream.getReader();
while (true) {
const { value, done } = await reader.read();
if (done)
break;
chunks.push(value);
}
const totalLength = chunks.reduce((sum, chunk) => sum + chunk.length, 0);
const result = new Uint8Array(totalLength);
let offset = 0;
for (const chunk of chunks) {
result.set(chunk, offset);
offset += chunk.length;
}
return result;
}
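// Usage sketch (illustrative, not part of the original module): inflate a
// zlib/deflate-compressed buffer such as the concatenated IDAT payload of a
// PNG. `compressedBytes` is a placeholder name for any such Uint8Array.
async function exampleInflate(compressedBytes) {
    const raw = await decompressData(compressedBytes);
    // `raw` is a plain Uint8Array containing the inflated bytes.
    return raw;
}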
/**
 * Compress data via the StreamingDeflator helper (which uses the Web Compression Streams API)
* Works in both Node.js (18+) and modern browsers
*/
async function compressData(data) {
const chunks = [];
const deflator = new StreamingDeflator();
await deflator.initialize((chunk) => {
chunks.push(chunk);
});
await deflator.push(data);
await deflator.finish();
const totalLength = chunks.reduce((sum, chunk) => sum + chunk.length, 0);
const result = new Uint8Array(totalLength);
let offset = 0;
for (const chunk of chunks) {
result.set(chunk, offset);
offset += chunk.length;
}
return result;
}
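// Round-trip sketch (illustrative): compressData and decompressData use the
// same deflate framing (per the comments above), so compressing a buffer and
// then inflating the result should reproduce the original bytes.
async function exampleRoundTrip(rawBytes) {
    const packed = await compressData(rawBytes);
    const restored = await decompressData(packed);
    return restored.length === rawBytes.length;
}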
/**
* Decompress and unfilter PNG image data
* @param idatChunks Array of IDAT chunks containing compressed image data
* @param header PNG header information
* @returns Unfiltered raw pixel data
*/
export async function decompressImageData(idatChunks, header) {
// Concatenate all IDAT chunk data
let totalLength = 0;
for (const chunk of idatChunks) {
if (chunk.type === 'IDAT') {
totalLength += chunk.data.length;
}
}
const compressedData = new Uint8Array(totalLength);
let offset = 0;
for (const chunk of idatChunks) {
if (chunk.type === 'IDAT') {
compressedData.set(chunk.data, offset);
offset += chunk.data.length;
}
}
// Decompress using Web Compression Streams API
const decompressed = await decompressData(compressedData);
// Handle interlaced images (Adam7)
if (header.interlaceMethod === 1) {
return deinterlaceAdam7(decompressed, header);
}
// Handle non-interlaced images
const bytesPerPixel = getBytesPerPixel(header.bitDepth, header.colorType);
const scanlineLength = Math.ceil((header.width * header.bitDepth * getSamplesPerPixel(header.colorType)) / 8);
const unfilteredData = new Uint8Array(header.height * scanlineLength);
let previousLine = null;
let srcOffset = 0;
let dstOffset = 0;
for (let y = 0; y < header.height; y++) {
if (srcOffset >= decompressed.length) {
throw new Error('Unexpected end of decompressed data');
}
const filterType = decompressed[srcOffset++];
const scanline = decompressed.slice(srcOffset, srcOffset + scanlineLength);
srcOffset += scanlineLength;
const unfilteredLine = unfilterScanline(filterType, scanline, previousLine, bytesPerPixel);
unfilteredData.set(unfilteredLine, dstOffset);
dstOffset += scanlineLength;
previousLine = unfilteredLine;
}
return unfilteredData;
}
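// Illustrative sketch: the buffer returned above is laid out as `height` rows
// of `scanlineLength` bytes each, with the per-row filter-type bytes already
// removed. The helper name below is hypothetical.
function exampleGetRow(unfilteredData, header, y) {
    const scanlineLength = Math.ceil((header.width * header.bitDepth * getSamplesPerPixel(header.colorType)) / 8);
    return unfilteredData.subarray(y * scanlineLength, (y + 1) * scanlineLength);
}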
/**
* Filter and compress raw pixel data into PNG format
* @param pixelData Raw unfiltered pixel data
* @param header PNG header information
* @returns Compressed IDAT chunk data
*/
export async function compressImageData(pixelData, header) {
const bytesPerPixel = getBytesPerPixel(header.bitDepth, header.colorType);
const scanlineLength = Math.ceil((header.width * header.bitDepth * getSamplesPerPixel(header.colorType)) / 8);
// Add filter type bytes and filter each scanline
const filteredData = new Uint8Array(header.height * (scanlineLength + 1));
let srcOffset = 0;
let dstOffset = 0;
let previousLine = null;
for (let y = 0; y < header.height; y++) {
const scanline = pixelData.slice(srcOffset, srcOffset + scanlineLength);
srcOffset += scanlineLength;
const { filterType, filtered } = filterScanline(scanline, previousLine, bytesPerPixel);
filteredData[dstOffset++] = filterType;
filteredData.set(filtered, dstOffset);
dstOffset += filtered.length;
previousLine = scanline;
}
// Compress using Web Compression Streams API
const compressed = await compressData(filteredData);
return compressed;
}
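// Usage sketch (illustrative): turning raw, unfiltered pixel bytes into the
// compressed payload of a PNG's IDAT chunk(s). `rawPixels` and `ihdr` are
// placeholder names; chunk framing (length, type, CRC) is handled elsewhere.
async function exampleEncodeIdat(rawPixels, ihdr) {
    const idatPayload = await compressImageData(rawPixels, ihdr);
    return idatPayload; // Uint8Array ready to be wrapped into IDAT chunk(s)
}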
/**
* Extract pixel data from a PNG file
*/
export async function extractPixelData(chunks, header) {
const idatChunks = chunks.filter(chunk => chunk.type === 'IDAT');
if (idatChunks.length === 0) {
throw new Error('No IDAT chunks found in PNG');
}
return await decompressImageData(idatChunks, header);
}
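// Usage sketch (illustrative): going from parsed PNG chunks to raw scanline
// bytes. `parsedChunks` and `ihdr` stand in for the output of this package's
// chunk/header parser, which lives outside this file.
async function exampleExtract(parsedChunks, ihdr) {
    const pixels = await extractPixelData(parsedChunks, ihdr);
    console.log(`decoded ${pixels.length} bytes of scanline data`);
    return pixels;
}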
//# sourceMappingURL=png-decompress.js.map