UNPKG

@pkerschbaum/code-oss-file-service

Version:

VS Code ([microsoft/vscode](https://github.com/microsoft/vscode)) includes a rich "`FileService`" and "`DiskFileSystemProvider`" abstraction built on top of Node.js core modules (`fs`, `path`) and Electron's `shell` module. This package allows using that abstraction outside of VS Code.

474 lines 16.3 kB
"use strict";
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.prefixedStream = exports.prefixedReadable = exports.transform = exports.toReadable = exports.emptyStream = exports.toStream = exports.peekStream = exports.listenStream = exports.consumeStream = exports.peekReadable = exports.consumeReadable = exports.newWriteableStream = exports.isReadableBufferedStream = exports.isReadableStream = void 0;
const errors_1 = require("../../base/common/errors");
const lifecycle_1 = require("../../base/common/lifecycle");
/**
 * Duck-type check for a readable stream: the candidate must expose
 * `on`, `pause`, `resume` and `destroy` as functions.
 *
 * @param obj candidate value (may be anything, including null/undefined)
 * @returns true when `obj` looks like a readable stream
 */
function isReadableStream(obj) {
    const candidate = obj;
    if (!candidate) {
        return false;
    }
    return [candidate.on, candidate.pause, candidate.resume, candidate.destroy].every(fn => typeof fn === 'function');
}
exports.isReadableStream = isReadableStream;
/**
 * Duck-type check for a buffered stream wrapper: `{ stream, buffer, ended }`
 * where `stream` passes `isReadableStream`, `buffer` is an array and
 * `ended` is a boolean (the shape `peekStream` resolves with).
 *
 * @param obj candidate value
 * @returns true when `obj` looks like a readable buffered stream
 */
function isReadableBufferedStream(obj) {
    const candidate = obj;
    if (!candidate) {
        return false;
    }
    return isReadableStream(candidate.stream) && Array.isArray(candidate.buffer) && typeof candidate.ended === 'boolean';
}
exports.isReadableBufferedStream = isReadableBufferedStream;
/**
 * Creates a new writeable stream.
 *
 * @param reducer folds an array of buffered chunks into a single chunk
 * @param options optional; may carry a numeric `highWaterMark` (see `write`)
 */
function newWriteableStream(reducer, options) {
    return new WriteableStreamImpl(reducer, options);
}
exports.newWriteableStream = newWriteableStream;
/**
 * Writeable stream with paused/flowing semantics:
 * - starts out paused (`state.flowing === false`); `write`/`error`/`end`
 *   buffer their payloads until the first 'data' listener triggers `resume()`
 * - while flowing, events are delivered to listeners immediately
 * - `destroy()` drops all buffers, listeners and pending write promises
 *
 * NOTE(review): not safe for concurrent producers — no locking, plain arrays.
 */
class WriteableStreamImpl {
    constructor(reducer, options) {
        this.reducer = reducer;
        this.options = options;
        // state machine flags; `destroyed` short-circuits every public method
        this.state = { flowing: false, ended: false, destroyed: false };
        // payloads buffered while not flowing
        this.buffer = { data: [], error: [] };
        this.listeners = { data: [], error: [], end: [] };
        // resolve-callbacks of writers blocked on the highWaterMark
        this.pendingWritePromises = [];
    }
    // Stop delivering events; subsequent writes/errors are buffered again.
    pause() {
        if (this.state.destroyed) {
            return;
        }
        this.state.flowing = false;
    }
    // Switch to flowing mode and drain everything buffered so far
    // (data first, then errors, then a pending end).
    resume() {
        if (this.state.destroyed) {
            return;
        }
        if (!this.state.flowing) {
            this.state.flowing = true;
            // emit buffered events
            this.flowData();
            this.flowErrors();
            this.flowEnd();
        }
    }
    // Write a chunk. Returns a Promise ONLY when buffering past the
    // configured highWaterMark, so producers can await back-pressure.
    write(data) {
        var _a;
        if (this.state.destroyed) {
            return;
        }
        // flowing: directly send the data to listeners
        if (this.state.flowing) {
            this.emitData(data);
        }
        // not yet flowing: buffer data until flowing
        else {
            this.buffer.data.push(data);
            // highWaterMark: if configured, signal back when buffer reached limits
            if (typeof ((_a = this.options) === null || _a === void 0 ? void 0 : _a.highWaterMark) === 'number' && this.buffer.data.length > this.options.highWaterMark) {
                return new Promise(resolve => this.pendingWritePromises.push(resolve));
            }
        }
    }
    // Report an error; buffered until flowing so no error is lost.
    error(error) {
        if (this.state.destroyed) {
            return;
        }
        // flowing: directly send the error to listeners
        if (this.state.flowing) {
            this.emitError(error);
        }
        // not yet flowing: buffer errors until flowing
        else {
            this.buffer.error.push(error);
        }
    }
    // End the stream, optionally writing one final chunk first. When
    // flowing this emits 'end' and destroys; otherwise the end is
    // remembered and delivered once a consumer attaches.
    end(result) {
        if (this.state.destroyed) {
            return;
        }
        // end with data if provided
        if (typeof result !== 'undefined') {
            this.write(result);
        }
        // flowing: send end event to listeners
        if (this.state.flowing) {
            this.emitEnd();
            this.destroy();
        }
        // not yet flowing: remember state
        else {
            this.state.ended = true;
        }
    }
    emitData(data) {
        this.listeners.data.slice(0).forEach(listener => listener(data)); // slice to avoid listener mutation from delivering event
    }
    emitError(error) {
        if (this.listeners.error.length === 0) {
            (0, errors_1.onUnexpectedError)(error); // nobody listened to this error so we log it as unexpected
        }
        else {
            this.listeners.error.slice(0).forEach(listener => listener(error)); // slice to avoid listener mutation from delivering event
        }
    }
    emitEnd() {
        this.listeners.end.slice(0).forEach(listener => listener()); // slice to avoid listener mutation from delivering event
    }
    // Attach a listener. Adding a 'data' listener implicitly resumes the
    // stream; 'end'/'error' listeners may receive buffered events at once.
    on(event, callback) {
        if (this.state.destroyed) {
            return;
        }
        switch (event) {
            case 'data':
                this.listeners.data.push(callback);
                // switch into flowing mode as soon as the first 'data'
                // listener is added and we are not yet in flowing mode
                this.resume();
                break;
            case 'end':
                this.listeners.end.push(callback);
                // emit 'end' event directly if we are flowing
                // and the end has already been reached
                //
                // finish() when it went through
                if (this.state.flowing && this.flowEnd()) {
                    this.destroy();
                }
                break;
            case 'error':
                this.listeners.error.push(callback);
                // emit buffered 'error' events unless done already
                // now that we know that we have at least one listener
                if (this.state.flowing) {
                    this.flowErrors();
                }
                break;
        }
    }
    // Remove a previously attached listener (no-op if unknown).
    removeListener(event, callback) {
        if (this.state.destroyed) {
            return;
        }
        let listeners = undefined;
        switch (event) {
            case 'data':
                listeners = this.listeners.data;
                break;
            case 'end':
                listeners = this.listeners.end;
                break;
            case 'error':
                listeners = this.listeners.error;
                break;
        }
        if (listeners) {
            const index = listeners.indexOf(callback);
            if (index >= 0) {
                listeners.splice(index, 1);
            }
        }
    }
    // Reduce all buffered chunks into one, emit it, then unblock any
    // producers that were awaiting the highWaterMark back-pressure promise.
    flowData() {
        if (this.buffer.data.length > 0) {
            const fullDataBuffer = this.reducer(this.buffer.data);
            this.emitData(fullDataBuffer);
            this.buffer.data.length = 0;
            // When the buffer is empty, resolve all pending writers
            const pendingWritePromises = [...this.pendingWritePromises];
            this.pendingWritePromises.length = 0;
            pendingWritePromises.forEach(pendingWritePromise => pendingWritePromise());
        }
    }
    // Deliver buffered errors, but only once an error listener exists
    // (otherwise they stay buffered rather than hitting onUnexpectedError).
    flowErrors() {
        if (this.listeners.error.length > 0) {
            for (const error of this.buffer.error) {
                this.emitError(error);
            }
            this.buffer.error.length = 0;
        }
    }
    // Emit 'end' if it was reached; returns true only when at least one
    // 'end' listener actually received it (callers then destroy()).
    flowEnd() {
        if (this.state.ended) {
            this.emitEnd();
            return this.listeners.end.length > 0;
        }
        return false;
    }
    // Irreversibly tear down: mark destroyed/ended and release all
    // buffers, listeners and pending write promises.
    destroy() {
        if (!this.state.destroyed) {
            this.state.destroyed = true;
            this.state.ended = true;
            this.buffer.data.length = 0;
            this.buffer.error.length = 0;
            this.listeners.data.length = 0;
            this.listeners.error.length = 0;
            this.listeners.end.length = 0;
            this.pendingWritePromises.length = 0;
        }
    }
}
/**
 * Helper to fully read a T readable into a T.
*/ function consumeReadable(readable, reducer) { const chunks = []; let chunk; while ((chunk = readable.read()) !== null) { chunks.push(chunk); } return reducer(chunks); } exports.consumeReadable = consumeReadable; /** * Helper to read a T readable up to a maximum of chunks. If the limit is * reached, will return a readable instead to ensure all data can still * be read. */ function peekReadable(readable, reducer, maxChunks) { const chunks = []; let chunk = undefined; while ((chunk = readable.read()) !== null && chunks.length < maxChunks) { chunks.push(chunk); } // If the last chunk is null, it means we reached the end of // the readable and return all the data at once if (chunk === null && chunks.length > 0) { return reducer(chunks); } // Otherwise, we still have a chunk, it means we reached the maxChunks // value and as such we return a new Readable that first returns // the existing read chunks and then continues with reading from // the underlying readable. return { read: () => { // First consume chunks from our array if (chunks.length > 0) { return chunks.shift(); } // Then ensure to return our last read chunk if (typeof chunk !== 'undefined') { const lastReadChunk = chunk; // explicitly use undefined here to indicate that we consumed // the chunk, which could have either been null or valued. chunk = undefined; return lastReadChunk; } // Finally delegate back to the Readable return readable.read(); } }; } exports.peekReadable = peekReadable; function consumeStream(stream, reducer) { return new Promise((resolve, reject) => { const chunks = []; listenStream(stream, { onData: chunk => { if (reducer) { chunks.push(chunk); } }, onError: error => { if (reducer) { reject(error); } else { resolve(undefined); } }, onEnd: () => { if (reducer) { resolve(reducer(chunks)); } else { resolve(undefined); } } }); }); } exports.consumeStream = consumeStream; /** * Helper to listen to all events of a T stream in proper order. 
*/
function listenStream(stream, listener) {
    // once disposed, all subsequent events are silently dropped
    let destroyed = false;
    stream.on('error', error => {
        if (!destroyed) {
            listener.onError(error);
        }
    });
    stream.on('end', () => {
        if (!destroyed) {
            listener.onEnd();
        }
    });
    // Adding the `data` listener will turn the stream
    // into flowing mode. As such it is important to
    // add this listener last (DO NOT CHANGE!)
    stream.on('data', data => {
        if (!destroyed) {
            listener.onData(data);
        }
    });
    // the returned disposable stops event delivery but does NOT
    // remove the underlying listeners or pause the stream
    return (0, lifecycle_1.toDisposable)(() => destroyed = true);
}
exports.listenStream = listenStream;
/**
 * Helper to peek up to `maxChunks` into a stream. The return type signals if
 * the stream has ended or not. If not, caller needs to add a `data` listener
 * to continue reading.
 */
function peekStream(stream, maxChunks) {
    return new Promise((resolve, reject) => {
        const streamListeners = new lifecycle_1.DisposableStore();
        const buffer = [];
        // Data Listener: collects chunks; resolves with `ended: false`
        // once strictly more than maxChunks chunks have arrived
        const dataListener = (chunk) => {
            // Add to buffer
            buffer.push(chunk);
            // We reached maxChunks and thus need to return
            if (buffer.length > maxChunks) {
                // Dispose any listeners and ensure to pause the
                // stream so that it can be consumed again by caller
                streamListeners.dispose();
                stream.pause();
                return resolve({ stream, buffer, ended: false });
            }
        };
        // Error Listener
        const errorListener = (error) => {
            return reject(error);
        };
        // End Listener: stream finished within the limit
        const endListener = () => {
            return resolve({ stream, buffer, ended: true });
        };
        // register removal BEFORE attaching, so disposing always detaches
        streamListeners.add((0, lifecycle_1.toDisposable)(() => stream.removeListener('error', errorListener)));
        stream.on('error', errorListener);
        streamListeners.add((0, lifecycle_1.toDisposable)(() => stream.removeListener('end', endListener)));
        stream.on('end', endListener);
        // Important: leave the `data` listener last because
        // this can turn the stream into flowing mode and we
        // want `error` events to be received as well.
        streamListeners.add((0, lifecycle_1.toDisposable)(() => stream.removeListener('data', dataListener)));
        stream.on('data', dataListener);
    });
}
exports.peekStream = peekStream;
/**
 * Helper to create a readable stream from an existing T.
 * The stream is ended immediately with `t` as its only payload.
 */
function toStream(t, reducer) {
    const stream = newWriteableStream(reducer);
    stream.end(t);
    return stream;
}
exports.toStream = toStream;
/**
 * Helper to create an empty stream: already ended, never produces data
 * (its reducer throws because it can never be invoked with actual data).
 */
function emptyStream() {
    const stream = newWriteableStream(() => { throw new Error('not supported'); });
    stream.end();
    return stream;
}
exports.emptyStream = emptyStream;
/**
 * Helper to convert a T into a Readable<T>: yields `t` on the first
 * read() and null (end of stream) on every read thereafter.
 */
function toReadable(t) {
    let consumed = false;
    return {
        read: () => {
            if (consumed) {
                return null;
            }
            consumed = true;
            return t;
        }
    };
}
exports.toReadable = toReadable;
/**
 * Helper to transform a readable stream into another stream: each chunk
 * passes through `transformer.data`; errors pass through the optional
 * `transformer.error` (or are forwarded unchanged).
 */
function transform(stream, transformer, reducer) {
    const target = newWriteableStream(reducer);
    listenStream(stream, {
        onData: data => target.write(transformer.data(data)),
        onError: error => target.error(transformer.error ? transformer.error(error) : error),
        onEnd: () => target.end()
    });
    return target;
}
exports.transform = transform;
/**
 * Helper to take an existing readable that will
 * have a prefix injected to the beginning.
 */
function prefixedReadable(prefix, readable, reducer) {
    let prefixHandled = false;
    return {
        read: () => {
            const chunk = readable.read();
            // Handle prefix only once
            if (!prefixHandled) {
                prefixHandled = true;
                // If we have also a read-result, make
                // sure to reduce it to a single result
                if (chunk !== null) {
                    return reducer([prefix, chunk]);
                }
                // Otherwise, just return prefix directly
                return prefix;
            }
            return chunk;
        }
    };
}
exports.prefixedReadable = prefixedReadable;
/**
 * Helper to take an existing stream that will
 * have a prefix injected to the beginning.
*/ function prefixedStream(prefix, stream, reducer) { let prefixHandled = false; const target = newWriteableStream(reducer); listenStream(stream, { onData: data => { // Handle prefix only once if (!prefixHandled) { prefixHandled = true; return target.write(reducer([prefix, data])); } return target.write(data); }, onError: error => target.error(error), onEnd: () => { // Handle prefix only once if (!prefixHandled) { prefixHandled = true; target.write(prefix); } target.end(); } }); return target; } exports.prefixedStream = prefixedStream; //# sourceMappingURL=stream.js.map