UNPKG

@sanity/migrate

Version:

Tooling for running data migrations on Sanity.io projects

253 lines (252 loc) • 9.53 kB
import { ReadableStream } from 'node:stream/web';
import { BufferList } from './BufferList.js';
import * as headers from './headers.js';

// Inspired by
// - https://github.com/alanshaw/it-tar/blob/master/src/extract.ts
// - https://github.com/mafintosh/tar-stream/blob/master/extract.js

/**
 * A ReadableStream that yields nothing and closes immediately.
 * Used as the body for zero-size entries and directories.
 */
const emptyReadableStream = () =>
  new ReadableStream({
    pull(controller) {
      controller.close();
    },
  });

/**
 * Extract entries from a tar byte stream.
 *
 * Produces a stream of `[header, bodyStream]` tuples, one per tar entry.
 * Each entry body must be consumed before the next tuple is produced:
 * while an entry body is being read, `readingChunk` suppresses pulls on
 * the outer stream so the two streams don't compete for `reader`.
 *
 * @param {ReadableStream<Uint8Array>} stream - raw tar bytes
 * @param {{filenameEncoding?: string, allowUnknownFormat?: boolean}} [options]
 * @returns {ReadableStream<[object, ReadableStream<Uint8Array>]>}
 * @throws {Error} if the stream ends before a full 512-byte header block
 */
export function untar(stream, options = {}) {
  const buffer = new BufferList();
  const reader = stream.getReader();
  let readingChunk = false;
  return new ReadableStream({
    async pull(controller) {
      // An entry body stream currently owns `reader`; yield until it's done.
      if (readingChunk) {
        return;
      }
      const { done, value } = await reader.read();
      if (!done) {
        buffer.push(value);
      }
      // Tar headers are fixed 512-byte blocks.
      // NOTE(review): assumes BufferList.shift(512) only returns a falsy
      // value when the buffered data cannot ever reach 512 bytes — a single
      // read() shorter than 512 bytes mid-stream would otherwise trip this
      // error; confirm against BufferList's contract.
      const headerChunk = buffer.shift(512);
      if (!headerChunk) {
        throw new Error('Unexpected end of tar file. Expected 512 bytes of headers.');
      }
      const header = headers.decode(
        headerChunk,
        options.filenameEncoding ?? 'utf8',
        options.allowUnknownFormat ?? false,
      );
      if (header) {
        if (header.size === null || header.size === 0 || header.type === 'directory') {
          // No body bytes follow this header.
          controller.enqueue([header, emptyReadableStream()]);
        } else {
          // Hand the shared reader to the entry stream until it has
          // consumed `header.size` bytes (plus block padding).
          readingChunk = true;
          controller.enqueue([
            header,
            entryStream(reader, header.size, buffer, () => {
              readingChunk = false;
            }),
          ]);
        }
      } else if (done) {
        // note - there might be more data in the buffer, after the input stream is done
        // so only complete if we couldn't find a header
        controller.close();
      }
    },
  });
}

/**
 * Stream exactly `expectedBytes` of entry body from the shared reader,
 * then discard the trailing 512-byte-block padding and call `next()` to
 * release the reader back to the outer untar stream.
 *
 * @param {ReadableStreamDefaultReader<Uint8Array>} reader - shared tar reader
 * @param {number} expectedBytes - entry size from the tar header
 * @param {BufferList} buffer - shared byte buffer (may already hold body bytes)
 * @param {() => void} next - invoked once the entry is fully consumed
 * @returns {ReadableStream<Uint8Array>}
 * @throws {Error} if the source ends before the entry is complete
 */
function entryStream(reader, expectedBytes, buffer, next) {
  let totalBytesRead = 0;
  return new ReadableStream({
    async pull(controller) {
      const { done, value } = await reader.read();
      const remaining = expectedBytes - totalBytesRead;
      if (!done) {
        buffer.push(value);
      }
      const chunk = buffer.shiftFirst(remaining);
      if (!chunk) {
        throw new Error('Premature end of tar stream');
      }
      controller.enqueue(chunk);
      totalBytesRead += chunk.byteLength;
      if (chunk.byteLength === remaining) {
        // We've reached the end of the entry; drop the padding that rounds
        // the entry up to a 512-byte block boundary.
        discardPadding(buffer, expectedBytes);
        controller.close();
        next();
      }
    },
  });
}

/**
 * Number of padding bytes that follow an entry of `size` bytes so the next
 * header starts on a 512-byte block boundary.
 *
 * @param {number} size - entry size in bytes
 * @returns {number} 0..511
 */
function getPadding(size) {
  const partial = size & 511;
  return partial === 0 ? 0 : 512 - partial;
}

/**
 * Drop the inter-entry block padding (if any) from the front of `bl`.
 *
 * @param {BufferList} bl
 * @param {number} size - entry size the padding follows
 */
function discardPadding(bl, size) {
  const overflow = getPadding(size);
  if (overflow > 0) {
    bl.shift(overflow);
  }
}