@sanity/migrate
Tooling for running data migrations on Sanity.io projects
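// Transpiled build output: the helpers below (_async_to_generator, _ts_generator) are the
// down-leveled form of the source's async/await and generator syntax, emitted by the build tooling.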
function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) {
try {
var info = gen[key](arg);
var value = info.value;
} catch (error) {
reject(error);
return;
}
if (info.done) {
resolve(value);
} else {
Promise.resolve(value).then(_next, _throw);
}
}
function _async_to_generator(fn) {
return function() {
var self = this, args = arguments;
return new Promise(function(resolve, reject) {
var gen = fn.apply(self, args);
function _next(value) {
asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value);
}
function _throw(err) {
asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err);
}
_next(undefined);
});
};
}
function _ts_generator(thisArg, body) {
var f, y, t, _ = {
label: 0,
sent: function() {
if (t[0] & 1) throw t[1];
return t[1];
},
trys: [],
ops: []
}, g = Object.create((typeof Iterator === "function" ? Iterator : Object).prototype), d = Object.defineProperty;
return d(g, "next", {
value: verb(0)
}), d(g, "throw", {
value: verb(1)
}), d(g, "return", {
value: verb(2)
}), typeof Symbol === "function" && d(g, Symbol.iterator, {
value: function() {
return this;
}
}), g;
function verb(n) {
return function(v) {
return step([
n,
v
]);
};
}
function step(op) {
if (f) throw new TypeError("Generator is already executing.");
while(g && (g = 0, op[0] && (_ = 0)), _)try {
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
if (y = 0, t) op = [
op[0] & 2,
t.value
];
switch(op[0]){
case 0:
case 1:
t = op;
break;
case 4:
_.label++;
return {
value: op[1],
done: false
};
case 5:
_.label++;
y = op[1];
op = [
0
];
continue;
case 7:
op = _.ops.pop();
_.trys.pop();
continue;
default:
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) {
_ = 0;
continue;
}
if (op[0] === 3 && (!t || op[1] > t[0] && op[1] < t[3])) {
_.label = op[1];
break;
}
if (op[0] === 6 && _.label < t[1]) {
_.label = t[1];
t = op;
break;
}
if (t && _.label < t[2]) {
_.label = t[2];
_.ops.push(op);
break;
}
if (t[2]) _.ops.pop();
_.trys.pop();
continue;
}
op = body.call(thisArg, _);
} catch (e) {
op = [
6,
e
];
y = 0;
} finally{
f = t = 0;
}
if (op[0] & 5) throw op[1];
return {
value: op[0] ? op[1] : void 0,
done: true
};
}
}
import { open, unlink } from 'node:fs/promises';
import baseDebug from '../debug.js';
var debug = baseDebug.extend('bufferThroughFile');
var CHUNK_SIZE = 1024;
/**
* Takes a source stream that will be drained and written to the provided file name as fast as possible,
* and returns a function that can be called to create multiple readable streams on top of the buffer file.
* It will start pulling data from the source stream once the first readable stream is created, writing to the buffer file in the background.
* The readable streams can be read at any rate (but will not receive data faster than the buffer file is written to).
* Note: by default, buffering will run to completion, and this may prevent the process from exiting after you are done reading from the
* buffered streams. To stop writing to the buffer file, an AbortSignal can be provided; once its controller aborts, writing to the buffer file
* stops. After the signal is aborted, no new buffered readers can be created.
*
* @param source - The source readable stream. Will be drained as fast as possible.
* @param filename - The filename to write to.
* @param options - Options: an optional AbortSignal to stop writing to the buffer file, and an optional `keepFile` flag to keep the buffer file on disk after the last reader is closed.
* @returns A function that can be called multiple times to create a readable stream on top of the buffer file.
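*
* @example
* A minimal usage sketch, assuming a hypothetical source stream (`exportStream`) and temp-file path,
* neither of which is provided by this module:
*
* const controller = new AbortController()
* const createBufferedStream = bufferThroughFile(
*   exportStream, // hypothetical source ReadableStream, e.g. an HTTP response body
*   '/tmp/export.ndjson.buffer', // hypothetical buffer file path
*   {signal: controller.signal},
* )
* // Each call creates an independent readable stream over the same buffer file
* const first = createBufferedStream()
* const second = createBufferedStream()
* // ...consume `first` and `second` at different rates...
* // Abort to stop buffering and allow the process to exit
* controller.abort()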
*/ export function bufferThroughFile(source, filename, options) {
var signal = options === null || options === void 0 ? void 0 : options.signal;
var writeHandle;
var readHandle;
// Whether all the data has been written to the buffer file.
var bufferDone = false;
signal === null || signal === void 0 ? void 0 : signal.addEventListener('abort', function() {
debug('Aborting bufferThroughFile');
Promise.all([
writeHandle && writeHandle.close(),
readHandle && readHandle.then(function(handle) {
return handle.close();
})
]).catch(function(error) {
debug('Error closing handles on abort', error);
});
});
// Number of active readers. When this reaches 0, the read handle will be closed.
var readerCount = 0;
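// Promise that resolves once the buffer file is open for writing and pumping has started (set lazily by init()).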
var ready;
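// Drains the source reader, appending each chunk to the buffer file. When the source is exhausted
// (or the signal has been aborted), the write handle is closed, the buffer is marked complete and
// the source reader lock is released.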
function pump(reader) {
return _async_to_generator(function() {
var _ref, done, value;
return _ts_generator(this, function(_state) {
switch(_state.label){
case 0:
_state.trys.push([
0,
,
5,
7
]);
_state.label = 1;
case 1:
if (!true) return [
3,
4
];
return [
4,
reader.read()
];
case 2:
_ref = _state.sent(), done = _ref.done, value = _ref.value;
if (done || (signal === null || signal === void 0 ? void 0 : signal.aborted)) {
// if we're done reading, or the primary reader has been cancelled, stop writing to the buffer file
return [
2
];
}
return [
4,
writeHandle.write(value)
];
case 3:
_state.sent();
return [
3,
1
];
case 4:
return [
3,
7
];
case 5:
return [
4,
writeHandle.close()
];
case 6:
_state.sent();
bufferDone = true;
reader.releaseLock();
return [
7
];
case 7:
return [
2
];
}
});
})();
}
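// Creates a reader that reads sequential CHUNK_SIZE slices from the buffer file, retrying whenever it
// catches up with the writer, until buffering is complete or the signal is aborted.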
function createBufferedReader() {
var totalBytesRead = 0;
return function tryReadFromBuffer(handle) {
return _async_to_generator(function() {
var _ref, buffer, bytesRead;
return _ts_generator(this, function(_state) {
switch(_state.label){
case 0:
return [
4,
handle.read(new Uint8Array(CHUNK_SIZE), 0, CHUNK_SIZE, totalBytesRead)
];
case 1:
_ref = _state.sent(), buffer = _ref.buffer, bytesRead = _ref.bytesRead;
if (bytesRead === 0 && !bufferDone && !(signal === null || signal === void 0 ? void 0 : signal.aborted)) {
debug('Not enough data in buffer file, waiting for more data to be written');
// we're waiting for more data to be written to the buffer file, try again
return [
2,
tryReadFromBuffer(handle)
];
}
totalBytesRead += bytesRead;
return [
2,
{
buffer: buffer,
bytesRead: bytesRead
}
];
}
});
})();
};
}
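// Lazily opens the buffer file for writing and starts pumping the source stream into it;
// subsequent calls return the same promise.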
function init() {
if (ready === undefined) {
ready = function() {
return _async_to_generator(function() {
return _ts_generator(this, function(_state) {
switch(_state.label){
case 0:
debug('Initializing bufferThroughFile');
return [
4,
open(filename, 'w')
];
case 1:
writeHandle = _state.sent();
// start pumping data from the source stream to the buffer file
debug('Start buffering source stream to file');
// Note: don't await this, as it would block until the entire source is buffered (and thereby block ReadableStream.start())
pump(source.getReader()).then(function() {
debug('Done buffering source stream to buffer file');
}).catch(function(error) {
debug('Error pumping source stream', error);
});
return [
2
];
}
});
})();
}();
}
return ready;
}
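// Opens a single shared read handle on the buffer file, reused by all buffered readers.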
function getReadHandle() {
if (!readHandle) {
debug('Opening read handle on %s', filename);
readHandle = open(filename, 'r');
}
return readHandle;
}
function onReaderStart() {
readerCount++;
}
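// Called whenever a buffered reader ends. When the last reader is done, the shared read handle is
// closed and the buffer file is removed (unless options.keepFile is true).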
function onReaderEnd() {
return _async_to_generator(function() {
var handle;
return _ts_generator(this, function(_state) {
switch(_state.label){
case 0:
readerCount--;
if (!(readerCount === 0 && readHandle)) return [
3,
4
];
handle = readHandle;
readHandle = null;
debug('Closing read handle on %s', filename);
return [
4,
handle
];
case 1:
return [
4,
_state.sent().close()
];
case 2:
_state.sent();
if (!((options === null || options === void 0 ? void 0 : options.keepFile) !== true)) return [
3,
4
];
debug('Removing buffer file', filename);
return [
4,
unlink(filename)
];
case 3:
_state.sent();
_state.label = 4;
case 4:
return [
2
];
}
});
})();
}
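// The returned factory: every call creates a new ReadableStream that reads independently from the buffer file.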
return function() {
var onEnd = function onEnd() {
return _async_to_generator(function() {
return _ts_generator(this, function(_state) {
switch(_state.label){
case 0:
if (didEnd) {
return [
2
];
}
didEnd = true;
return [
4,
onReaderEnd()
];
case 1:
_state.sent();
return [
2
];
}
});
})();
};
var readChunk = createBufferedReader();
var didEnd = false;
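// start() registers the reader and ensures buffering has begun, pull() reads the next chunk from the
// buffer file (closing the stream once the buffer is complete), and cancel() releases the reader.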
return new ReadableStream({
cancel: function cancel() {
return _async_to_generator(function() {
return _ts_generator(this, function(_state) {
switch(_state.label){
case 0:
return [
4,
onEnd()
];
case 1:
_state.sent();
return [
2
];
}
});
})();
},
pull: function pull(controller) {
return _async_to_generator(function() {
var _ref, buffer, bytesRead;
return _ts_generator(this, function(_state) {
switch(_state.label){
case 0:
if (!readHandle) {
throw new Error('Cannot read from closed handle');
}
return [
4,
readHandle
];
case 1:
return [
4,
readChunk.apply(void 0, [
_state.sent()
])
];
case 2:
_ref = _state.sent(), buffer = _ref.buffer, bytesRead = _ref.bytesRead;
if (!(bytesRead === 0 && bufferDone)) return [
3,
4
];
debug('Reader done reading from file handle');
return [
4,
onEnd()
];
case 3:
_state.sent();
controller.close();
return [
3,
5
];
case 4:
controller.enqueue(buffer.subarray(0, bytesRead));
_state.label = 5;
case 5:
return [
2
];
}
});
})();
},
start: function start() {
return _async_to_generator(function() {
return _ts_generator(this, function(_state) {
switch(_state.label){
case 0:
if (signal === null || signal === void 0 ? void 0 : signal.aborted) {
throw new Error('Cannot create new buffered readers on aborted stream');
}
debug('Reader started reading from file handle');
onReaderStart();
return [
4,
init()
];
case 1:
_state.sent();
return [
4,
getReadHandle()
];
case 2:
_state.sent();
return [
2
];
}
});
})();
}
});
};
}