// firebase-tools: Command-Line Interface for Firebase
Object.defineProperty(exports, "__esModule", { value: true });
const Chain = require("stream-chain");
const clc = require("colorette");
const Filter = require("stream-json/filters/Filter");
const stream = require("stream");
const StreamObject = require("stream-json/streamers/StreamObject");
const apiv2_1 = require("../apiv2");
const node_fetch_1 = require("node-fetch");
const error_1 = require("../error");
const pLimit = require("p-limit");
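// Transform stream that groups incoming sized chunks ({ json, pathname, size })
// into batches whose combined size stays under `maxSize`, emitting each batch
// as a single multi-location patch.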
class BatchChunks extends stream.Transform {
    constructor(maxSize, opts) {
        super(Object.assign(Object.assign({}, opts), { objectMode: true }));
        this.maxSize = maxSize;
        this.batch = [];
        this.size = 0;
    }
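    // Flushes the current batch whenever appending the next chunk would push
    // the combined size (JSON size plus pathname length) past `maxSize`.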
    _transform(chunk, _, callback) {
        const totalChunkSize = chunk.size + chunk.pathname.length;
        if (this.size + totalChunkSize > this.maxSize) {
            this.push(this.transformBatchToPatchData(this.batch));
            this.batch = [];
            this.size = 0;
        }
        this.batch.push(chunk);
        this.size += totalChunkSize;
        callback(null);
    }
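    // Compacts a batch into a single patch and normalizes it into a shape the
    // REST API accepts.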
    transformBatchToPatchData(batch) {
        return this.sanitizePatchData(this.compactData(batch));
    }
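    // Merges all chunks in a batch under their longest common path prefix so
    // the batch can be written with one multi-location PATCH.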
    compactData(batch) {
        if (batch.length === 1) {
            return batch[0];
        }
        const pathname = this.findLongestCommonPrefixArray(batch.map((d) => d.pathname));
        let json = {};
        let size = 0;
        for (const chunk of batch) {
            const truncatedPath = chunk.pathname.substring(pathname.length + 1);
            json = Object.assign({}, json, { [truncatedPath]: chunk.json });
            size += chunk.size;
        }
        return { json, pathname, size };
    }
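    // PATCH bodies must be JSON objects: primitive leaves are re-keyed under
    // their final path segment, and arrays are converted to index-keyed
    // objects.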
    sanitizePatchData({ json, pathname, size }) {
        if (typeof json === "string" || typeof json === "number" || typeof json === "boolean") {
            const tokens = pathname.split("/");
            const lastToken = tokens.pop();
            return { json: { [lastToken]: json }, pathname: tokens.join("/"), size };
        }
        if (Array.isArray(json)) {
            return { json: Object.assign({}, json), pathname, size };
        }
        return { json, pathname, size };
    }
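    // Computes the longest common prefix of the given paths, comparing whole
    // "/"-separated segments rather than individual characters.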
    findLongestCommonPrefixArray(paths) {
        const findLongestCommonPrefixPair = (p, q) => {
            const pTokens = p.split("/");
            const qTokens = q.split("/");
            let prefix = pTokens.slice(0, qTokens.length);
            for (let i = 0; i < prefix.length; i++) {
                if (prefix[i] !== qTokens[i]) {
                    prefix = prefix.slice(0, i);
                    break;
                }
            }
            return prefix.join("/");
        };
        if (paths.length === 0) {
            return "";
        }
        let prefix = paths[0];
        for (let i = 1; i < paths.length; i++) {
            prefix = findLongestCommonPrefixPair(prefix, paths[i]);
        }
        return prefix;
    }
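    // Emits any chunks remaining in the final, partially filled batch.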
    _flush(callback) {
        if (this.size > 0) {
            this.push(this.transformBatchToPatchData(this.batch));
        }
        callback(null);
    }
}
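// Imports a JSON data stream into the Realtime Database location at `dbUrl`,
// splitting the data into payload-sized patches and writing them with bounded
// concurrency.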
class DatabaseImporter {
    constructor(dbUrl, inStream, dataPath, payloadSize, concurrency) {
        this.dbUrl = dbUrl;
        this.inStream = inStream;
        this.dataPath = dataPath;
        this.payloadSize = payloadSize;
        this.nonFatalRetryTimeout = 1000;
        this.client = new apiv2_1.Client({ urlPrefix: dbUrl.origin, auth: true });
        this.limit = pLimit(concurrency);
    }
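    // Runs the import: refuses to overwrite existing data, then streams the
    // input through the chunk/batch/write pipeline.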
    async execute() {
        await this.checkLocationIsEmpty();
        return this.readAndWriteChunks();
    }
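    // Issues a shallow GET against the target location and aborts if any data
    // already exists there.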
    async checkLocationIsEmpty() {
        const response = await this.client.request({
            method: "GET",
            path: this.dbUrl.pathname + ".json",
            queryParams: { shallow: "true" },
        });
        if (response.body) {
            throw new error_1.FirebaseError("Importing is only allowed for an empty location. Delete all data by running " +
                clc.bold(`firebase database:remove ${this.dbUrl.pathname} --disable-triggers`) +
                ", then rerun this command.", { exit: 2 });
        }
    }
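    // Wires up the streaming pipeline: parse the input JSON, filter it down to
    // `dataPath`, split values into payload-sized chunks, group the chunks into
    // batches, and PATCH each batch to the database.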
    readAndWriteChunks() {
        const { dbUrl, payloadSize } = this;
        const chunkData = this.chunkData.bind(this);
        const doWriteBatch = this.doWriteBatch.bind(this);
        const getJoinedPath = this.joinPath.bind(this);
        const readChunks = new stream.Transform({ objectMode: true });
        readChunks._transform = function (chunk, _, done) {
            const data = { json: chunk.value, pathname: getJoinedPath(dbUrl.pathname, chunk.key) };
            const chunkedData = chunkData(data);
            const chunks = chunkedData.chunks || [
                Object.assign(Object.assign({}, data), { size: JSON.stringify(data.json).length }),
            ];
            for (const chunk of chunks) {
                this.push(chunk);
            }
            done();
        };
        const writeBatch = new stream.Transform({ objectMode: true });
        writeBatch._transform = async function (batch, _, done) {
            const res = await doWriteBatch(batch);
            this.push(res);
            done();
        };
        return new Promise((resolve, reject) => {
            const responses = [];
            const pipeline = new Chain([
                this.inStream,
                Filter.withParser({
                    filter: this.computeFilterString(this.dataPath) || (() => true),
                    pathSeparator: "/",
                }),
                StreamObject.streamObject(),
            ]);
            pipeline
                .on("error", (err) => reject(new error_1.FirebaseError(`Invalid data; couldn't parse JSON object, array, or value. ${err.message}`, {
                    original: err,
                    exit: 2,
                })))
                .pipe(readChunks)
                .pipe(new BatchChunks(payloadSize))
                .pipe(writeBatch)
                .on("data", (res) => responses.push(res))
                .on("error", reject)
                .once("end", () => resolve(responses));
        });
    }
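    // Writes one batch as a PATCH request, gated by the concurrency limiter.
    // A request that fails with ETIMEDOUT is retried once after
    // `nonFatalRetryTimeout` milliseconds.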
    doWriteBatch(batch) {
        const doRequest = () => {
            return this.client.request({
                method: "PATCH",
                path: `${batch.pathname}.json`,
                body: batch.json,
                queryParams: this.dbUrl.searchParams,
            });
        };
        return this.limit(async () => {
            try {
                return await doRequest();
            }
            catch (err) {
                const isTimeoutErr = err instanceof error_1.FirebaseError &&
                    err.original instanceof node_fetch_1.FetchError &&
                    err.original.code === "ETIMEDOUT";
                if (isTimeoutErr) {
                    await new Promise((res) => setTimeout(res, this.nonFatalRetryTimeout));
                    return await doRequest();
                }
                throw err;
            }
        });
    }
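    // Recursively measures the serialized size of a JSON subtree. Returns
    // `chunks: null` when the subtree fits in a single payload; otherwise
    // returns the payload-sized pieces it decomposes into. The estimate counts
    // 2 characters for the enclosing braces and, per key, the key length plus
    // 3 for its quotes and separator.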
    chunkData({ json, pathname }) {
        if (typeof json === "string" || typeof json === "number" || typeof json === "boolean") {
            return { chunks: null, size: JSON.stringify(json).length };
        }
        else {
            let size = 2;
            const chunks = [];
            let hasChunkedChild = false;
            for (const [key, val] of Object.entries(json)) {
                size += key.length + 3;
                const child = { json: val, pathname: this.joinPath(pathname, key) };
                const childChunks = this.chunkData(child);
                size += childChunks.size;
                if (childChunks.chunks) {
                    hasChunkedChild = true;
                    chunks.push(...childChunks.chunks);
                }
                else {
                    chunks.push(Object.assign(Object.assign({}, child), { size: childChunks.size }));
                }
            }
            if (hasChunkedChild || size >= this.payloadSize) {
                return { chunks, size };
            }
            else {
                return { chunks: null, size };
            }
        }
    }
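    // Normalizes the user-supplied data path into a stream-json filter string;
    // an empty result means the whole input is imported.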
    computeFilterString(dataPath) {
        return dataPath.split("/").filter(Boolean).join("/");
    }
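    // Joins two path segments, collapsing the doubled slash at the boundary.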
    joinPath(root, key) {
        return [root, key].join("/").replace("//", "/");
    }
}
exports.default = DatabaseImporter;
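// Usage sketch (hypothetical values; assumes a Node `fs` read stream and a
// database URL parsed with `new URL(...)`):
//
//   const importer = new DatabaseImporter(
//       new URL("https://<project>.firebaseio.com/path/to/import"),
//       fs.createReadStream("backup.json"),
//       "/",               // dataPath: import the whole file
//       10 * 1024 * 1024,  // payloadSize: max serialized size per PATCH
//       5,                 // concurrency: max in-flight requests
//   );
//   const responses = await importer.execute();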