convex
Version:
Client for the Convex Cloud
328 lines (327 loc) • 12.4 kB
JavaScript
;
// ---------------------------------------------------------------------------
// CommonJS <-> ESM interop helpers (bundler-generated preamble).
// ---------------------------------------------------------------------------
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;

// Define every entry of `all` on `target` as an enumerable getter, so the
// exports stay "live" bindings.
var __export = (target, all) => {
  for (var name in all) {
    __defProp(target, name, { get: all[name], enumerable: true });
  }
};

// Copy own properties of `from` onto `to` as live getters, skipping `except`
// and any key `to` already defines. Returns `to`.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from)) {
      if (__hasOwnProp.call(to, key) || key === except) continue;
      desc = __getOwnPropDesc(from, key);
      __defProp(to, key, {
        get: () => from[key],
        enumerable: !desc || desc.enumerable
      });
    }
  }
  return to;
};

// Wrap a CommonJS module so it can be consumed as an ES-module namespace.
// If the importer is in node compatibility mode, or the module is not an ESM
// file converted by a Babel-compatible transform (no `__esModule` marker),
// expose the module itself under `default` for node compatibility.
var __toESM = (mod, isNodeMode, target) => {
  target = mod != null ? __create(__getProtoOf(mod)) : {};
  if (isNodeMode || !mod || !mod.__esModule) {
    __defProp(target, "default", { value: mod, enumerable: true });
  }
  return __copyProps(target, mod);
};

// Mark a fresh exports object as an ES module, then mirror `mod` onto it.
var __toCommonJS = (mod) => {
  const exportsObj = __defProp({}, "__esModule", { value: true });
  return __copyProps(exportsObj, mod);
};
// CommonJS export wiring: this module's single export is `convexImport`.
var convexImport_exports = {};
__export(convexImport_exports, {
  convexImport: () => convexImport
});
module.exports = __toCommonJS(convexImport_exports);
var import_chalk = __toESM(require("chalk"), 1);
var import_inquirer = __toESM(require("inquirer"), 1);
var import_utils = require("./lib/utils.js");
var import_version = require("./version.js");
var import_context = require("../bundler/context.js");
var import_api = require("./lib/api.js");
var import_path = __toESM(require("path"), 1);
var import_run = require("./lib/run.js");
var import_extra_typings = require("@commander-js/extra-typings");
var import_command = require("./lib/command.js");
// Size of each upload part when streaming the input file: 5 MiB.
const CHUNK_SIZE = 5 * 1024 * 1024;
// The `npx convex import` CLI command.
//
// Flow: validate options, resolve deployment credentials, stream the input
// file to the deployment in CHUNK_SIZE pieces (start_upload -> upload_part*
// -> finish_upload), then poll the resulting snapshot import until it
// reaches a terminal state, asking the user to confirm when the server
// requires manual confirmation.
const convexImport = new import_extra_typings.Command("import").summary("Import data from a file to your deployment").description(
  "Import data from a file to your Convex deployment.\n\n From a snapshot: `npx convex import snapshot.zip`\n For a single table: `npx convex import --table tableName file.json`\n\nBy default, this imports into your dev deployment."
).addOption(
  new import_extra_typings.Option(
    "--table <table>",
    "Destination table name. Required if format is csv, jsonLines, or jsonArray. Not supported if format is zip."
  )
).addOption(
  // --replace and --append are mutually exclusive; commander rejects the
  // combination via .conflicts declared on both options.
  new import_extra_typings.Option(
    "--replace",
    "Replace all existing data in any of the imported tables"
  ).conflicts("--append")
).addOption(
  new import_extra_typings.Option(
    "--append",
    "Append imported data to any existing tables"
  ).conflicts("--replace")
).option(
  "-y, --yes",
  "Skip confirmation prompt when import leads to deleting existing documents"
).addOption(
  new import_extra_typings.Option(
    "--format <format>",
    "Input file format. This flag is only required if the filename is missing an extension.\n- CSV files must have a header, and each row's entries are interpreted either as a (floating point) number or a string.\n- JSON files must be an array of JSON objects.\n- JSONLines files must have a JSON object per line.\n- ZIP files must have one directory per table, containing <table>/documents.jsonl. Snapshot exports from the Convex dashboard have this format."
  ).choices(["csv", "jsonLines", "jsonArray", "zip"])
).addDeploymentSelectionOptions((0, import_command.actionDescription)("Import data into")).argument("<path>", "Path to the input file").showHelpAfterError().action(async (filePath, options, command) => {
  const ctx = import_context.oneoffContext;
  // A second positional argument usually means the user wrote
  // `convex import <table> <file>` instead of using `--table`.
  if (command.args.length > 1) {
    (0, import_context.logFailure)(
      ctx,
      `Error: Too many positional arguments. If you're specifying a table name, use the \`--table\` option.`
    );
    return await ctx.crash(1, "fatal");
  }
  // Resolve which deployment to target and fetch its admin credentials.
  const deploymentSelection = (0, import_api.deploymentSelectionFromOptions)(options);
  const { adminKey, url: deploymentUrl } = await (0, import_api.fetchDeploymentCredentialsProvisionProd)(ctx, deploymentSelection);
  if (!ctx.fs.exists(filePath)) {
    (0, import_context.logFailure)(ctx, `Error: Path ${import_chalk.default.bold(filePath)} does not exist.`);
    return await ctx.crash(1, "invalid filesystem data");
  }
  const format = await determineFormat(ctx, filePath, options.format ?? null);
  const tableName = options.table ?? null;
  // `--table` is required for single-table formats and forbidden for zip
  // snapshots (the zip layout encodes its own table names).
  if (tableName === null) {
    if (format !== "zip") {
      (0, import_context.logFailure)(
        ctx,
        `Error: The \`--table\` option is required for format ${format}`
      );
      return await ctx.crash(1, "fatal");
    }
  } else {
    if (format === "zip") {
      (0, import_context.logFailure)(
        ctx,
        `Error: The \`--table\` option is not allowed for format ${format}`
      );
      return await ctx.crash(1, "fatal");
    }
  }
  await (0, import_utils.ensureHasConvexDependency)(ctx, "import");
  // Stream the file in CHUNK_SIZE pieces so large snapshots are never read
  // into memory all at once.
  const data = ctx.fs.createReadStream(filePath, {
    highWaterMark: CHUNK_SIZE
  });
  const fileStats = ctx.fs.stat(filePath);
  (0, import_context.showSpinner)(ctx, `Importing ${filePath} (${(0, import_utils.formatSize)(fileStats.size)})`);
  const client = (0, import_utils.deploymentClient)(deploymentUrl);
  // Default mode refuses to import into non-empty tables unless the user
  // opted into --append or --replace.
  let mode = "requireEmpty";
  if (options.append) {
    mode = "append";
  } else if (options.replace) {
    mode = "replace";
  }
  const importArgs = {
    tableName: tableName === null ? void 0 : tableName,
    mode,
    format
  };
  const headers = {
    Authorization: `Convex ${adminKey}`,
    "Convex-Client": `npm-cli-${import_version.version}`
  };
  const deploymentNotice = options.prod ? ` in your ${import_chalk.default.bold("prod")} deployment` : "";
  const tableNotice = tableName ? ` to table "${import_chalk.default.bold(tableName)}"` : "";
  let importId;
  // Multipart upload: start_upload issues a token, each chunk is posted as a
  // numbered part, and finish_upload stitches the parts together and starts
  // the server-side import.
  try {
    const startResp = await client.post("/api/import/start_upload", null, {
      headers
    });
    const { uploadToken } = startResp.data;
    const partTokens = [];
    let partNumber = 1;
    for await (const chunk of data) {
      const partUrl = `/api/import/upload_part?uploadToken=${encodeURIComponent(
        uploadToken
      )}&partNumber=${partNumber}`;
      const partResp = await client.post(partUrl, chunk, { headers });
      partTokens.push(partResp.data);
      partNumber += 1;
      // `data.bytesRead` reflects how much of the file the stream has
      // consumed so far, giving a running progress fraction.
      (0, import_context.changeSpinner)(
        ctx,
        `Uploading ${filePath} (${(0, import_utils.formatSize)(data.bytesRead)}/${(0, import_utils.formatSize)(
          fileStats.size
        )})`
      );
    }
    const finishResp = await client.post(
      "/api/import/finish_upload",
      {
        import: importArgs,
        uploadToken,
        partTokens
      },
      { headers }
    );
    importId = finishResp.data.importId;
  } catch (e) {
    (0, import_context.logFailure)(
      ctx,
      `Importing data from "${import_chalk.default.bold(
        filePath
      )}"${tableNotice}${deploymentNotice} failed`
    );
    return await (0, import_utils.logAndHandleAxiosError)(ctx, e);
  }
  (0, import_context.changeSpinner)(ctx, "Parsing uploaded data");
  // Poll the import until it settles. The loop iterates again after a
  // "waiting_for_confirmation" round-trip, since confirming resumes the
  // import on the server.
  while (true) {
    const snapshotImportState = await waitForStableImportState(
      ctx,
      importId,
      deploymentUrl,
      adminKey
    );
    switch (snapshotImportState.state) {
      case "completed":
        (0, import_context.logFinishedStep)(
          ctx,
          `Added ${snapshotImportState.num_rows_written} documents${tableNotice}${deploymentNotice}.`
        );
        return;
      case "failed":
        (0, import_context.logFailure)(
          ctx,
          `Importing data from "${import_chalk.default.bold(
            filePath
          )}"${tableNotice}${deploymentNotice} failed`
        );
        (0, import_context.logError)(ctx, import_chalk.default.red(snapshotImportState.error_message));
        return await ctx.crash(1);
      case "waiting_for_confirmation": {
        // The server wants explicit approval (e.g. the import would delete
        // existing documents). askToConfirmImport crashes if the user
        // declines; otherwise we confirm via /api/perform_import.
        (0, import_context.stopSpinner)(ctx);
        await askToConfirmImport(
          ctx,
          snapshotImportState.message_to_confirm,
          snapshotImportState.require_manual_confirmation,
          options.yes
        );
        (0, import_context.showSpinner)(ctx, `Importing`);
        const performUrl = `/api/perform_import`;
        try {
          await client.post(performUrl, { importId }, { headers });
        } catch (e) {
          (0, import_context.logFailure)(
            ctx,
            `Importing data from "${import_chalk.default.bold(
              filePath
            )}"${tableNotice}${deploymentNotice} failed`
          );
          return await (0, import_utils.logAndHandleAxiosError)(ctx, e);
        }
        break;
      }
      case "uploaded": {
        // Landing back on "uploaded" after waiting means the import never
        // progressed to parsing — treat it as canceled.
        (0, import_context.logFailure)(ctx, `Import canceled while parsing uploaded file`);
        return await ctx.crash(1);
      }
      case "in_progress": {
        // waitForStableImportState normally waits out "in_progress"; seeing
        // it here means the wait ended while the server keeps working.
        (0, import_context.logFailure)(ctx, `WARNING: Import is continuing to run on the server`);
        return await ctx.crash(1);
      }
      default: {
        // Exhaustiveness guard (compiled from a TypeScript `never` check).
        const _ = snapshotImportState;
        (0, import_context.logFailure)(
          ctx,
          `unknown error: unexpected state ${snapshotImportState}`
        );
        return await ctx.crash(1);
      }
    }
  }
});
// Show the server's confirmation message and, unless the server marked it
// informational (`requireManualConfirmation === false`) or the user passed
// --yes, prompt interactively. Crashes the context if the user declines.
async function askToConfirmImport(ctx, messageToConfirm, requireManualConfirmation, yes) {
  // Nothing to show or confirm.
  if (!messageToConfirm?.length) {
    return;
  }
  (0, import_context.logMessage)(ctx, messageToConfirm);
  const needsPrompt = requireManualConfirmation !== false && !yes;
  if (!needsPrompt) {
    return;
  }
  const answers = await import_inquirer.default.prompt([
    {
      type: "confirm",
      name: "confirmed",
      message: `Perform the import?`,
      default: true
    }
  ]);
  if (!answers.confirmed) {
    return await ctx.crash(1);
  }
}
// Subscribe to `_system/cli/queryImport` and resolve once the import leaves
// its transient states ("uploaded"/"in_progress"). While in progress, each
// server checkpoint message is logged exactly once and the spinner mirrors
// the server's progress message. Returns the last observed import state.
async function waitForStableImportState(ctx, importId, deploymentUrl, adminKey) {
  const [donePromise, onDone] = (0, import_utils.waitUntilCalled)();
  let latestImportState;
  let loggedCheckpoints = 0;
  await (0, import_run.subscribe)(
    ctx,
    deploymentUrl,
    adminKey,
    "_system/cli/queryImport",
    { importId },
    donePromise,
    {
      onChange: (value) => {
        latestImportState = value.state;
        const state = latestImportState.state;
        if (state === "waiting_for_confirmation" || state === "completed" || state === "failed") {
          // Stable (or confirmation-pending) state: end the subscription.
          onDone();
          return;
        }
        if (state === "in_progress") {
          (0, import_context.stopSpinner)(ctx);
          // Log any checkpoint messages we haven't printed yet.
          const checkpoints = latestImportState.checkpoint_messages ?? [];
          while (loggedCheckpoints < checkpoints.length) {
            (0, import_context.logFinishedStep)(ctx, checkpoints[loggedCheckpoints]);
            loggedCheckpoints += 1;
          }
          (0, import_context.showSpinner)(
            ctx,
            latestImportState.progress_message ?? "Importing"
          );
        }
        // "uploaded": keep waiting for further updates.
      }
    }
  );
  return latestImportState;
}
// Decide the input format: honor an explicit --format flag (warning if it
// disagrees with the file extension), otherwise infer from the extension.
// Crashes fatally when no format can be determined.
async function determineFormat(ctx, filePath, format) {
  // Canonical extension for each supported import format.
  const formatToExtension = {
    csv: ".csv",
    jsonLines: ".jsonl",
    jsonArray: ".json",
    zip: ".zip"
  };
  const fileExtension = import_path.default.extname(filePath);
  if (fileExtension !== "") {
    // Invert the table so an extension maps back to its format.
    const extensionToFormat = {};
    for (const [fmt, ext] of Object.entries(formatToExtension)) {
      extensionToFormat[ext] = fmt;
    }
    // An explicit format that disagrees with the extension is suspicious but
    // not fatal: warn and keep the flag's value.
    if (format !== null && fileExtension !== formatToExtension[format]) {
      (0, import_context.logWarning)(
        ctx,
        import_chalk.default.yellow(
          `Warning: Extension of file ${filePath} (${fileExtension}) does not match specified format: ${format} (${formatToExtension[format]}).`
        )
      );
    }
    // Only infer from the extension when no format was specified.
    format ??= extensionToFormat[fileExtension] ?? null;
  }
  if (format === null) {
    (0, import_context.logFailure)(
      ctx,
      "No input file format inferred by the filename extension or specified. Specify your input file's format using the `--format` flag."
    );
    return await ctx.crash(1, "fatal");
  }
  return format;
}
//# sourceMappingURL=convexImport.js.map