UNPKG

convex

Version:

Client for the Convex Cloud

425 lines (423 loc) 15.4 kB
"use strict";
// Bundled (esbuild-style) CommonJS output of the Convex CLI `import` command.
// NOTE(review): line breaks restored — in the flattened form, code following the
// `// ...` interop comments was swallowed by those comments, breaking the syntax.

// --- CommonJS/ESM interop helpers emitted by the bundler ---
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Define lazy, enumerable getters on `target` for every key in `all`.
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copy own properties of `from` onto `to` as getters, skipping `except` and
// anything already present on `to`.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Wrap a CommonJS module so it can be consumed as if it were an ES module.
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
  // If the importer is in node compatibility mode or this is not an ESM
  // file that has been converted to a CommonJS file using a Babel-
  // compatible transform (i.e. "__esModule" has not been set), then set
  // "default" to the CommonJS "module.exports" for node compatibility.
  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
  mod
));
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);

// --- Module exports ---
var convexImport_exports = {};
__export(convexImport_exports, {
  confirmImport: () => confirmImport,
  convexImport: () => convexImport,
  uploadForImport: () => uploadForImport,
  waitForStableImportState: () => waitForStableImportState
});
module.exports = __toCommonJS(convexImport_exports);

// --- Dependencies (project-local CLI helpers plus chalk/commander) ---
var import_chalk = __toESM(require("chalk"), 1);
var import_utils = require("./lib/utils/utils.js");
var import_context = require("../bundler/context.js");
var import_api = require("./lib/api.js");
var import_path = __toESM(require("path"), 1);
var import_run = require("./lib/run.js");
var import_extra_typings = require("@commander-js/extra-typings");
var import_command = require("./lib/command.js");
var import_http_client = require("../browser/http_client.js");
var import_server = require("../server/index.js");
var import_dashboard = require("./dashboard.js");
var import_prompts = require("./lib/utils/prompts.js");

// Size of each multipart upload chunk (5 MiB) — used as the read stream's
// highWaterMark so `for await` yields chunks of this size.
const CHUNK_SIZE = 5 * 1024 * 1024;

/**
 * `npx convex import` — imports a snapshot ZIP or a single-table
 * csv/jsonLines/jsonArray file into a Convex deployment.
 *
 * Flow: validate flags/path -> infer format -> warn about concurrent imports
 * -> upload in CHUNK_SIZE parts -> poll the server-side import state machine,
 * prompting for confirmation when the server requests it.
 */
const convexImport = new import_extra_typings.Command("import").summary("Import data from a file to your deployment").description(
  "Import data from a file to your Convex deployment.\n\n From a snapshot: `npx convex import snapshot.zip`\n For a single table: `npx convex import --table tableName file.json`\n\nBy default, this imports into your dev deployment."
).addOption(
  new import_extra_typings.Option(
    "--table <table>",
    "Destination table name. Required if format is csv, jsonLines, or jsonArray. Not supported if format is zip."
  )
).addOption(
  new import_extra_typings.Option(
    "--replace",
    "Replace all existing data in any of the imported tables"
  ).conflicts("--append")
).addOption(
  new import_extra_typings.Option(
    "--append",
    "Append imported data to any existing tables"
  ).conflicts("--replace")
).option(
  "-y, --yes",
  "Skip confirmation prompt when import leads to deleting existing documents"
).addOption(
  new import_extra_typings.Option(
    "--format <format>",
    "Input file format. This flag is only required if the filename is missing an extension.\n- CSV files must have a header, and each row's entries are interpreted either as a (floating point) number or a string.\n- JSON files must be an array of JSON objects.\n- JSONLines files must have a JSON object per line.\n- ZIP files must have one directory per table, containing <table>/documents.jsonl. Snapshot exports from the Convex dashboard have this format."
  ).choices(["csv", "jsonLines", "jsonArray", "zip"])
).addDeploymentSelectionOptions((0, import_command.actionDescription)("Import data into")).argument("<path>", "Path to the input file").showHelpAfterError().action(async (filePath, options, command) => {
  const ctx = import_context.oneoffContext;
  // A second positional arg is almost always a table name typo'd without --table.
  if (command.args.length > 1) {
    return await ctx.crash({
      exitCode: 1,
      errorType: "fatal",
      printedMessage: `Error: Too many positional arguments. If you're specifying a table name, use the \`--table\` option.`
    });
  }
  const deploymentSelection = (0, import_api.deploymentSelectionFromOptions)(options);
  const { adminKey, url: deploymentUrl, deploymentName } = await (0, import_api.fetchDeploymentCredentialsProvisionProd)(ctx, deploymentSelection);
  if (!ctx.fs.exists(filePath)) {
    return await ctx.crash({
      exitCode: 1,
      errorType: "invalid filesystem data",
      printedMessage: `Error: Path ${import_chalk.default.bold(filePath)} does not exist.`
    });
  }
  const format = await determineFormat(ctx, filePath, options.format ?? null);
  const tableName = options.table ?? null;
  // --table is mandatory for single-table formats and forbidden for zip
  // (zip archives carry their own per-table directory structure).
  if (tableName === null) {
    if (format !== "zip") {
      return await ctx.crash({
        exitCode: 1,
        errorType: "fatal",
        printedMessage: `Error: The \`--table\` option is required for format ${format}`
      });
    }
  } else {
    if (format === "zip") {
      return await ctx.crash({
        exitCode: 1,
        errorType: "fatal",
        printedMessage: `Error: The \`--table\` option is not allowed for format ${format}`
      });
    }
  }
  await (0, import_utils.ensureHasConvexDependency)(ctx, "import");
  const convexClient = new import_http_client.ConvexHttpClient(deploymentUrl);
  convexClient.setAdminAuth(adminKey);
  // Warn (and optionally abort) if another snapshot import is already running.
  const existingImports = await convexClient.query(
    (0, import_server.makeFunctionReference)(
      "_system/cli/queryImport:list"
    ),
    {}
  );
  const ongoingImports = existingImports.filter(
    (i) => i.state.state === "in_progress"
  );
  if (ongoingImports.length > 0) {
    await askToConfirmImportWithExistingImports(
      ctx,
      deploymentName,
      options.yes
    );
  }
  const fileStats = ctx.fs.stat(filePath);
  (0, import_context.showSpinner)(ctx, `Importing ${filePath} (${(0, import_utils.formatSize)(fileStats.size)})`);
  // Default mode requires target tables to be empty; --append/--replace relax that.
  let mode = "requireEmpty";
  if (options.append) {
    mode = "append";
  } else if (options.replace) {
    mode = "replace";
  }
  const importArgs = { tableName: tableName === null ? void 0 : tableName, mode, format };
  const deploymentNotice = options.prod ? ` in your ${import_chalk.default.bold("prod")} deployment` : "";
  const tableNotice = tableName ? ` to table "${import_chalk.default.bold(tableName)}"` : "";
  const onFailure = async () => {
    (0, import_context.logFailure)(
      ctx,
      `Importing data from "${import_chalk.default.bold(
        filePath
      )}"${tableNotice}${deploymentNotice} failed`
    );
  };
  const importId = await uploadForImport(ctx, {
    deploymentUrl,
    adminKey,
    filePath,
    importArgs,
    onImportFailed: onFailure
  });
  (0, import_context.changeSpinner)(ctx, "Parsing uploaded data");
  // Prints any server-side checkpoint messages not yet shown; returns the
  // updated count so progress across subscription updates isn't re-printed.
  const onProgress = (ctx2, state, checkpointCount) => {
    (0, import_context.stopSpinner)(ctx2);
    while ((state.checkpoint_messages?.length ?? 0) > checkpointCount) {
      (0, import_context.logFinishedStep)(ctx2, state.checkpoint_messages[checkpointCount]);
      checkpointCount += 1;
    }
    (0, import_context.showSpinner)(ctx2, state.progress_message ?? "Importing");
    return checkpointCount;
  };
  // Drive the server's import state machine until it reaches a terminal state.
  while (true) {
    const snapshotImportState = await waitForStableImportState(ctx, {
      importId,
      deploymentUrl,
      adminKey,
      onProgress
    });
    switch (snapshotImportState.state) {
      case "completed":
        (0, import_context.logFinishedStep)(
          ctx,
          `Added ${snapshotImportState.num_rows_written} documents${tableNotice}${deploymentNotice}.`
        );
        return;
      case "failed":
        return await ctx.crash({
          exitCode: 1,
          errorType: "fatal",
          printedMessage: `Importing data from "${import_chalk.default.bold(
            filePath
          )}"${tableNotice}${deploymentNotice} failed ${import_chalk.default.red(snapshotImportState.error_message)}`
        });
      case "waiting_for_confirmation": {
        // Server paused the import pending user approval (e.g. destructive replace).
        (0, import_context.stopSpinner)(ctx);
        await askToConfirmImport(
          ctx,
          snapshotImportState.message_to_confirm,
          snapshotImportState.require_manual_confirmation,
          options.yes
        );
        (0, import_context.showSpinner)(ctx, `Importing`);
        await confirmImport(ctx, {
          importId,
          adminKey,
          deploymentUrl,
          onError: async () => {
            (0, import_context.logFailure)(
              ctx,
              `Importing data from "${import_chalk.default.bold(
                filePath
              )}"${tableNotice}${deploymentNotice} failed`
            );
          }
        });
        // Loop again to wait for the post-confirmation state.
        break;
      }
      case "uploaded": {
        return await ctx.crash({
          exitCode: 1,
          errorType: "fatal",
          printedMessage: `Import canceled while parsing uploaded file`
        });
      }
      case "in_progress": {
        // The CLI stops waiting; the server keeps going. Point at the dashboard.
        return await ctx.crash({
          exitCode: 1,
          errorType: "fatal",
          printedMessage: `WARNING: Import is continuing to run on the server. 
Visit ${snapshotImportDashboardLink(deploymentName)} to monitor its progress.`
        });
      }
      default: {
        // Compile-time exhaustiveness residue from the TypeScript source.
        const _ = snapshotImportState;
        return await ctx.crash({
          exitCode: 1,
          errorType: "fatal",
          printedMessage: `unknown error: unexpected state ${snapshotImportState}`,
          errForSentry: `unexpected snapshot import state ${snapshotImportState.state}`
        });
      }
    }
  }
});

/**
 * Show the server's confirmation message and, unless confirmation is waived
 * (requireManualConfirmation === false) or --yes was passed, prompt the user.
 * Crashes the context (exit 1) if the user declines.
 */
async function askToConfirmImport(ctx, messageToConfirm, requireManualConfirmation, yes) {
  if (!messageToConfirm?.length) {
    return;
  }
  (0, import_context.logMessage)(ctx, messageToConfirm);
  if (requireManualConfirmation !== false && !yes) {
    const confirmed = await (0, import_prompts.promptYesNo)(ctx, {
      message: "Perform import?",
      default: true
    });
    if (!confirmed) {
      return await ctx.crash({
        exitCode: 1,
        errorType: "fatal",
        printedMessage: "Import canceled"
      });
    }
  }
}

/**
 * Dashboard URL for the snapshot import/export settings page; falls back to a
 * generic link when the deployment name is unknown.
 */
function snapshotImportDashboardLink(deploymentName) {
  return deploymentName === void 0 ? "https://dashboard.convex.dev/d/settings/snapshot-export" : (0, import_dashboard.deploymentDashboardUrlPage)(deploymentName, "/settings/snapshot-export");
}

/**
 * Warn that another snapshot import is already running and ask whether to
 * start one anyway (skipped by --yes). Crashes the context if the user declines.
 */
async function askToConfirmImportWithExistingImports(ctx, deploymentName, yes) {
  (0, import_context.logMessage)(
    ctx,
    `There is already a snapshot import in progress. 
You can view its progress at ${snapshotImportDashboardLink(deploymentName)}.`
  );
  if (yes) {
    return;
  }
  const confirmed = await (0, import_prompts.promptYesNo)(ctx, {
    message: "Start another import?",
    default: true
  });
  if (!confirmed) {
    return await ctx.crash({
      exitCode: 1,
      errorType: "fatal",
      printedMessage: "Import canceled"
    });
  }
}

/**
 * Subscribe to the import's state and resolve once it reaches a "stable"
 * state (waiting_for_confirmation / completed / failed). "in_progress"
 * updates are forwarded to `onProgress`; "uploaded" is ignored.
 * @returns the last observed snapshot import state object.
 */
async function waitForStableImportState(ctx, args) {
  const { importId, deploymentUrl, adminKey, onProgress } = args;
  // donePromise settles when onDone() is called, ending the subscription.
  const [donePromise, onDone] = (0, import_utils.waitUntilCalled)();
  let snapshotImportState;
  let checkpointCount = 0;
  await (0, import_run.subscribe)(
    ctx,
    deploymentUrl,
    adminKey,
    "_system/cli/queryImport",
    { importId },
    donePromise,
    {
      onChange: (value) => {
        snapshotImportState = value.state;
        switch (snapshotImportState.state) {
          case "waiting_for_confirmation":
          case "completed":
          case "failed":
            onDone();
            break;
          case "uploaded":
            return;
          case "in_progress":
            checkpointCount = onProgress(
              ctx,
              snapshotImportState,
              checkpointCount
            );
            return;
        }
      }
    }
  );
  return snapshotImportState;
}

/**
 * Resolve the input format from the --format flag and/or the file extension.
 * Warns when the two disagree (the explicit flag wins); crashes the context
 * when neither yields a format.
 */
async function determineFormat(ctx, filePath, format) {
  const fileExtension = import_path.default.extname(filePath);
  if (fileExtension !== "") {
    const formatToExtension = {
      csv: ".csv",
      jsonLines: ".jsonl",
      jsonArray: ".json",
      zip: ".zip"
    };
    // Inverse lookup: ".csv" -> "csv", etc.
    const extensionToFormat = Object.fromEntries(
      Object.entries(formatToExtension).map((a) => a.reverse())
    );
    if (format !== null && fileExtension !== formatToExtension[format]) {
      (0, import_context.logWarning)(
        ctx,
        import_chalk.default.yellow(
          `Warning: Extension of file ${filePath} (${fileExtension}) does not match specified format: ${format} (${formatToExtension[format]}).`
        )
      );
    }
    // Only infer from the extension when no explicit format was given.
    format ?? (format = extensionToFormat[fileExtension] ?? null);
  }
  if (format === null) {
    return await ctx.crash({
      exitCode: 1,
      errorType: "fatal",
      printedMessage: "No input file format inferred by the filename extension or specified. Specify your input file's format using the `--format` flag."
    });
  }
  return format;
}

/**
 * Tell the deployment to proceed with a paused import (POST /api/perform_import).
 * On failure, invokes args.onError(e) before delegating to the shared
 * fetch-error handler.
 */
async function confirmImport(ctx, args) {
  const { importId, adminKey, deploymentUrl } = args;
  const fetch = (0, import_utils.deploymentFetch)(deploymentUrl, adminKey);
  const performUrl = `/api/perform_import`;
  try {
    await fetch(performUrl, {
      method: "POST",
      body: JSON.stringify({ importId })
    });
  } catch (e) {
    await args.onError(e);
    return await (0, import_utils.logAndHandleFetchError)(ctx, e);
  }
}

/**
 * Upload the input file to the deployment in CHUNK_SIZE parts
 * (start_upload -> upload_part per chunk -> finish_upload), updating the
 * spinner with bytes uploaded. On failure, invokes args.onImportFailed(e)
 * before delegating to the shared fetch-error handler.
 * @returns the server-assigned importId (undefined on handled failure).
 */
async function uploadForImport(ctx, args) {
  const { deploymentUrl, adminKey, filePath } = args;
  const fetch = (0, import_utils.deploymentFetch)(deploymentUrl, adminKey);
  // highWaterMark makes the async iterator below yield CHUNK_SIZE-sized chunks.
  const data = ctx.fs.createReadStream(filePath, { highWaterMark: CHUNK_SIZE });
  const fileStats = ctx.fs.stat(filePath);
  (0, import_context.showSpinner)(ctx, `Importing ${filePath} (${(0, import_utils.formatSize)(fileStats.size)})`);
  let importId;
  try {
    const startResp = await fetch("/api/import/start_upload", { method: "POST" });
    const { uploadToken } = await startResp.json();
    const partTokens = [];
    let partNumber = 1;
    for await (const chunk of data) {
      const partUrl = `/api/import/upload_part?uploadToken=${encodeURIComponent(
        uploadToken
      )}&partNumber=${partNumber}`;
      const partResp = await fetch(partUrl, {
        headers: { "Content-Type": "application/octet-stream" },
        body: chunk,
        method: "POST"
      });
      partTokens.push(await partResp.json());
      partNumber += 1;
      (0, import_context.changeSpinner)(
        ctx,
        `Uploading ${filePath} (${(0, import_utils.formatSize)(data.bytesRead)}/${(0, import_utils.formatSize)(
          fileStats.size
        )})`
      );
    }
    const finishResp = await fetch("/api/import/finish_upload", {
      body: JSON.stringify({
        import: args.importArgs,
        uploadToken,
        partTokens
      }),
      method: "POST"
    });
    const body = await finishResp.json();
    importId = body.importId;
  } catch (e) {
    await args.onImportFailed(e);
    return await (0, import_utils.logAndHandleFetchError)(ctx, e);
  }
  return importId;
}
//# sourceMappingURL=convexImport.js.map