// convex — Client for the Convex Cloud
// Generated JavaScript bundle (esbuild CommonJS output) for the `convex import` CLI command.
// Original artifact: 136 lines (135 loc), 5.43 kB.
// esbuild-generated CommonJS/ESM interop prelude: cached Object intrinsics
// plus helpers that re-export module bindings through live getters.
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;

// Define every entry of `all` on `target` as an enumerable lazy getter, so
// exports stay live bindings rather than snapshots.
var __export = (target, all) => {
  for (var name in all) {
    __defProp(target, name, { get: all[name], enumerable: true });
  }
};

// Mirror each own property of `from` onto `to` as a live getter, skipping
// `except` and keys already present on `to`; `desc` is scratch storage for
// the source property descriptor.
var __copyProps = (to, from, except, desc) => {
  if ((from && typeof from === "object") || typeof from === "function") {
    for (let key of __getOwnPropNames(from)) {
      if (!__hasOwnProp.call(to, key) && key !== except) {
        desc = __getOwnPropDesc(from, key);
        __defProp(to, key, { get: () => from[key], enumerable: !desc || desc.enumerable });
      }
    }
  }
  return to;
};

// Wrap a required CommonJS module so it can be consumed like an ES module:
// add a `default` binding to the original exports (unless the module already
// declares itself ESM), then mirror its named exports.
var __toESM = (mod, isNodeMode, target) => {
  target = mod != null ? __create(__getProtoOf(mod)) : {};
  if (isNodeMode || !mod || !mod.__esModule) {
    __defProp(target, "default", { value: mod, enumerable: true });
  }
  return __copyProps(target, mod);
};

// Tag a fresh object as an ES module and mirror the namespace onto it for
// assignment to `module.exports`.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
// Module export table: expose `convexImport` through a lazy getter so the
// CommonJS consumer sees a live binding.
var convexImport_exports = {};
__export(convexImport_exports, { convexImport: () => convexImport });
module.exports = __toCommonJS(convexImport_exports);
var import_commander = require("commander");
var import_chalk = __toESM(require("chalk"));
var import_config = require("./lib/config");
var import_utils = require("./lib/utils");
var import_axios = __toESM(require("axios"));
var import__ = require("../index.js");
var import_api = require("./lib/api");
var import_context = require("./lib/context");
/**
 * `convex import` CLI command: streams a CSV / JSONLines / JSON file into a
 * Convex table on the dev (default) or prod deployment.
 *
 * Flags:
 *   --format <format>      input format; inferred from the file extension
 *                          when omitted (csv / jsonl / json)
 *   --prod                 target the production deployment instead of dev
 *   --replace / --append   mutually exclusive; without either, the import
 *                          requires the destination table to be empty
 *   --url / --admin-key    hidden overrides for the deployment endpoint
 */
const convexImport = new import_commander.Command("import").description("Import data from a file into a Convex table").addOption(
  new import_commander.Option(
    "--format <format>",
    "Input file format. This flag is only required if the filename is missing an extension. CSV files must have a header, and each rows' entries are interpreted either as a (floating point) number or a string. JSONLines files must have a JSON object per line. JSON files must be an array of JSON objects."
  ).choices(["csv", "jsonLines", "jsonArray"])
).option(
  "--prod",
  "Import data into this project's production deployment. Defaults to your dev deployment without this flag."
).addOption(
  new import_commander.Option("--replace", "Replace any existing data in the table").conflicts(
    "--append"
  )
).addOption(
  new import_commander.Option(
    "--append",
    "Append to any existing data in the table"
  ).conflicts("--replace")
).addOption(new import_commander.Option("--url <url>").hideHelp()).addOption(new import_commander.Option("--admin-key <adminKey>").hideHelp()).argument("<tableName>", "Destination table name").argument("<path>", "Path to the input file").action(async (tableName, path, options) => {
  const ctx = import_context.oneoffContext;
  // Infer the format from the file extension. An explicit --format must
  // agree with a *recognized* extension; an unrecognized extension (e.g.
  // ".txt") falls back to the explicit flag instead of erroring, and no
  // longer discards the flag via an unconditional overwrite.
  let format = options.format;
  const pathParts = path.split(".");
  if (pathParts.length > 1) {
    const fileType = pathParts[pathParts.length - 1];
    let inferredFormat;
    switch (fileType) {
      case "csv":
        inferredFormat = "csv";
        break;
      case "jsonl":
        inferredFormat = "jsonLines";
        break;
      case "json":
        inferredFormat = "jsonArray";
        break;
    }
    if (format && inferredFormat && format !== inferredFormat) {
      throw new Error(
        `Format of file ${path} does not match specified format: ${format}`
      );
    }
    format = inferredFormat ?? format;
  }
  if (!format) {
    throw new Error(
      "No input file format inferred by the filename extension or specified. Specify your input file's format using the `--format` flag."
    );
  }
  const { projectConfig } = await (0, import_config.readProjectConfig)(ctx);
  const deploymentType = options.prod ? "prod" : "dev";
  // Resolve the deployment URL and admin key; explicit hidden flags take
  // precedence, and the dashboard lookup is skipped only when both are given.
  let deploymentUrl, adminKey;
  if (!options.url || !options.adminKey) {
    let url;
    ({ url, adminKey } = await (0, import_api.getUrlAndAdminKey)(
      ctx,
      projectConfig.project,
      projectConfig.team,
      deploymentType
    ));
    deploymentUrl = url;
  }
  adminKey = options.adminKey ?? adminKey;
  deploymentUrl = options.url ?? deploymentUrl;
  await (0, import_utils.ensureHasConvexDependency)(ctx, "import");
  if (!ctx.fs.exists(path)) {
    console.error(import_chalk.default.gray(`Error: Path ${path} does not exist.`));
    return await ctx.fatalError(1, "fs");
  }
  // Stream the file body to the deployment's import endpoint rather than
  // buffering it in memory.
  const data = ctx.fs.createReadStream(path);
  const fileStats = ctx.fs.stat(path);
  console.log(
    import_chalk.default.gray(`Importing ${path} (${(0, import_utils.formatSize)(fileStats.size)})...`)
  );
  const urlName = encodeURIComponent(tableName);
  const urlFormat = encodeURIComponent(format);
  const client = import_axios.default.create();
  let resp;
  // Import mode: requireEmpty unless --append/--replace was given (the two
  // flags are declared mutually exclusive above, so order here is moot).
  let mode = "requireEmpty";
  if (options.append) {
    mode = "append";
  } else if (options.replace) {
    mode = "replace";
  }
  try {
    const url = `${deploymentUrl}/api/${import__.version}/import?tableName=${urlName}&format=${urlFormat}&mode=${mode}`;
    resp = await client.post(url, data, {
      headers: {
        Authorization: `Convex ${adminKey}`,
        "Content-Type": "text/plain"
      }
    });
  } catch (e) {
    // Server-side failures (bad format, non-empty table, auth) are reported
    // through the shared fatal-error path.
    return await (0, import_utils.fatalServerErr)(ctx, e);
  }
  console.log(
    import_chalk.default.green(`Wrote ${resp.data.numWritten} rows to ${tableName}.`)
  );
});
//# sourceMappingURL=convexImport.js.map