// convex — CLI client for the Convex Cloud (bundled output: deploy2.js)
"use strict";
// esbuild-generated CommonJS interop helpers (standard bundler preamble).
// Do not hand-edit: the bundler regenerates these on every build.
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Defines each entry of `all` on `target` as a lazy, enumerable getter —
// this is how ESM `export` statements are lowered to CommonJS.
var __export = (target, all) => {
for (var name in all)
__defProp(target, name, { get: all[name], enumerable: true });
};
// Copies own properties of `from` onto `to` as getters (live bindings),
// skipping `except` and anything `to` already owns.
var __copyProps = (to, from, except, desc) => {
if (from && typeof from === "object" || typeof from === "function") {
for (let key of __getOwnPropNames(from))
if (!__hasOwnProp.call(to, key) && key !== except)
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
}
return to;
};
// Wraps a CommonJS module so it can be consumed with ESM import semantics.
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
// If the importer is in node compatibility mode or this is not an ESM
// file that has been converted to a CommonJS file using a Babel-
// compatible transform (i.e. "__esModule" has not been set), then set
// "default" to the CommonJS "module.exports" for node compatibility.
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
mod
));
// Marks the exports object as an ES module and copies the exported getters in.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
// Public surface of this module, compiled from the original ESM `export`s.
var deploy2_exports = {};
__export(deploy2_exports, {
finishPush: () => finishPush,
reportPushCompleted: () => reportPushCompleted,
startPush: () => startPush,
waitForSchema: () => waitForSchema
});
module.exports = __toCommonJS(deploy2_exports);
// Bundled project dependencies.
// `context` supplies logging/spinner helpers (changeSpinner, logFailure, logError).
var import_context = require("../../bundler/context.js");
// `utils` supplies deploymentFetch, ThrowingFetchError, and logAndHandleFetchError.
var import_utils = require("./utils/utils.js");
// Response schemas (startPushResponse, schemaStatus) for the deploy2 endpoints.
var import_startPush = require("./deployApi/startPush.js");
// chalk is CJS, so it goes through the ESM interop shim.
var import_chalk = __toESM(require("chalk"), 1);
var import_deployment = require("./deployment.js");
var import_dashboard = require("../dashboard.js");
// Response schema (finishPushDiff) for the finish_push endpoint.
var import_finishPush = require("./deployApi/finishPush.js");
/**
 * Kick off a deploy ("start push") against the deployment at `options.url`.
 *
 * POSTs `request` to /api/deploy2/start_push and returns the parsed
 * `startPushResponse`. On an AuthConfigMissingEnvironmentVariable server
 * error, crashes with a message pointing at the dashboard page for the
 * missing environment variable; any other failure is logged and delegated
 * to `logAndHandleFetchError`.
 *
 * @param {object} ctx - CLI context (spinner/logging, `crash`).
 * @param {object} span - Trace span; `encodeW3CTraceparent()` feeds the
 *   `traceparent` header.
 * @param {object} request - Push request payload, including `adminKey`.
 * @param {object} options - `{ url, verbose, ... }` for the target deployment.
 */
async function startPush(ctx, span, request, options) {
  if (options.verbose) {
    // Truncate long string values so the dumped request stays readable.
    const custom = (_k, s) => typeof s === "string" ? s.slice(0, 40) + (s.length > 40 ? "..." : "") : s;
    console.log(JSON.stringify(request, custom, 2));
  }
  // Surface low-level connectivity failures (undici's "fetch failed") as a
  // spinner hint instead of a silent retry.
  const onError = (err) => {
    if (err.toString() === "TypeError: fetch failed") {
      (0, import_context.changeSpinner)(
        ctx,
        `Fetch failed, is ${options.url} correct? Retrying...`
      );
    }
  };
  const fetch = (0, import_utils.deploymentFetch)(options.url, request.adminKey, onError);
  (0, import_context.changeSpinner)(ctx, "Analyzing and deploying source code...");
  try {
    const response = await fetch("/api/deploy2/start_push", {
      body: JSON.stringify(request),
      method: "POST",
      headers: {
        traceparent: span.encodeW3CTraceparent()
      }
    });
    return import_startPush.startPushResponse.parse(await response.json());
  } catch (error) {
    const data = error instanceof import_utils.ThrowingFetchError ? error.serverErrorData : void 0;
    if (data?.code === "AuthConfigMissingEnvironmentVariable") {
      const errorMessage = data.message || "(no error message given)";
      const configuredDeployment = (0, import_deployment.getTargetDeploymentName)();
      // Best-effort extraction of the variable name from the server message.
      const [, variableName] = errorMessage.match(/Environment variable (\S+)/i) ?? [];
      const variableQuery = variableName !== void 0 ? `?var=${variableName}` : "";
      const dashboardUrl = (0, import_dashboard.deploymentDashboardUrlPage)(
        configuredDeployment,
        `/settings/environment-variables${variableQuery}`
      );
      // Fix: when the regex did not match, the old code printed the literal
      // "Environment variable undefined"; fall back to a readable label.
      const variableLabel = variableName !== void 0 ? import_chalk.default.bold(variableName) : "(unknown)";
      const message = `Environment variable ${variableLabel} is used in auth config file but its value was not set. Go to:
${import_chalk.default.bold(dashboardUrl)}
to set it up. `;
      await ctx.crash({
        exitCode: 1,
        errorType: "invalid filesystem or env vars",
        errForSentry: error,
        printedMessage: message
      });
    }
    (0, import_context.logFailure)(ctx, "Error: Unable to start push to " + options.url);
    return await (0, import_utils.logAndHandleFetchError)(ctx, error);
  }
}
// 10 seconds — sent as `timeoutMs` on each /wait_for_schema request; the
// client loops in waitForSchema until the schema change resolves.
const SCHEMA_TIMEOUT_MS = 1e4;
/**
 * Poll /api/deploy2/wait_for_schema until the schema change from
 * `startPush2` finishes: returns normally on "complete", crashes on
 * "failed" or "raceDetected", and keeps polling (updating the spinner with
 * index-backfill progress) while "inProgress".
 *
 * @param {object} ctx - CLI context (spinner/logging, `crash`).
 * @param {object} span - Trace span for the `traceparent` header.
 * @param {object} startPush2 - Response from startPush; provides `schemaChange`.
 * @param {object} options - `{ url, adminKey, dryRun }`.
 */
async function waitForSchema(ctx, span, startPush2, options) {
  const fetch = (0, import_utils.deploymentFetch)(options.url, options.adminKey);
  (0, import_context.changeSpinner)(
    ctx,
    "Backfilling indexes and checking that documents match your schema..."
  );
  for (;;) {
    let status;
    try {
      const response = await fetch("/api/deploy2/wait_for_schema", {
        body: JSON.stringify({
          adminKey: options.adminKey,
          schemaChange: startPush2.schemaChange,
          timeoutMs: SCHEMA_TIMEOUT_MS,
          dryRun: options.dryRun
        }),
        method: "POST",
        headers: {
          traceparent: span.encodeW3CTraceparent()
        }
      });
      status = import_startPush.schemaStatus.parse(await response.json());
    } catch (error) {
      (0, import_context.logFailure)(ctx, `Error: Unable to wait for schema from ${options.url}`);
      return await (0, import_utils.logAndHandleFetchError)(ctx, error);
    }
    switch (status.type) {
      case "inProgress": {
        // Aggregate validation/backfill progress across all components.
        const components = Object.values(status.components);
        const schemaDone = components.every((c) => c.schemaValidationComplete);
        const ready = components.reduce((sum, c) => sum + c.indexesComplete, 0);
        const total = components.reduce((sum, c) => sum + c.indexesTotal, 0);
        const indexesDone = ready === total;
        let spinnerMessage;
        if (indexesDone) {
          spinnerMessage = "Checking that documents match your schema...";
        } else if (schemaDone) {
          spinnerMessage = `Backfilling indexes (${ready}/${total} ready)...`;
        } else {
          spinnerMessage = `Backfilling indexes (${ready}/${total} ready) and checking that documents match your schema...`;
        }
        (0, import_context.changeSpinner)(ctx, spinnerMessage);
        break;
      }
      case "failed": {
        // Log the failure (with the offending component, when known) before
        // crashing; the crash itself prints nothing extra.
        let failureMessage = "Schema validation failed";
        if (status.componentPath) {
          failureMessage += ` in component "${status.componentPath}"`;
        }
        failureMessage += ".";
        (0, import_context.logFailure)(ctx, failureMessage);
        (0, import_context.logError)(ctx, import_chalk.default.red(`${status.error}`));
        return await ctx.crash({
          exitCode: 1,
          errorType: {
            "invalid filesystem or db data": status.tableName ? {
              tableName: status.tableName,
              componentPath: status.componentPath
            } : null
          },
          printedMessage: null
          // TODO - move logging into here
        });
      }
      case "raceDetected": {
        return await ctx.crash({
          exitCode: 1,
          errorType: "fatal",
          printedMessage: `Schema was overwritten by another push.`
        });
      }
      case "complete": {
        (0, import_context.changeSpinner)(ctx, "Schema validation complete.");
        return;
      }
    }
  }
}
/**
 * Finalize the push started by `startPush2`: POSTs to
 * /api/deploy2/finish_push and returns the parsed `finishPushDiff`.
 * On failure, logs and delegates to `logAndHandleFetchError`.
 *
 * @param {object} ctx - CLI context (spinner/logging).
 * @param {object} span - Trace span for the `traceparent` header.
 * @param {object} startPush2 - Response from startPush, echoed back as `startPush`.
 * @param {object} options - `{ url, adminKey, dryRun }`.
 */
async function finishPush(ctx, span, startPush2, options) {
  (0, import_context.changeSpinner)(ctx, "Finalizing push...");
  const fetch = (0, import_utils.deploymentFetch)(options.url, options.adminKey);
  const payload = {
    adminKey: options.adminKey,
    startPush: startPush2,
    dryRun: options.dryRun
  };
  try {
    const response = await fetch("/api/deploy2/finish_push", {
      body: JSON.stringify(payload),
      method: "POST",
      headers: { traceparent: span.encodeW3CTraceparent() }
    });
    return import_finishPush.finishPushDiff.parse(await response.json());
  } catch (error) {
    (0, import_context.logFailure)(ctx, `Error: Unable to finish push to ${options.url}`);
    return await (0, import_utils.logAndHandleFetchError)(ctx, error);
  }
}
/**
 * Best-effort upload of client-side trace spans after a push finishes.
 * Deliberately never throws: a telemetry failure is logged and otherwise
 * ignored so it cannot fail an already-completed push.
 *
 * @param {object} ctx - CLI context (logging).
 * @param {string} adminKey - Deployment admin key.
 * @param {string} url - Deployment URL.
 * @param {object} reporter - Holds the collected `spans` to upload.
 */
async function reportPushCompleted(ctx, adminKey, url, reporter) {
  const fetch = (0, import_utils.deploymentFetch)(url, adminKey);
  try {
    const response = await fetch("/api/deploy2/report_push_completed", {
      body: JSON.stringify({ adminKey, spans: reporter.spans }),
      method: "POST"
    });
    // Drain the response body so the request fully completes; the parsed
    // value is unused.
    await response.json();
  } catch (error) {
    (0, import_context.logFailure)(
      ctx,
      `Error: Unable to report push completed to ${url}: ${error}`
    );
  }
}
//# sourceMappingURL=deploy2.js.map