sanity
Version:
Sanity is a real-time content infrastructure with a scalable, hosted backend featuring a Graph Oriented Query Language (GROQ), asset pipelines and fast edge caches
1,191 lines (1,178 loc) • 167 kB
JavaScript
"use strict";
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf, __hasOwnProp = Object.prototype.hasOwnProperty;
// Copies every own property of `from` onto `to` as a live getter, skipping
// the `except` key and keys already present on `to`. Enumerability mirrors
// the source property descriptor. Standard esbuild interop helper.
var __copyProps = (to, from, except, desc) => {
if (from && typeof from == "object" || typeof from == "function")
for (let key of __getOwnPropNames(from))
!__hasOwnProp.call(to, key) && key !== except && __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
return to;
};
// Wraps a require()d CommonJS module so it can be consumed like an ESM
// namespace object, copying its properties onto a fresh target object.
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
// If the importer is in node compatibility mode or this is not an ESM
// file that has been converted to a CommonJS file using a Babel-
// compatible transform (i.e. "__esModule" has not been set), then set
// "default" to the CommonJS "module.exports" for node compatibility.
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: !0 }) : target,
mod
));
var debug$3 = require("debug"), fs = require("node:fs"), fs$1 = require("node:fs/promises"), os = require("node:os"), path = require("node:path"), promises = require("node:stream/promises"), fs$2 = require("@sanity/util/fs"), asyncMutex = require("async-mutex"), isString = require("lodash/isString.js"), prettyMs = require("pretty-ms"), helpers = require("yargs/helpers"), yargs = require("yargs/yargs"), zlib = require("node:zlib"), rimraf = require("rimraf"), getIt = require("get-it"), middleware = require("get-it/middleware"), node_stream = require("node:stream"), consoleTablePrinter = require("console-table-printer"), dateFns = require("date-fns"), url = require("node:url"), logSymbols = require("log-symbols"), oneline = require("oneline"), EventSource = require("@sanity/eventsource"), rxjs = require("rxjs"), exportDataset = require("@sanity/export"), sanityImport = require("@sanity/import"), padStart = require("lodash/padStart.js"), uuid = require("@sanity/uuid"), chokidar = require("chokidar"), execa = require("execa"), json5 = require("json5"), isEqual = require("lodash/isEqual.js"), isPlainObject = require("lodash/isPlainObject.js"), noop$1 = require("lodash/noop.js"), pluralize = require("pluralize-esm"), tokenize = require("json-lexer"), open = require("open"), node_util = require("node:util"), groupBy = require("lodash/groupBy.js"), deburr = require("lodash/deburr.js"), node = require("esbuild-register/dist/node"), migrate = require("@sanity/migrate"), node_tty = require("node:tty"), types = require("@sanity/types"), size = require("lodash/size.js"), sortBy = require("lodash/sortBy.js");
function _interopDefaultCompat(e) {
return e && typeof e == "object" && "default" in e ? e : { default: e };
}
var debug__default = /* @__PURE__ */ _interopDefaultCompat(debug$3), fs__default = /* @__PURE__ */ _interopDefaultCompat(fs), fs__default$1 = /* @__PURE__ */ _interopDefaultCompat(fs$1), os__default = /* @__PURE__ */ _interopDefaultCompat(os), path__default = /* @__PURE__ */ _interopDefaultCompat(path), isString__default = /* @__PURE__ */ _interopDefaultCompat(isString), prettyMs__default = /* @__PURE__ */ _interopDefaultCompat(prettyMs), yargs__default = /* @__PURE__ */ _interopDefaultCompat(yargs), zlib__default = /* @__PURE__ */ _interopDefaultCompat(zlib), url__default = /* @__PURE__ */ _interopDefaultCompat(url), logSymbols__default = /* @__PURE__ */ _interopDefaultCompat(logSymbols), oneline__default = /* @__PURE__ */ _interopDefaultCompat(oneline), EventSource__default = /* @__PURE__ */ _interopDefaultCompat(EventSource), exportDataset__default = /* @__PURE__ */ _interopDefaultCompat(exportDataset), sanityImport__default = /* @__PURE__ */ _interopDefaultCompat(sanityImport), padStart__default = /* @__PURE__ */ _interopDefaultCompat(padStart), chokidar__default = /* @__PURE__ */ _interopDefaultCompat(chokidar), execa__default = /* @__PURE__ */ _interopDefaultCompat(execa), json5__default = /* @__PURE__ */ _interopDefaultCompat(json5), isEqual__default = /* @__PURE__ */ _interopDefaultCompat(isEqual), isPlainObject__default = /* @__PURE__ */ _interopDefaultCompat(isPlainObject), noop__default = /* @__PURE__ */ _interopDefaultCompat(noop$1), pluralize__default = /* @__PURE__ */ _interopDefaultCompat(pluralize), tokenize__default = /* @__PURE__ */ _interopDefaultCompat(tokenize), open__default = /* @__PURE__ */ _interopDefaultCompat(open), groupBy__default = /* @__PURE__ */ _interopDefaultCompat(groupBy), deburr__default = /* @__PURE__ */ _interopDefaultCompat(deburr), size__default = /* @__PURE__ */ _interopDefaultCompat(size), sortBy__default = /* @__PURE__ */ _interopDefaultCompat(sortBy);
const defaultApiVersion$1 = "v2024-02-21", datasetBackupGroup = {
name: "backup",
signature: "[COMMAND]",
description: "Manage backups.",
isGroupRoot: !0
};
// Extracts a normalized { statusCode, message } pair from the several error
// shapes the API client can produce. Falls back to JSON-serializing the whole
// error when no message field can be found.
function parseApiErr(err) {
  const apiErr = {};
  if (err.code) {
    apiErr.statusCode = err.code;
  } else if (err.statusCode) {
    apiErr.statusCode = err.statusCode;
  }
  if (err.message) {
    apiErr.message = err.message;
  } else if (err.statusMessage) {
    apiErr.message = err.statusMessage;
  } else if (err?.response?.body?.message) {
    apiErr.message = err.response.body.message;
  } else if (err?.response?.data?.message) {
    apiErr.message = err.response.data.message;
  } else {
    apiErr.message = JSON.stringify(err);
  }
  return apiErr;
}
const debug$2 = debug__default.default("sanity:core");
// Validates a dataset name against the API naming rules. Returns an error
// message string when invalid, or `false` when the name is acceptable
// (inquirer-style "message or false" validate convention).
function validateDatasetName(datasetName) {
  if (!datasetName) {
    return "Dataset name is missing";
  }
  const name = `${datasetName}`;
  if (name.toLowerCase() !== name) {
    return "Dataset name must be all lowercase characters";
  }
  if (name.length < 2) {
    return "Dataset name must be at least two characters long";
  }
  if (name.length > 64) {
    return "Dataset name must be at most 64 characters";
  }
  if (!/^[a-z0-9]/.test(name)) {
    return "Dataset name must start with a letter or a number";
  }
  if (!/^[a-z0-9][-_a-z0-9]+$/.test(name)) {
    return "Dataset name must only contain letters, numbers, dashes and underscores";
  }
  if (/[-_]$/.test(name)) {
    return "Dataset name must not end with a dash or an underscore";
  }
  return false;
}
// Prompts for a dataset name, wiring validateDatasetName into the inquirer
// validate hook (error message when invalid, `true` when valid). Additional
// inquirer options may be layered on via `options`.
function promptForDatasetName(prompt, options = {}) {
  const question = {
    type: "input",
    message: "Dataset name:",
    validate: (name) => validateDatasetName(name) || !0,
    ...options
  };
  return prompt.single(question);
}
// Lists the project's datasets and prompts the user to pick one. With
// `options.allowCreation`, a "Create new dataset" entry is prepended;
// choosing it prompts for a name, creates the dataset, and returns the new
// name. Otherwise returns the selected existing dataset name.
async function chooseDatasetPrompt(context, options = {}) {
const {
apiClient,
prompt
} = context, {
message,
allowCreation
} = options, client = apiClient(), datasets = await client.datasets.list(), hasProduction = datasets.find((dataset) => dataset.name === "production"), datasetChoices = datasets.map((dataset) => ({
value: dataset.name
})), selected = await prompt.single({
message: message || "Select dataset to use",
type: "list",
choices: allowCreation ? [{
value: "new",
name: "Create new dataset"
}, new prompt.Separator(), ...datasetChoices] : datasetChoices
});
if (selected === "new") {
debug$2("User wants to create a new dataset, prompting for name");
const newDatasetName = await promptForDatasetName(prompt, {
message: "Name your dataset:",
// Only suggest "production" as the default when it doesn't exist yet.
default: hasProduction ? void 0 : "production"
});
return await client.datasets.create(newDatasetName), newDatasetName;
}
return selected;
}
// Resolves a configured API client plus { projectId, datasetName, token }
// for a backup command. When `datasetName` is not supplied, the user picks
// (or creates) a dataset interactively.
//
// Fix: the client is now re-configured with the *resolved* dataset
// (`selectedDataset`) instead of the raw `datasetName` argument, which is
// `undefined` whenever the dataset came from the interactive prompt —
// previously the returned client carried no dataset in that case.
async function resolveApiClient(context, datasetName, apiVersion) {
  const { apiClient } = context;
  let client = apiClient();
  const { projectId, token } = client.config();
  if (!projectId) {
    throw new Error("Project ID not defined");
  }
  let selectedDataset = datasetName;
  if (!selectedDataset) {
    selectedDataset = await chooseDatasetPrompt(context, {
      message: "Select the dataset name:"
    });
  }
  client = client.withConfig({
    dataset: selectedDataset,
    apiVersion
  });
  return {
    projectId,
    datasetName: selectedDataset,
    token,
    client
  };
}
// `sanity backup disable [DATASET_NAME]` — turns off daily backups for a
// dataset by PUT-ing { enabled: false } to its backup settings endpoint.
const helpText$K = `
Examples
sanity backup disable DATASET_NAME
`, disableDatasetBackupCommand = {
name: "disable",
group: "backup",
signature: "[DATASET_NAME]",
description: "Disable backup for a dataset.",
helpText: helpText$K,
action: async (args, context) => {
const {
output,
chalk
} = context, [dataset] = args.argsWithoutOptions, {
projectId,
datasetName,
token,
client
} = await resolveApiClient(context, dataset, defaultApiVersion$1);
// The backups settings endpoint is called with an explicit bearer token.
try {
await client.request({
method: "PUT",
headers: {
Authorization: `Bearer ${token}`
},
uri: `/projects/${projectId}/datasets/${datasetName}/settings/backups`,
body: {
enabled: !1
}
}), output.print(`${chalk.green(`Disabled daily backups for dataset ${datasetName}
`)}`);
} catch (error) {
const {
message
} = parseApiErr(error);
output.print(`${chalk.red(`Disabling dataset backup failed: ${message}`)}
`);
}
}
};
var debug$1 = require("debug")("sanity:backup");
const archiver = require("archiver");
// Compresses the staging directory `tmpOutDir` into a gzipped tarball at
// `outFilePath`. `progressCb` receives the running count of processed bytes.
// Resolves when the destination stream closes; rejects on stream or archiver
// errors (archiver warnings are only debug-logged).
function archiveDir(tmpOutDir, outFilePath, progressCb) {
return new Promise((resolve, reject) => {
const archiveDestination = fs.createWriteStream(outFilePath);
archiveDestination.on("error", (err) => {
reject(err);
}), archiveDestination.on("close", () => {
resolve();
});
const archive = archiver("tar", {
gzip: !0,
gzipOptions: {
level: zlib__default.default.constants.Z_DEFAULT_COMPRESSION
}
});
archive.on("error", (err) => {
debug$1(`Archiving errored!
%s`, err.stack), reject(err);
}), archive.on("warning", (err) => {
debug$1("Archive warning: %s", err.message);
}), archive.on("progress", (progress2) => {
progressCb(progress2.fs.processedBytes);
// `directory(tmpOutDir, !1)` places the directory contents at the archive
// root rather than under a wrapping folder name.
}), archive.pipe(archiveDestination), archive.directory(tmpOutDir, !1), archive.finalize();
});
}
// Cap on how many backups are listed when prompting for a backup ID.
const maxBackupIdsShown = 100;
// Fetches the most recent backups for `datasetName` and prompts the user to
// pick one, returning the chosen backup ID. Throws when the listing request
// fails, or when the dataset has no backups at all.
async function chooseBackupIdPrompt(context, datasetName) {
const {
prompt
} = context, {
projectId,
token,
client
} = await resolveApiClient(context, datasetName, defaultApiVersion$1);
// NOTE(review): this catch also wraps the prompt itself, so a prompt
// failure would be reported as a fetch failure.
try {
const response = await client.request({
headers: {
Authorization: `Bearer ${token}`
},
uri: `/projects/${projectId}/datasets/${datasetName}/backups`,
query: {
limit: maxBackupIdsShown.toString()
}
});
if (response?.backups?.length > 0) {
const backupIdChoices = response.backups.map((backup) => ({
value: backup.id
}));
return await prompt.single({
message: `Select backup ID to use (only last ${maxBackupIdsShown} shown)`,
type: "list",
choices: backupIdChoices
});
}
} catch (err) {
throw new Error(`Failed to fetch backups for dataset ${datasetName}: ${err.message}`);
}
// Reached when the request succeeded but returned an empty backup list.
throw new Error("No backups found");
}
// Best-effort removal of the temporary backup working directory. Failures
// are only debug-logged — leftover temp files are not fatal.
async function cleanupTmpDir(tmpDir) {
  await rimraf.rimraf(tmpDir).catch((err) => {
    debug$1(`Error cleaning up temporary files: ${err.message}`);
  });
}
// Retry tuning: up to 5 attempts with exponential backoff starting at 200ms
// (200, 400, 800, ...).
const MAX_RETRIES = 5, BACKOFF_DELAY_BASE = 200, exponentialBackoff = (retryCount) => Math.pow(2, retryCount) * BACKOFF_DELAY_BASE;
// Runs `operation`, retrying transient failures up to `maxRetries` times
// with exponential backoff. HTTP client errors (status < 500) are rethrown
// immediately without retrying.
//
// Fix: the original discarded the underlying error when retries were
// exhausted; it is now attached as `cause` on the final Error (message kept
// identical for callers that match on it).
async function withRetry(operation, maxRetries = MAX_RETRIES) {
  let lastErr;
  for (let retryCount = 0; retryCount < maxRetries; retryCount++) {
    try {
      return await operation();
    } catch (err) {
      // Never retry definite client errors (4xx).
      if (err.response && err.response.statusCode && err.response.statusCode < 500) {
        throw err;
      }
      lastErr = err;
      const retryDelay = exponentialBackoff(retryCount);
      debug$1(`Error encountered, retrying after ${retryDelay}ms: %s`, err.message);
      await new Promise((resolve) => setTimeout(resolve, retryDelay));
    }
  }
  throw new Error("Operation failed after all retries", { cause: lastErr });
}
// Keep-alive HTTP client for asset downloads: 15s connect timeout, 3 minute
// socket read timeout.
const CONNECTION_TIMEOUT$1 = 15 * 1e3, READ_TIMEOUT$1 = 3 * 60 * 1e3, request$1 = getIt.getIt([middleware.keepAlive(), middleware.promise()]);
// Streams one asset from `url2` into the staging directory (images/ or
// files/, chosen by `fileType`), retrying transient failures via withRetry.
// The file name is reduced to its basename before building the destination
// path.
async function downloadAsset(url2, fileName, fileType, outDir) {
const normalizedFileName = path__default.default.basename(fileName), assetFilePath = getAssetFilePath(normalizedFileName, fileType, outDir);
await withRetry(async () => {
const response = await request$1({
url: url2,
maxRedirects: 5,
timeout: {
connect: CONNECTION_TIMEOUT$1,
socket: READ_TIMEOUT$1
},
stream: !0
});
debug$1("Received asset %s with status code %d", normalizedFileName, response?.statusCode), await promises.pipeline(response.body, fs.createWriteStream(assetFilePath));
});
}
// Maps an asset to its destination inside the staging directory: images go
// under <outDir>/images, files under <outDir>/files. Any other fileType
// yields an empty string.
function getAssetFilePath(fileName, fileType, outDir) {
  if (fileType === "image") {
    return path__default.default.join(outDir, "images", fileName);
  }
  if (fileType === "file") {
    return path__default.default.join(outDir, "files", fileName);
  }
  return "";
}
// Separate keep-alive HTTP client for document downloads (same timeouts as
// the asset client above).
const CONNECTION_TIMEOUT = 15 * 1e3, READ_TIMEOUT = 3 * 60 * 1e3, request = getIt.getIt([middleware.keepAlive(), middleware.promise()]);
// Fetches a single backed-up document from `url2` (with retries) and returns
// the raw response body.
async function downloadDocument(url2) {
const response = await withRetry(() => request({
url: url2,
maxRedirects: 5,
timeout: {
connect: CONNECTION_TIMEOUT,
socket: READ_TIMEOUT
}
}));
return debug$1("Received document from %s with status code %d", url2, response?.statusCode), response.body;
}
// Object-mode Readable that pages through the backup-contents API and emits
// one file descriptor per backed-up file. `totalFiles` is captured from the
// first page so consumers can display progress totals while reading.
class PaginatedGetBackupStream extends node_stream.Readable {
cursor = "";
totalFiles = 0;
constructor(client, projectId, datasetName, backupId, token) {
super({
objectMode: !0
}), this.client = client, this.projectId = projectId, this.datasetName = datasetName, this.backupId = backupId, this.token = token;
}
async _read() {
try {
const data = await this.fetchNextBackupPage();
// Record the reported total once (only while it is still zero); push all
// files from the page, then either advance the cursor or end the stream.
this.totalFiles === 0 && (this.totalFiles = data.totalFiles), data.files.forEach((file) => this.push(file)), typeof data.nextCursor == "string" && data.nextCursor !== "" ? this.cursor = data.nextCursor : this.push(null);
} catch (err) {
this.destroy(err);
}
}
// fetchNextBackupPage fetches the next page of backed up files from the backup API.
async fetchNextBackupPage() {
const query = this.cursor === "" ? {} : {
nextCursor: this.cursor
};
try {
return await this.client.request({
headers: {
Authorization: `Bearer ${this.token}`
},
uri: `/projects/${this.projectId}/datasets/${this.datasetName}/backups/${this.backupId}`,
query
});
} catch (error) {
// NOTE(review): assumes `error.response.body.message` exists whenever
// `error.statusCode` is set — an HTTP error without a parsed body would
// raise a TypeError here instead.
let msg = error.statusCode ? error.response.body.message : error.message;
throw msg === void 0 && (msg = String(error)), new Error(`Downloading dataset backup failed: ${msg}`);
}
}
}
// Spinner-based progress reporter. `set` starts a fresh spinner when the
// step name changes (first re-printing and succeeding the previous step),
// and redraws when `update` is flagged for the same step; `update` always
// redraws. Per-step elapsed time is appended to the spinner text.
const newProgress = (output, startStep) => {
let spinner = output.spinner(startStep).start(), lastProgress = {
step: startStep
}, start = Date.now();
const print = (progress2) => {
const elapsed = prettyMs__default.default(Date.now() - start);
// Include "(current/total)" only when both counters are positive.
progress2.current && progress2.current > 0 && progress2.total && progress2.total > 0 ? spinner.text = `${progress2.step} (${progress2.current}/${progress2.total}) [${elapsed}]` : spinner.text = `${progress2.step} [${elapsed}]`;
};
return {
set: (progress2) => {
progress2.step !== lastProgress.step ? (print(lastProgress), spinner.succeed(), spinner = output.spinner(progress2.step).start(), start = Date.now()) : progress2.step === lastProgress.step && progress2.update && print(progress2), lastProgress = progress2;
},
update: (progress2) => {
print(progress2), lastProgress = progress2;
},
succeed: () => {
spinner.succeed(), start = Date.now();
},
fail: () => {
spinner.fail(), start = Date.now();
}
};
};
// Formats a byte count as a human-readable size with two decimals, e.g.
// 1536 -> "1.50 kB".
//
// Fix: the unit index is now clamped to the table (with "PB" added), so
// sizes of a petabyte or more no longer render as "NN.NN undefined"; zero
// and negative inputs stay in bytes instead of producing a NaN index.
function humanFileSize(size2) {
  const units = ["B", "kB", "MB", "GB", "TB", "PB"];
  const i = size2 > 0 ? Math.min(Math.floor(Math.log(size2) / Math.log(1024)), units.length - 1) : 0;
  return `${(size2 / Math.pow(1024, i)).toFixed(2)} ${units[i]}`;
}
// Heuristic: a path is treated as a directory when it does not end in a
// dot-extension (e.g. ".tar", ".gz").
function isPathDirName(filepath) {
  const endsWithExtension = /\.\w+$/.test(filepath);
  return !endsWithExtension;
}
// Backup debug logger, download-concurrency bounds, and help text for
// `sanity backup download`.
const debug = debug__default.default("sanity:backup"), DEFAULT_DOWNLOAD_CONCURRENCY = 10, MAX_DOWNLOAD_CONCURRENCY = 24, helpText$J = `
Options
--backup-id <string> The backup ID to download. (required)
--out <string> The file or directory path the backup should download to.
--overwrite Allows overwriting of existing backup file.
--concurrency <num> Concurrent number of backup item downloads. (max: 24)
Examples
sanity backup download DATASET_NAME --backup-id 2024-01-01-backup-1
sanity backup download DATASET_NAME --backup-id 2024-01-01-backup-2 --out /path/to/file
sanity backup download DATASET_NAME --backup-id 2024-01-01-backup-3 --out /path/to/file --overwrite
`;
// Parses the CLI flags for `backup download`: --backup-id, --out,
// --concurrency (defaults to DEFAULT_DOWNLOAD_CONCURRENCY) and --overwrite.
function parseCliFlags$7(args) {
  const rawArgs = helpers.hideBin(args.argv || process.argv).slice(2);
  return yargs__default.default(rawArgs)
    .options("backup-id", { type: "string" })
    .options("out", { type: "string" })
    .options("concurrency", { type: "number", default: DEFAULT_DOWNLOAD_CONCURRENCY })
    .options("overwrite", { type: "boolean", default: !1 })
    .argv;
}
// `sanity backup download [DATASET_NAME]` — streams the list of backed-up
// files, downloads documents into a single ndjson file (writes serialized by
// a mutex) and assets concurrently into a temp directory, then archives the
// whole staging directory into a local .tar.gz.
const downloadBackupCommand = {
name: "download",
group: "backup",
signature: "[DATASET_NAME]",
description: "Download a dataset backup to a local file.",
helpText: helpText$J,
// eslint-disable-next-line max-statements
action: async (args, context) => {
const {
output,
chalk
} = context, [client, opts] = await prepareBackupOptions(context, args), {
projectId,
datasetName,
backupId,
outDir,
outFileName
} = opts;
// Empty outDir/outFileName means the user declined to overwrite an
// existing output file (see prepareBackupOptions).
if (outDir === "" || outFileName === "") {
output.print("Operation cancelled.");
return;
}
const outFilePath = path__default.default.join(outDir, outFileName);
output.print("\u256D\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256E"), output.print("\u2502 \u2502"), output.print("\u2502 Downloading backup for: \u2502"), output.print(`\u2502 ${chalk.bold("projectId")}: ${chalk.cyan(projectId).padEnd(56)} \u2502`), output.print(`\u2502 ${chalk.bold("dataset")}: ${chalk.cyan(datasetName).padEnd(58)} \u2502`), output.print(`\u2502 ${chalk.bold("backupId")}: ${chalk.cyan(backupId).padEnd(56)} \u2502`), output.print("\u2502 \u2502"), output.print("\u2570\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256F"), output.print(""), output.print(`Downloading backup to "${chalk.cyan(outFilePath)}"`);
const start = Date.now(), progressSpinner = newProgress(output, "Setting up backup environment..."), tmpOutDir = await fs$1.mkdtemp(path__default.default.join(os.tmpdir(), "sanity-backup-"));
// Pre-create the final output dir and the temp images/files subdirectories.
for (const dir of [outDir, path__default.default.join(tmpOutDir, "images"), path__default.default.join(tmpOutDir, "files")])
fs.mkdirSync(dir, {
recursive: !0
});
debug("Writing to temporary directory %s", tmpOutDir);
const tmpOutDocumentsFile = path__default.default.join(tmpOutDir, "data.ndjson"), docOutStream = fs.createWriteStream(tmpOutDocumentsFile), docWriteMutex = new asyncMutex.Mutex();
try {
const backupFileStream = new PaginatedGetBackupStream(client, opts.projectId, opts.datasetName, opts.backupId, opts.token), files = [];
let i = 0;
// Drain the paginated stream fully first so the complete file list (and
// total count) is known before starting concurrent downloads.
for await (const file of backupFileStream)
files.push(file), i++, progressSpinner.set({
step: "Reading backup files...",
update: !0,
current: i,
total: backupFileStream.totalFiles
});
let totalItemsDownloaded = 0;
// p-map is pulled in via dynamic import rather than require().
const {
default: pMap
} = await import("p-map");
await pMap(files, async (file) => {
if (file.type === "file" || file.type === "image")
await downloadAsset(file.url, file.name, file.type, tmpOutDir);
else {
const doc = await downloadDocument(file.url);
// Serialize ndjson writes so concurrent downloads cannot interleave lines.
await docWriteMutex.runExclusive(() => {
docOutStream.write(`${doc}
`);
});
}
totalItemsDownloaded += 1, progressSpinner.set({
step: "Downloading documents and assets...",
update: !0,
current: totalItemsDownloaded,
total: backupFileStream.totalFiles
});
}, {
concurrency: opts.concurrency
});
} catch (error) {
progressSpinner.fail();
const {
message
} = parseApiErr(error);
throw new Error(`Downloading dataset backup failed: ${message}`);
}
// Flush the ndjson stream before archiving the staging directory.
docOutStream.end(), await promises.finished(docOutStream), progressSpinner.set({
step: "Archiving files into a tarball...",
update: !0
});
try {
await archiveDir(tmpOutDir, outFilePath, (processedBytes) => {
progressSpinner.update({
step: `Archiving files into a tarball, ${humanFileSize(processedBytes)} bytes written...`
});
});
} catch (err) {
throw progressSpinner.fail(), new Error(`Archiving backup failed: ${err.message}`);
}
progressSpinner.set({
step: `Cleaning up temporary files at ${chalk.cyan(`${tmpOutDir}`)}`
}), await cleanupTmpDir(tmpOutDir), progressSpinner.set({
step: `Backup download complete [${prettyMs__default.default(Date.now() - start)}]`
}), progressSpinner.succeed();
}
};
// Resolves flags and interactive prompts into the normalized options for
// `backup download`, returning [client, options]. `outDir`/`outFileName`
// both come out empty when the user refuses to overwrite an existing file.
async function prepareBackupOptions(context, args) {
const flags = await parseCliFlags$7(args), [dataset] = args.argsWithoutOptions, {
prompt,
workDir
} = context, {
projectId,
datasetName,
client
} = await resolveApiClient(context, dataset, defaultApiVersion$1), {
token
} = client.config();
if (!isString__default.default(token) || token.length < 1)
throw new Error("token is missing");
if (!isString__default.default(datasetName) || datasetName.length < 1)
throw new Error(`dataset ${datasetName} must be a valid dataset name`);
// Fall back to an interactive backup-ID picker when --backup-id is absent.
const backupId = String(flags["backup-id"] || await chooseBackupIdPrompt(context, datasetName));
if (backupId.length < 1)
throw new Error(`backup-id ${flags["backup-id"]} should be a valid string`);
if ("concurrency" in flags && (flags.concurrency < 1 || flags.concurrency > MAX_DOWNLOAD_CONCURRENCY))
throw new Error(`concurrency should be in 1 to ${MAX_DOWNLOAD_CONCURRENCY} range`);
const defaultOutFileName = `${datasetName}-backup-${backupId}.tar.gz`;
// Resolve the output path from --out, or prompt for it; either way the
// path is absolutified.
let out = await (async () => flags.out !== void 0 ? fs$2.absolutify(flags.out) : await prompt.single({
type: "input",
message: "Output path:",
default: path__default.default.join(workDir, defaultOutFileName),
filter: fs$2.absolutify
}))();
// Extension-less paths are treated as directories (default file name is
// appended); existing files require --overwrite or explicit confirmation,
// otherwise `out` is cleared to signal cancellation.
return isPathDirName(out) && (out = path__default.default.join(out, defaultOutFileName)), !flags.overwrite && fs.existsSync(out) && (await prompt.single({
type: "confirm",
message: `File "${out}" already exists, would you like to overwrite it?`,
default: !1
}) || (out = "")), [client, {
projectId,
datasetName,
backupId,
token,
outDir: path__default.default.dirname(out),
outFileName: path__default.default.basename(out),
overwrite: flags.overwrite,
concurrency: flags.concurrency || DEFAULT_DOWNLOAD_CONCURRENCY
}];
}
// `sanity backup enable [DATASET_NAME]` — enables daily backups via the
// dataset's backup settings endpoint — followed by the default page size and
// help text for `sanity backup list`.
const helpText$I = `
Examples
sanity backup enable DATASET_NAME
`, enableDatasetBackupCommand = {
name: "enable",
group: "backup",
signature: "[DATASET_NAME]",
description: "Enable backup for a dataset.",
helpText: helpText$I,
action: async (args, context) => {
const {
output,
chalk
} = context, [dataset] = args.argsWithoutOptions, {
projectId,
datasetName,
token,
client
} = await resolveApiClient(context, dataset, defaultApiVersion$1);
try {
await client.request({
method: "PUT",
headers: {
Authorization: `Bearer ${token}`
},
uri: `/projects/${projectId}/datasets/${datasetName}/settings/backups`,
body: {
enabled: !0
}
}), output.print(`${chalk.green(`Enabled backups for dataset ${datasetName}.
Please note that it may take up to 24 hours before the first backup is created.
`)}`), output.print(`${chalk.bold(`Retention policies may apply depending on your plan and agreement.
`)}`);
} catch (error) {
const {
message
} = parseApiErr(error);
output.print(`${chalk.red(`Enabling dataset backup failed: ${message}`)}
`);
}
}
}, DEFAULT_LIST_BACKUP_LIMIT = 30, helpText$H = `
Options
--limit <int> Maximum number of backups returned. Default 30.
--after <string> Only return backups after this date (inclusive)
--before <string> Only return backups before this date (exclusive). Cannot be younger than <after> if specified.
Examples
sanity backup list DATASET_NAME
sanity backup list DATASET_NAME --limit 50
sanity backup list DATASET_NAME --after 2024-01-31 --limit 10
sanity backup list DATASET_NAME --after 2024-01-31 --before 2024-01-10
`;
// Parses the CLI flags for `backup list`: --after and --before date strings
// plus --limit/-l (defaults to DEFAULT_LIST_BACKUP_LIMIT).
function parseCliFlags$6(args) {
  const rawArgs = helpers.hideBin(args.argv || process.argv).slice(2);
  return yargs__default.default(rawArgs)
    .options("after", { type: "string" })
    .options("before", { type: "string" })
    .options("limit", { type: "number", default: DEFAULT_LIST_BACKUP_LIMIT, alias: "l" })
    .argv;
}
// `sanity backup list [DATASET_NAME]` — prints a table of the dataset's
// backups (resource, creation time, backup ID) with optional --limit,
// --after and --before filtering.
const listDatasetBackupCommand = {
name: "list",
group: "backup",
signature: "[DATASET_NAME]",
description: "List available backups for a dataset.",
helpText: helpText$H,
action: async (args, context) => {
const {
output,
chalk
} = context, flags = await parseCliFlags$6(args), [dataset] = args.argsWithoutOptions, {
projectId,
datasetName,
token,
client
} = await resolveApiClient(context, dataset, defaultApiVersion$1), query = {
limit: DEFAULT_LIST_BACKUP_LIMIT.toString()
};
if (flags.limit) {
if (flags.limit < 1 || flags.limit > Number.MAX_SAFE_INTEGER)
throw new Error(`Parsing --limit: must be an integer between 1 and ${Number.MAX_SAFE_INTEGER}`);
query.limit = flags.limit.toString();
}
if (flags.before || flags.after)
try {
// Dates are only validated/compared locally; the raw flag strings are
// what get sent to the API.
const parsedBefore = processDateFlags(flags.before), parsedAfter = processDateFlags(flags.after);
if (parsedAfter && parsedBefore && dateFns.isAfter(parsedAfter, parsedBefore))
throw new Error("--after date must be before --before");
query.before = flags.before, query.after = flags.after;
} catch (err) {
throw new Error(`Parsing date flags: ${err}`);
}
let response;
try {
response = await client.request({
headers: {
Authorization: `Bearer ${token}`
},
uri: `/projects/${projectId}/datasets/${datasetName}/backups`,
query: {
...query
}
});
} catch (error) {
const {
message
} = parseApiErr(error);
// Request failures are reported but not rethrown; `response` stays
// undefined and the table rendering below is skipped.
output.error(`${chalk.red(`List dataset backup failed: ${message}`)}
`);
}
if (response && response.backups) {
if (response.backups.length === 0) {
output.print("No backups found.");
return;
}
const table = new consoleTablePrinter.Table({
columns: [{
name: "resource",
title: "RESOURCE",
alignment: "left"
}, {
name: "createdAt",
title: "CREATED AT",
alignment: "left"
}, {
name: "backupId",
title: "BACKUP ID",
alignment: "left"
}]
});
response.backups.forEach((backup) => {
const {
id,
createdAt
} = backup;
table.addRow({
resource: "Dataset",
createdAt: dateFns.lightFormat(Date.parse(createdAt), "yyyy-MM-dd HH:mm:ss"),
backupId: id
});
}), table.printTable();
}
}
};
// Parses a YYYY-MM-DD CLI flag into a Date. Returns undefined when the flag
// was not supplied; throws on malformed input.
function processDateFlags(date) {
  if (!date) return;
  const parsedDate = dateFns.parse(date, "yyyy-MM-dd", new Date());
  if (!dateFns.isValid(parsedDate)) {
    throw new Error(`Invalid ${date} date format. Use YYYY-MM-DD`);
  }
  return parsedDate;
}
// True when the resolved CLI config object declares an `app` key (i.e. the
// project is a Sanity application rather than a studio).
function determineIsApp(cliConfig) {
  if (!cliConfig) {
    return false;
  }
  return "app" in cliConfig;
}
// `sanity build` — help text and command definition. The concrete action is
// selected at run time by getBuildAction (app vs. studio build).
const helpText$G = `
Options
--source-maps Enable source maps for built bundles (increases size of bundle)
--no-minify Skip minifying built JavaScript (speeds up build, increases size of bundle)
-y, --yes Unattended mode, answers "yes" to any "yes/no" prompt and otherwise uses defaults
--schema-path If you are storing your schemas in a different path than the default one, you need to specify it here.
Examples
sanity build
sanity build --no-minify --source-maps
`, buildCommand = {
name: "build",
signature: "[OUTPUT_DIR]",
description: "Builds the Sanity Studio configuration into a static bundle",
action: async (args, context, overrides) => (await getBuildAction(context))(args, context, overrides),
helpText: helpText$G
};
// Lazily requires the appropriate build action module: buildAction.js for
// app projects, buildAction2.js for studio projects.
async function getBuildAction(context) {
return determineIsApp(context.cliConfig) ? (await Promise.resolve().then(function() {
return require("./buildAction.js");
})).default : (await Promise.resolve().then(function() {
return require("./buildAction2.js");
})).default;
}
// Placeholder tokens used by filterOrigin below so wildcards (and wildcard
// ports) survive a round-trip through url.parse.
const wildcardReplacement = "a-wild-card-r3pl4c3m3n7-a", portReplacement = ":7777777";
// Adds a CORS origin to the project. Prompts for the origin when none is
// given, requires confirmation for wildcard origins, and resolves the
// credentials setting interactively when --[no-]credentials was not passed.
// Returns false when the user aborts the wildcard confirmation, true after
// a successful POST.
async function addCorsOrigin(givenOrigin, flags, context) {
const {
apiClient,
prompt,
output
} = context, origin = await (givenOrigin ? filterAndValidateOrigin(givenOrigin) : promptForOrigin$1(prompt)), hasWildcard = origin.includes("*");
if (hasWildcard && !await promptForWildcardConfirmation(origin, context))
return !1;
// `typeof flags.credentials > "u"` is minified `typeof ... === "undefined"`.
const allowCredentials = typeof flags.credentials > "u" ? await promptForCredentials(hasWildcard, context) : !!flags.credentials;
return givenOrigin !== origin && output.print(`Normalized origin to ${origin}`), await apiClient({
requireUser: !0,
requireProject: !0
}).request({
method: "POST",
url: "/cors",
body: {
origin,
allowCredentials
},
maxRedirects: 0
}), !0;
}
// Prints a context-appropriate warning (harsher when the origin contains a
// wildcard), then asks whether the origin may send credentialed
// (token/cookie) requests. Returns the confirmation promise.
function promptForCredentials(hasWildcard, context) {
const {
prompt,
output,
chalk
} = context;
return output.print(""), hasWildcard ? output.print(oneline__default.default`
${chalk.yellow(`${logSymbols__default.default.warning} Warning:`)}
We ${chalk.red(chalk.underline("HIGHLY"))} recommend NOT allowing credentials
on origins containing wildcards. If you are logged in to a studio, people will
be able to send requests ${chalk.underline("on your behalf")} to read and modify
data, from any matching origin. Please tread carefully!
`) : output.print(oneline__default.default`
${chalk.yellow(`${logSymbols__default.default.warning} Warning:`)}
Should this origin be allowed to send requests using authentication tokens or
session cookies? Be aware that any script on this origin will be able to send
requests ${chalk.underline("on your behalf")} to read and modify data if you
are logged in to a Sanity studio. If this origin hosts a studio, you will need
this, otherwise you should probably answer "No" (n).
`), output.print(""), prompt.single({
type: "confirm",
message: oneline__default.default`
Allow credentials to be sent from this origin? Please read the warning above.
`,
default: !1
});
}
// Shows concrete examples of what a wildcard origin would match (worst-case
// examples for a bare "*", substituted samples otherwise) and asks the user
// to confirm they really want to allow it.
function promptForWildcardConfirmation(origin, context) {
const {
prompt,
output,
chalk
} = context;
return output.print(""), output.print(chalk.yellow(`${logSymbols__default.default.warning} Warning: Examples of allowed origins:`)), origin === "*" ? (output.print("- http://www.some-malicious.site"), output.print("- https://not.what-you-were-expecting.com"), output.print("- https://high-traffic-site.com"), output.print("- http://192.168.1.1:8080")) : (output.print(`- ${origin.replace(/:\*/, ":1234").replace(/\*/g, "foo")}`), output.print(`- ${origin.replace(/:\*/, ":3030").replace(/\*/g, "foo.bar")}`)), output.print(""), prompt.single({
type: "confirm",
message: oneline__default.default`
Using wildcards can be ${chalk.red("risky")}.
Are you ${chalk.underline("absolutely sure")} you want to allow this origin?`,
default: !1
});
}
// Interactively asks for a CORS origin, normalizing input through
// filterOrigin and validating the normalized value before accepting it.
function promptForOrigin$1(prompt) {
  const question = {
    type: "input",
    message: "Origin (including protocol):",
    filter: filterOrigin,
    validate: (origin) => validateOrigin(origin, origin)
  };
  return prompt.single(question);
}
// Normalizes a user-supplied origin to `protocol//host` form. Wildcards and
// wildcard ports are temporarily swapped for placeholder tokens so the
// string survives url.parse, then restored afterwards; default http(s) ports
// 80/443 are stripped. Returns null when the origin cannot be parsed.
function filterOrigin(origin) {
if (origin === "*" || origin === "file:///*" || origin === "null")
return origin;
try {
const example = origin.replace(/([^:])\*/g, `$1${wildcardReplacement}`).replace(/:\*/, portReplacement), parsed = url__default.default.parse(example);
let host = parsed.host || "";
return /^https?:$/.test(parsed.protocol || "") && (host = host.replace(/:(80|443)$/, "")), host = host.replace(portReplacement, ":*").replace(new RegExp(wildcardReplacement, "g"), "*"), `${parsed.protocol}//${host}`;
} catch {
return null;
}
}
// Validates an already-filtered origin. Returns true when acceptable,
// otherwise an error-message string (inquirer validate convention).
function validateOrigin(origin, givenOrigin) {
if (origin === "*" || origin === "file:///*" || origin === "null")
return !0;
try {
// NOTE(review): `origin || 0` deliberately makes url.parse throw when
// filterOrigin returned null/"" so control drops to the messages below.
return url__default.default.parse(origin || 0), !0;
} catch {
}
return givenOrigin.startsWith("file://") ? "Only a local file wildcard is currently allowed: file:///*" : `Invalid origin "${givenOrigin}", must include protocol (https://some.host)`;
}
// Normalizes `givenOrigin` through filterOrigin and throws when the result
// fails validation (or normalization produced nothing); returns the
// normalized origin on success.
function filterAndValidateOrigin(givenOrigin) {
  const origin = filterOrigin(givenOrigin);
  const validation = validateOrigin(origin, givenOrigin);
  if (validation !== !0) {
    throw new Error(validation);
  }
  if (!origin) {
    throw new Error("Invalid origin");
  }
  return origin;
}
// Command definitions for the `cors` group: `cors add`, the group root
// entry, and `cors delete`.
const helpText$F = `
Options
--credentials Allow credentials (token/cookie) to be sent from this origin
--no-credentials Disallow credentials (token/cookie) to be sent from this origin
Examples
sanity cors add
sanity cors add http://localhost:3000 --no-credentials
`, addCorsOriginCommand = {
name: "add",
group: "cors",
signature: "[ORIGIN]",
helpText: helpText$F,
description: "Allow a new origin to use your project API through CORS",
action: async (args, context) => {
const {
output
} = context, [origin] = args.argsWithoutOptions;
if (!origin)
throw new Error("No origin specified, use `sanity cors add <origin-url>`");
const flags = args.extOptions;
// Warn when the argument matches a local file/directory — a telltale sign
// the shell expanded an unquoted wildcard (e.g. `sanity cors add *`).
fs__default.default.existsSync(path__default.default.join(process.cwd(), origin)) && output.warn(`Origin "${origin}?" Remember to quote values (sanity cors add "*")`), await addCorsOrigin(origin, flags, context) && output.print("CORS origin added successfully");
}
}, corsGroup = {
name: "cors",
signature: "[COMMAND]",
isGroupRoot: !0,
description: "Configures CORS settings for Sanity projects"
}, helpText$E = `
Examples
sanity cors delete
sanity cors delete http://localhost:3000
`, deleteCorsOriginCommand = {
name: "delete",
group: "cors",
signature: "[ORIGIN]",
helpText: helpText$E,
description: "Delete an existing CORS-origin from your project",
action: async (args, context) => {
const {
output,
apiClient
} = context, [origin] = args.argsWithoutOptions, client = apiClient({
requireUser: !0,
requireProject: !0
}), originId = await promptForOrigin(origin, context);
try {
await client.request({
method: "DELETE",
uri: `/cors/${originId}`
}), output.print("Origin deleted");
} catch (err) {
throw new Error(`Origin deletion failed:
${err.message}`);
}
}
};
/**
 * Resolves a CORS origin to its API id.
 *
 * If `specified` is given, it is matched case-insensitively against the
 * project's configured origins and its id is returned (throws when no
 * origin matches). Otherwise the user picks one from a list prompt.
 */
async function promptForOrigin(specified, context) {
  const { prompt, apiClient } = context;
  const specifiedOrigin = specified && specified.toLowerCase();
  const origins = await apiClient({
    requireUser: !0,
    requireProject: !0
  }).request({
    url: "/cors"
  });
  if (specifiedOrigin) {
    const selected = origins.find((origin) => origin.origin.toLowerCase() === specifiedOrigin);
    if (!selected) {
      // Fix: the closing quote belongs around the origin, not the phrase.
      throw new Error(`Origin "${specified}" not found`);
    }
    return selected.id;
  }
  const choices = origins.map((origin) => ({
    value: origin.id,
    name: origin.origin
  }));
  return prompt.single({
    message: "Select origin to delete",
    type: "list",
    choices
  });
}
// Help text for `sanity cors list`.
const helpText$D = `
Examples
sanity cors list
`;
// CLI command: print every origin allowed to access this project's API.
const listCorsOriginsCommand = {
  name: "list",
  group: "cors",
  signature: "",
  helpText: helpText$D,
  description: "List all origins allowed to access the API for this project",
  action: async (args, context) => {
    const { output, apiClient } = context;
    const client = apiClient({
      requireUser: !0,
      requireProject: !0
    });
    const origins = await client.request({ url: "/cors" });
    // One origin per line.
    const lines = origins.map((entry) => entry.origin);
    output.print(lines.join("\n"));
  }
};
function validateDatasetAliasName(datasetName) {
  // Returns a human-readable error string when the alias name is invalid,
  // or `false` when it is acceptable.
  if (!datasetName) {
    return "Alias name is missing";
  }
  const name = `${datasetName}`;
  if (name.toLowerCase() !== name) {
    return "Alias name must be all lowercase characters";
  }
  if (name.length < 2) {
    return "Alias name must be at least two characters long";
  }
  if (name.length > 64) {
    return "Alias name must be at most 64 characters";
  }
  // A leading "~" is allowed because aliases are addressed with that prefix.
  if (!/^[a-z0-9~]/.test(name)) {
    return "Alias name must start with a letter or a number";
  }
  if (!/^[a-z0-9~][-_a-z0-9]+$/.test(name)) {
    return "Alias name must only contain letters, numbers, dashes and underscores";
  }
  if (/[-_]$/.test(name)) {
    return "Alias name must not end with a dash or an underscore";
  }
  return !1;
}
function promptForDatasetAliasName(prompt, options = {}) {
  // Free-text prompt for an alias name. Caller-supplied options are spread
  // last, so they may override any of the defaults (message, validate, …).
  const defaults = {
    type: "input",
    message: "Alias name:",
    validate: (name) => validateDatasetAliasName(name) || !0
  };
  return prompt.single({ ...defaults, ...options });
}
const ALIAS_PREFIX = "~";
// --- Dataset-alias HTTP helpers -------------------------------------------
// Thin wrappers around the project API's /aliases endpoints. All mutations
// funnel through `modify` so the request shape stays in one place.

// GET every alias configured for the project.
function listAliases(client) {
  return client.request({ uri: "/aliases" });
}
// PUT a new alias, optionally linking it to a dataset.
function createAlias(client, aliasName, datasetName) {
  const body = datasetName ? { datasetName } : void 0;
  return modify(client, "PUT", aliasName, body);
}
// PATCH an existing alias to point at (another) dataset.
function updateAlias(client, aliasName, datasetName) {
  const body = datasetName ? { datasetName } : void 0;
  return modify(client, "PATCH", aliasName, body);
}
// Unlinking uses a sub-path rather than a dedicated HTTP method.
function unlinkAlias(client, aliasName) {
  return modify(client, "PATCH", `${aliasName}/unlink`, {});
}
// DELETE the alias entirely.
function removeAlias(client, aliasName) {
  return modify(client, "DELETE", aliasName);
}
// Shared request builder for all alias mutations.
function modify(client, method, aliasName, body) {
  return client.request({ method, uri: `/aliases/${aliasName}`, body });
}
// Handler for `sanity dataset alias create [alias] [dataset]`.
// Validates the alias name, optionally prompts for missing values, checks
// project capabilities, then creates (and optionally links) the alias.
const createAliasHandler = async (args, context) => {
  const { apiClient, output, prompt } = context;
  const [, alias, targetDataset] = args.argsWithoutOptions;
  const client = apiClient();
  const nameError = alias && validateDatasetAliasName(alias);
  if (nameError) {
    throw new Error(nameError);
  }
  // Fetch datasets, existing aliases and feature flags in parallel.
  const [datasets, aliases, projectFeatures] = await Promise.all([
    client.datasets.list().then((sets) => sets.map((ds) => ds.name)),
    listAliases(client).then((sets) => sets.map((ds) => ds.name)),
    client.request({ uri: "/features" })
  ]);
  let aliasName = await (alias || promptForDatasetAliasName(prompt));
  let aliasOutputName = aliasName;
  // Store the name without the "~" prefix; display it with the prefix.
  if (aliasName.startsWith(ALIAS_PREFIX)) {
    aliasName = aliasName.slice(1);
  } else {
    aliasOutputName = `${ALIAS_PREFIX}${aliasName}`;
  }
  if (aliases.includes(aliasName)) {
    throw new Error(`Dataset alias "${aliasOutputName}" already exists`);
  }
  if (targetDataset) {
    const datasetErr = validateDatasetName(targetDataset);
    if (datasetErr) {
      throw new Error(datasetErr);
    }
  }
  const datasetName = await (targetDataset || promptForDatasetName(prompt));
  if (datasetName && !datasets.includes(datasetName)) {
    throw new Error(`Dataset "${datasetName}" does not exist `);
  }
  if (!projectFeatures.includes("advancedDatasetManagement")) {
    throw new Error("This project cannot create a dataset alias");
  }
  try {
    await createAlias(client, aliasName, datasetName);
    // Fix: the old `${datasetName && `and linked…`}` interpolated
    // "undefined"/"" into the message when no dataset was linked.
    const linkSuffix = datasetName ? ` and linked to ${datasetName}` : "";
    output.print(`Dataset alias ${aliasOutputName} created${linkSuffix} successfully`);
  } catch (err) {
    throw new Error(`Dataset alias creation failed:
${err.message}`);
  }
};
function parseCliFlags$5(args) {
  // Parse this command's flags; only `--force` (boolean) is recognized.
  const rawArgs = helpers.hideBin(args.argv || process.argv).slice(2);
  return yargs__default.default(rawArgs).option("force", { type: "boolean" }).argv;
}
// Handler for `sanity dataset alias delete <alias> [--force]`.
// Requires the alias name as an argument; unless --force is passed, the
// user must retype the alias name to confirm the deletion.
const deleteAliasHandler = async (args, context) => {
  const { apiClient, prompt, output } = context;
  const [, ds] = args.argsWithoutOptions;
  const { force } = await parseCliFlags$5(args);
  const client = apiClient();
  if (!ds) {
    throw new Error("Dataset alias name must be provided");
  }
  let aliasName = `${ds}`;
  const dsError = validateDatasetAliasName(aliasName);
  if (dsError) {
    // Fix: previously threw the bare string instead of an Error instance.
    throw new Error(dsError);
  }
  aliasName = aliasName.startsWith(ALIAS_PREFIX) ? aliasName.slice(1) : aliasName;
  // Fix: dropped a pointless Promise.all wrapper around a single promise.
  const fetchedAliases = await listAliases(client);
  const linkedAlias = fetchedAliases.find((elem) => elem.name === aliasName);
  const message = linkedAlias && linkedAlias.datasetName ? `This dataset alias is linked to ${linkedAlias.datasetName}. ` : "";
  if (force) {
    output.warn(`'--force' used: skipping confirmation, deleting alias "${aliasName}"`);
  } else {
    await prompt.single({
      type: "input",
      message: `${message}Are you ABSOLUTELY sure you want to delete this dataset alias?
Type the name of the dataset alias to confirm delete: `,
      filter: (input) => `${input}`.trim(),
      validate: (input) => input === aliasName || "Incorrect dataset alias name. Ctrl + C to cancel delete."
    });
  }
  await removeAlias(client, aliasName);
  output.print("Dataset alias deleted successfully");
};
// Handler for `sanity dataset alias link [alias] [dataset] [--force]`.
// Links an existing alias to an existing dataset, confirming first when the
// alias is already linked elsewhere (unless --force).
const linkAliasHandler = async (args, context) => {
  const { apiClient, output, prompt } = context;
  const [, alias, targetDataset] = args.argsWithoutOptions;
  const flags = args.extOptions;
  const client = apiClient();
  const nameError = alias && validateDatasetAliasName(alias);
  if (nameError) {
    throw new Error(nameError);
  }
  const [datasets, fetchedAliases] = await Promise.all([
    client.datasets.list().then((sets) => sets.map((ds) => ds.name)),
    listAliases(client)
  ]);
  const aliases = fetchedAliases.map((da) => da.name);
  let aliasName = await (alias || promptForDatasetAliasName(prompt));
  let aliasOutputName = aliasName;
  // Store the name without the "~" prefix; display it with the prefix.
  if (aliasName.startsWith(ALIAS_PREFIX)) {
    aliasName = aliasName.slice(1);
  } else {
    aliasOutputName = `${ALIAS_PREFIX}${aliasName}`;
  }
  if (!aliases.includes(aliasName)) {
    throw new Error(`Dataset alias "${aliasOutputName}" does not exist `);
  }
  const datasetName = await (targetDataset || promptForDatasetName(prompt));
  const datasetErr = validateDatasetName(datasetName);
  if (datasetErr) {
    throw new Error(datasetErr);
  }
  if (!datasets.includes(datasetName)) {
    throw new Error(`Dataset "${datasetName}" does not exist `);
  }
  const linkedAlias = fetchedAliases.find((elem) => elem.name === aliasName);
  if (linkedAlias && linkedAlias.datasetName) {
    if (linkedAlias.datasetName === datasetName) {
      throw new Error(`Dataset alias ${aliasOutputName} already linked to ${datasetName}`);
    }
    if (!flags.force) {
      await prompt.single({
        type: "input",
        message: `This alias is linked to dataset <${linkedAlias.datasetName}>. Are you ABSOLUTELY sure you want to link this dataset alias to this dataset?
Type YES/NO: `,
        filter: (input) => `${input}`.toLowerCase(),
        validate: (input) => input === "yes" || "Ctrl + C to cancel dataset alias link."
      });
    }
  }
  try {
    await updateAlias(client, aliasName, datasetName);
    output.print(`Dataset alias ${aliasOutputName} linked to ${datasetName} successfully`);
  } catch (err) {
    throw new Error(`Dataset alias link failed:
${err.message}`);
  }
};
function parseCliFlags$4(args) {
  // Parse this command's flags; only `--force` (boolean) is recognized.
  const rawArgs = helpers.hideBin(args.argv || process.argv).slice(2);
  return yargs__default.default(rawArgs).option("force", { type: "boolean" }).argv;
}
// Handler for `sanity dataset alias unlink [alias] [--force]`.
// Detaches an alias from the dataset it points at, after confirmation
// (unless --force is passed).
const unlinkAliasHandler = async (args, context) => {
  const { apiClient, output, prompt } = context;
  const [, alias] = args.argsWithoutOptions;
  const { force } = await parseCliFlags$4(args);
  const client = apiClient();
  const nameError = alias && validateDatasetAliasName(alias);
  if (nameError) {
    throw new Error(nameError);
  }
  const fetchedAliases = await listAliases(client);
  let aliasName = await (alias || promptForDatasetAliasName(prompt));
  let aliasOutputName = aliasName;
  // Store the name without the "~" prefix; display it with the prefix.
  if (aliasName.startsWith(ALIAS_PREFIX)) {
    aliasName = aliasName.slice(1);
  } else {
    aliasOutputName = `${ALIAS_PREFIX}${aliasName}`;
  }
  const linkedAlias = fetchedAliases.find((elem) => elem.name === aliasName);
  if (!linkedAlias) {
    throw new Error(`Dataset alias "${aliasOutputName}" does not exist`);
  }
  if (!linkedAlias.datasetName) {
    throw new Error(`Dataset alias "${aliasOutputName}" is not linked to a dataset`);
  }
  if (force) {
    output.warn(`'--force' used: skipping confirmation, unlinking alias "${aliasOutputName}"`);
  } else {
    await prompt.single({
      type: "input",
      message: `Are you ABSOLUTELY sure you want to unlink this alias from the "${linkedAlias.datasetName}" dataset?
Type YES/NO: `,
      filter: (input) => `${input}`.toLowerCase(),
      validate: (input) => input === "yes" || "Ctrl + C to cancel dataset alias unlink."
    });
  }
  try {
    const result = await unlinkAlias(client, aliasName);
    output.print(`Dataset alias ${aliasOutputName} unlinked from ${result.datasetName} successfully`);
  } catch (err) {
    throw new Error(`Dataset alias unlink failed:
${err.message}`);
  }
};
// Help text for `sanity dataset alias` and its subcommands.
const helpText$C = `
Below are examples of the alias subcommand
Create Alias
sanity dataset alias create
sanity dataset alias create <alias-name>
sanity dataset alias create <alias-name> <target-dataset>
Delete Alias
Options
--force Skips security prompt and forces link command
Usage
sanity dataset alias delete <alias-name>
sanity dataset alias delete <alias-name> --force
Link Alias
Options
--force Skips security prompt and forces link command
Usage
sanity dataset alias link
sanity dataset alias link <alias-name>
sanity dataset alias link <alias-name> <target-dataset>
sanity dataset alias link <alias-name> <target-dataset> --force
Un-link Alias
Options
--force Skips security prompt and forces link command
Usage
sanity dataset alias unlink
sanity dataset alias unlink <alias-name>
sanity dataset alias unlink <alias-name> --force
`;
// CLI command: dispatches `sanity dataset alias <verb>` to its handler.
const aliasCommand = {
  name: "alias",
  group: "dataset",
  signature: "SUBCOMMAND [ALIAS_NAME, TARGET_DATASET]",
  helpText: helpText$C,
  description: "You can manage your dataset alias using this command.",
  action: async (args, context) => {
    // Map verbs to handlers; Map.get avoids prototype-key surprises.
    const handlers = new Map([
      ["create", createAliasHandler],
      ["delete", deleteAliasHandler],
      ["unlink", unlinkAliasHandler],
      ["link", linkAliasHandler]
    ]);
    const [verb] = args.argsWithoutOptions;
    const handler = handlers.get(verb);
    if (!handler) {
      throw new Error(oneline__default.default`
Invalid command provided. Available commands are: create, delete, link and unlink.
For more guide run the help command 'sanity dataset alias --help'
`);
    }
    await handler(args, context);
  }
};
async function listDatasetCopyJobs(flags, context) {
const {
apiClient,
output,
chalk
} = context, client = apiClient(), projectId = client.config().projectId, query = {};
let response;
flags.offset && flags.offset >= 0 && (query.offset = `${flags.offset}`), flags.limit && flags.limit > 0 && (query.limit = `${flags.limit}`);
try {
response = await client.request({
method: "GET",
uri: `/projects/${projectId}/datasets/copy`,
query
});
} catch (error) {
error.statusCode ? output.error(`${chalk.red(`Dataset copy list failed:
${error.response.body.message}`)}
`) : output.error(`${chalk.red(`Dataset copy list failed:
${error.message}`)}
`);
}
if (response && response.length > 0) {
const table = new consoleTablePrinter.Table({
title: "Dataset copy jobs for this project in descending order",
columns: [{
name: "id",
title: "Job ID",
alignment: "left"
}, {
name: "sourceDataset",
title: "Source Dataset",
alignment: "left"
}, {
name: "targetDataset",
title: "Target Dataset",
alignment: "left"
}, {
name: "state",
title: "State",
alignment: "left"
}, {
name: "withHistory",
title: "With history",
alignment: "left"
}, {
name: "timeStarted",
title: "Time started",
alignment: "left"
}, {
name: "timeTaken",
title: "Time taken",
alignment: "left"
}]
});
response.forEach((job) => {
const {
id,
state,
createdAt,
updatedAt,
sourceDataset,
targetDataset,
withHistory
} = job;
let timeStarted = "";
createdAt !== "" && (timeStarted = dateFns.formatDistanceToNow(dateFns.parseISO(createdAt)));
let timeTaken = "";
updatedAt !== "" && (timeTaken = dateFns.formatDistance(dateFns.parseISO(updatedAt), dateFns.parseISO(createdAt)));
let color;
switch (state) {
case "completed":
color = "green";
break;
case "failed":
color = "red";
break;
case "pending":
color = "yellow";
break;
default:
color = "";
}
table.addRow({
id,