UNPKG

sanity

Version:

Sanity is a real-time content infrastructure with a scalable, hosted backend featuring a Graph Oriented Query Language (GROQ), asset pipelines and fast edge caches

1,308 lines (1,294 loc) • 160 kB
import debug$3 from "debug"; import fs, { createWriteStream, mkdirSync, existsSync, createReadStream } from "node:fs"; import fs$1, { mkdtemp, writeFile, readdir } from "node:fs/promises"; import os, { tmpdir } from "node:os"; import path from "node:path"; import { pipeline, finished } from "node:stream/promises"; import { absolutify } from "@sanity/util/fs"; import { Mutex } from "async-mutex"; import { isString, padStart, noop as noop$1, isEqual, isPlainObject, groupBy, deburr, sortBy, size } from "lodash-es"; import prettyMs from "pretty-ms"; import { hideBin } from "yargs/helpers"; import yargs from "yargs/yargs"; import zlib from "node:zlib"; import archiver from "archiver"; import { rimraf } from "rimraf"; import { getIt } from "get-it"; import { keepAlive, promise } from "get-it/middleware"; import { Readable } from "node:stream"; import { Table } from "console-table-printer"; import { isAfter, lightFormat, parse, isValid, formatDistanceToNow, parseISO, formatDistance } from "date-fns"; import { createRequire } from "node:module"; import url from "node:url"; import logSymbols from "log-symbols"; import oneline from "oneline"; import EventSource from "@sanity/eventsource"; import { Observable } from "rxjs"; import { exportDataset } from "@sanity/export"; import { sanityImport } from "@sanity/import"; import { uuid } from "@sanity/uuid"; import chokidar from "chokidar"; import execa from "execa"; import json5 from "json5"; import pluralize from "pluralize-esm"; import tokenize from "json-lexer"; import open from "open"; import { inspect } from "node:util"; import { register } from "esbuild-register/dist/node"; import { MAX_MUTATION_CONCURRENCY, DEFAULT_MUTATION_CONCURRENCY, run, dryRun } from "@sanity/migrate"; import { isatty } from "node:tty"; import { isIndexSegment, isKeySegment, isIndexTuple } from "@sanity/types"; const defaultApiVersion$1 = "v2024-02-21", datasetBackupGroup = { name: "backup", signature: "[COMMAND]", description: "Manage backups.", isGroupRoot: !0 }; function parseApiErr(err) { const apiErr = {}; return err.code ? apiErr.statusCode = err.code : err.statusCode && (apiErr.statusCode = err.statusCode), err.message ? apiErr.message = err.message : err.statusMessage ? apiErr.message = err.statusMessage : err?.response?.body?.message ? apiErr.message = err.response.body.message : err?.response?.data?.message ? apiErr.message = err.response.data.message : apiErr.message = JSON.stringify(err), apiErr; } const debug$2 = debug$3("sanity:core"); function validateDatasetName(datasetName) { if (!datasetName) return "Dataset name is missing"; const name = `${datasetName}`; return name.toLowerCase() !== name ? "Dataset name must be all lowercase characters" : name.length < 2 ? "Dataset name must be at least two characters long" : name.length > 64 ? "Dataset name must be at most 64 characters" : /^[a-z0-9]/.test(name) ? /^[a-z0-9][-_a-z0-9]+$/.test(name) ? /[-_]$/.test(name) ? "Dataset name must not end with a dash or an underscore" : !1 : "Dataset name must only contain letters, numbers, dashes and underscores" : "Dataset name must start with a letter or a number"; } function promptForDatasetName(prompt, options = {}) { return prompt.single({ type: "input", message: "Dataset name:", validate: (name) => validateDatasetName(name) || !0, ...options }); } async function chooseDatasetPrompt(context, options = {}) { const { apiClient, prompt } = context, { message, allowCreation } = options, client = apiClient(), datasets = await client.datasets.list(), hasProduction = datasets.find((dataset) => dataset.name === "production"), datasetChoices = datasets.map((dataset) => ({ value: dataset.name })), selected = await prompt.single({ message: message || "Select dataset to use", type: "list", choices: allowCreation ? [{ value: "new", name: "Create new dataset" }, new prompt.Separator(), ...datasetChoices] : datasetChoices }); if (selected === "new") { debug$2("User wants to create a new dataset, prompting for name"); const newDatasetName = await promptForDatasetName(prompt, { message: "Name your dataset:", default: hasProduction ? void 0 : "production" }); return await client.datasets.create(newDatasetName), newDatasetName; } return selected; } async function resolveApiClient(context, datasetName, apiVersion) { const { apiClient } = context; let client = apiClient(); const { projectId, token } = client.config(); if (!projectId) throw new Error("Project ID not defined"); let selectedDataset = datasetName; return selectedDataset || (selectedDataset = await chooseDatasetPrompt(context, { message: "Select the dataset name:" })), client = client.withConfig({ dataset: datasetName, apiVersion }), { projectId, datasetName: selectedDataset, token, client }; } const helpText$K = ` Examples sanity backup disable DATASET_NAME `, disableDatasetBackupCommand = { name: "disable", group: "backup", signature: "[DATASET_NAME]", description: "Disable backup for a dataset.", helpText: helpText$K, action: async (args, context) => { const { output, chalk } = context, [dataset] = args.argsWithoutOptions, { projectId, datasetName, token, client } = await resolveApiClient(context, dataset, defaultApiVersion$1); try { await client.request({ method: "PUT", headers: { Authorization: `Bearer ${token}` }, uri: `/projects/${projectId}/datasets/${datasetName}/settings/backups`, body: { enabled: !1 } }), output.print(`${chalk.green(`Disabled daily backups for dataset ${datasetName} `)}`); } catch (error) { const { message } = parseApiErr(error); output.print(`${chalk.red(`Disabling dataset backup failed: ${message}`)} `); } } }; var debug$1 = debug$3("sanity:backup"); function archiveDir(tmpOutDir, outFilePath, progressCb) { return new Promise((resolve, reject) => { const archiveDestination = createWriteStream(outFilePath); archiveDestination.on("error", (err) => { reject(err); }), archiveDestination.on("close", () => { resolve(); }); const archive = archiver("tar", { gzip: !0, gzipOptions: { level: zlib.constants.Z_DEFAULT_COMPRESSION } }); archive.on("error", (err) => { debug$1(`Archiving errored! %s`, err.stack), reject(err); }), archive.on("warning", (err) => { debug$1("Archive warning: %s", err.message); }), archive.on("progress", (progress2) => { progressCb(progress2.fs.processedBytes); }), archive.pipe(archiveDestination), archive.directory(tmpOutDir, !1), archive.finalize(); }); } const maxBackupIdsShown = 100; async function chooseBackupIdPrompt(context, datasetName) { const { prompt } = context, { projectId, token, client } = await resolveApiClient(context, datasetName, defaultApiVersion$1); try { const response = await client.request({ headers: { Authorization: `Bearer ${token}` }, uri: `/projects/${projectId}/datasets/${datasetName}/backups`, query: { limit: maxBackupIdsShown.toString() } }); if (response?.backups?.length > 0) { const backupIdChoices = response.backups.map((backup) => ({ value: backup.id })); return await prompt.single({ message: `Select backup ID to use (only last ${maxBackupIdsShown} shown)`, type: "list", choices: backupIdChoices }); } } catch (err) { throw new Error(`Failed to fetch backups for dataset ${datasetName}: ${err.message}`, { cause: err }); } throw new Error("No backups found"); } async function cleanupTmpDir(tmpDir) { try { await rimraf(tmpDir); } catch (err) { debug$1(`Error cleaning up temporary files: ${err.message}`); } } const MAX_RETRIES = 5, BACKOFF_DELAY_BASE = 200, exponentialBackoff = (retryCount) => Math.pow(2, retryCount) * BACKOFF_DELAY_BASE; async function withRetry(operation, maxRetries = MAX_RETRIES) { for (let retryCount = 0; retryCount < maxRetries; retryCount++) try { return await operation(); } catch (err) { if (err.response && err.response.statusCode && err.response.statusCode < 500) throw err; const retryDelay = exponentialBackoff(retryCount); debug$1(`Error encountered, retrying after ${retryDelay}ms: %s`, err.message), await new Promise((resolve) => setTimeout(resolve, retryDelay)); } throw new Error("Operation failed after all retries"); } const CONNECTION_TIMEOUT$1 = 15 * 1e3, READ_TIMEOUT$1 = 180 * 1e3, request$1 = getIt([keepAlive(), promise()]); async function downloadAsset(url2, fileName, fileType, outDir) { const normalizedFileName = path.basename(fileName), assetFilePath = getAssetFilePath(normalizedFileName, fileType, outDir); await withRetry(async () => { const response = await request$1({ url: url2, maxRedirects: 5, timeout: { connect: CONNECTION_TIMEOUT$1, socket: READ_TIMEOUT$1 }, stream: !0 }); debug$1("Received asset %s with status code %d", normalizedFileName, response?.statusCode), await pipeline(response.body, createWriteStream(assetFilePath)); }); } function getAssetFilePath(fileName, fileType, outDir) { let assetFilePath = ""; return fileType === "image" ? assetFilePath = path.join(outDir, "images", fileName) : fileType === "file" && (assetFilePath = path.join(outDir, "files", fileName)), assetFilePath; } const CONNECTION_TIMEOUT = 15 * 1e3, READ_TIMEOUT = 180 * 1e3, request = getIt([keepAlive(), promise()]); async function downloadDocument(url2) { const response = await withRetry(() => request({ url: url2, maxRedirects: 5, timeout: { connect: CONNECTION_TIMEOUT, socket: READ_TIMEOUT } })); return debug$1("Received document from %s with status code %d", url2, response?.statusCode), response.body; } class PaginatedGetBackupStream extends Readable { cursor = ""; totalFiles = 0; constructor(client, projectId, datasetName, backupId, token) { super({ objectMode: !0 }), this.client = client, this.projectId = projectId, this.datasetName = datasetName, this.backupId = backupId, this.token = token; } async _read() { try { const data = await this.fetchNextBackupPage(); this.totalFiles === 0 && (this.totalFiles = data.totalFiles), data.files.forEach((file) => this.push(file)), typeof data.nextCursor == "string" && data.nextCursor !== "" ? this.cursor = data.nextCursor : this.push(null); } catch (err) { this.destroy(err); } } // fetchNextBackupPage fetches the next page of backed up files from the backup API. async fetchNextBackupPage() { const query = this.cursor === "" ? {} : { nextCursor: this.cursor }; try { return await this.client.request({ headers: { Authorization: `Bearer ${this.token}` }, uri: `/projects/${this.projectId}/datasets/${this.datasetName}/backups/${this.backupId}`, query }); } catch (error) { let msg = error.statusCode ? error.response.body.message : error.message; throw msg === void 0 && (msg = String(error)), new Error(`Downloading dataset backup failed: ${msg}`, { cause: error }); } } } const newProgress = (output, startStep) => { let spinner = output.spinner(startStep).start(), lastProgress = { step: startStep }, start = Date.now(); const print = (progress2) => { const elapsed = prettyMs(Date.now() - start); progress2.current && progress2.current > 0 && progress2.total && progress2.total > 0 ? spinner.text = `${progress2.step} (${progress2.current}/${progress2.total}) [${elapsed}]` : spinner.text = `${progress2.step} [${elapsed}]`; }; return { set: (progress2) => { progress2.step !== lastProgress.step ? (print(lastProgress), spinner.succeed(), spinner = output.spinner(progress2.step).start(), start = Date.now()) : progress2.step === lastProgress.step && progress2.update && print(progress2), lastProgress = progress2; }, update: (progress2) => { print(progress2), lastProgress = progress2; }, succeed: () => { spinner.succeed(), start = Date.now(); }, fail: () => { spinner.fail(), start = Date.now(); } }; }; function humanFileSize(size2) { const i = size2 == 0 ? 0 : Math.floor(Math.log(size2) / Math.log(1024)); return `${(size2 / Math.pow(1024, i)).toFixed(2)} ${["B", "kB", "MB", "GB", "TB"][i]}`; } function isPathDirName(filepath) { return !/\.\w+$/.test(filepath); } const debug = debug$3("sanity:backup"), DEFAULT_DOWNLOAD_CONCURRENCY = 10, MAX_DOWNLOAD_CONCURRENCY = 24, helpText$J = ` Options --backup-id <string> The backup ID to download. (required) --out <string> The file or directory path the backup should download to. --overwrite Allows overwriting of existing backup file. --concurrency <num> Concurrent number of backup item downloads. (max: 24) Examples sanity backup download DATASET_NAME --backup-id 2024-01-01-backup-1 sanity backup download DATASET_NAME --backup-id 2024-01-01-backup-2 --out /path/to/file sanity backup download DATASET_NAME --backup-id 2024-01-01-backup-3 --out /path/to/file --overwrite `; function parseCliFlags$7(args) { return yargs(hideBin(args.argv || process.argv).slice(2)).options("backup-id", { type: "string" }).options("out", { type: "string" }).options("concurrency", { type: "number", default: DEFAULT_DOWNLOAD_CONCURRENCY }).options("overwrite", { type: "boolean", default: !1 }).argv; } const downloadBackupCommand = { name: "download", group: "backup", signature: "[DATASET_NAME]", description: "Download a dataset backup to a local file.", helpText: helpText$J, // eslint-disable-next-line max-statements action: async (args, context) => { const { output, chalk } = context, [client, opts] = await prepareBackupOptions(context, args), { projectId, datasetName, backupId, outDir, outFileName } = opts; if (outDir === "" || outFileName === "") { output.print("Operation cancelled."); return; } const outFilePath = path.join(outDir, outFileName); output.print("\u256D\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256E"), output.print("\u2502 \u2502"), output.print("\u2502 Downloading backup for: \u2502"), output.print(`\u2502 ${chalk.bold("projectId")}: ${chalk.cyan(projectId).padEnd(56)} \u2502`), output.print(`\u2502 ${chalk.bold("dataset")}: ${chalk.cyan(datasetName).padEnd(58)} \u2502`), output.print(`\u2502 ${chalk.bold("backupId")}: ${chalk.cyan(backupId).padEnd(56)} \u2502`), output.print("\u2502 \u2502"), output.print("\u2570\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256F"), output.print(""), output.print(`Downloading backup to "${chalk.cyan(outFilePath)}"`); const start = Date.now(), progressSpinner = newProgress(output, "Setting up backup environment..."), tmpOutDir = await mkdtemp(path.join(tmpdir(), "sanity-backup-")); for (const dir of [outDir, path.join(tmpOutDir, "images"), path.join(tmpOutDir, "files")]) mkdirSync(dir, { recursive: !0 }); debug("Writing to temporary directory %s", tmpOutDir); const tmpOutDocumentsFile = path.join(tmpOutDir, "data.ndjson"), docOutStream = createWriteStream(tmpOutDocumentsFile), docWriteMutex = new Mutex(); try { const backupFileStream = new PaginatedGetBackupStream(client, opts.projectId, opts.datasetName, opts.backupId, opts.token), files = []; let i = 0; for await (const file of backupFileStream) files.push(file), i++, progressSpinner.set({ step: "Reading backup files...", update: !0, current: i, total: backupFileStream.totalFiles }); let totalItemsDownloaded = 0; const { default: pMap } = await import("p-map"); await pMap(files, async (file) => { if (file.type === "file" || file.type === "image") await downloadAsset(file.url, file.name, file.type, tmpOutDir); else { const doc = await downloadDocument(file.url); await docWriteMutex.runExclusive(() => { docOutStream.write(`${doc} `); }); } totalItemsDownloaded += 1, progressSpinner.set({ step: "Downloading documents and assets...", update: !0, current: totalItemsDownloaded, total: backupFileStream.totalFiles }); }, { concurrency: opts.concurrency }); } catch (error) { progressSpinner.fail(); const { message } = parseApiErr(error); throw new Error(`Downloading dataset backup failed: ${message}`, { cause: error }); } docOutStream.end(), await finished(docOutStream), progressSpinner.set({ step: "Archiving files into a tarball...", update: !0 }); try { await archiveDir(tmpOutDir, outFilePath, (processedBytes) => { progressSpinner.update({ step: `Archiving files into a tarball, ${humanFileSize(processedBytes)} bytes written...` }); }); } catch (err) { throw progressSpinner.fail(), new Error(`Archiving backup failed: ${err.message}`, { cause: err }); } progressSpinner.set({ step: `Cleaning up temporary files at ${chalk.cyan(`${tmpOutDir}`)}` }), await cleanupTmpDir(tmpOutDir), progressSpinner.set({ step: `Backup download complete [${prettyMs(Date.now() - start)}]` }), progressSpinner.succeed(); } }; async function prepareBackupOptions(context, args) { const flags = await parseCliFlags$7(args), [dataset] = args.argsWithoutOptions, { prompt, workDir } = context, { projectId, datasetName, client } = await resolveApiClient(context, dataset, defaultApiVersion$1), { token } = client.config(); if (!isString(token) || token.length < 1) throw new Error("token is missing"); if (!isString(datasetName) || datasetName.length < 1) throw new Error(`dataset ${datasetName} must be a valid dataset name`); const backupId = String(flags["backup-id"] || await chooseBackupIdPrompt(context, datasetName)); if (backupId.length < 1) throw new Error(`backup-id ${flags["backup-id"]} should be a valid string`); if ("concurrency" in flags && (flags.concurrency < 1 || flags.concurrency > MAX_DOWNLOAD_CONCURRENCY)) throw new Error(`concurrency should be in 1 to ${MAX_DOWNLOAD_CONCURRENCY} range`); const defaultOutFileName = `${datasetName}-backup-${backupId}.tar.gz`; let out = await (async () => flags.out !== void 0 ? absolutify(flags.out) : await prompt.single({ type: "input", message: "Output path:", default: path.join(workDir, defaultOutFileName), filter: absolutify }))(); return isPathDirName(out) && (out = path.join(out, defaultOutFileName)), !flags.overwrite && existsSync(out) && (await prompt.single({ type: "confirm", message: `File "${out}" already exists, would you like to overwrite it?`, default: !1 }) || (out = "")), [client, { projectId, datasetName, backupId, token, outDir: path.dirname(out), outFileName: path.basename(out), overwrite: flags.overwrite, concurrency: flags.concurrency || DEFAULT_DOWNLOAD_CONCURRENCY }]; } const helpText$I = ` Examples sanity backup enable DATASET_NAME `, enableDatasetBackupCommand = { name: "enable", group: "backup", signature: "[DATASET_NAME]", description: "Enable backup for a dataset.", helpText: helpText$I, action: async (args, context) => { const { output, chalk } = context, [dataset] = args.argsWithoutOptions, { projectId, datasetName, token, client } = await resolveApiClient(context, dataset, defaultApiVersion$1); try { await client.request({ method: "PUT", headers: { Authorization: `Bearer ${token}` }, uri: `/projects/${projectId}/datasets/${datasetName}/settings/backups`, body: { enabled: !0 } }), output.print(`${chalk.green(`Enabled backups for dataset ${datasetName}. Please note that it may take up to 24 hours before the first backup is created. `)}`), output.print(`${chalk.bold(`Retention policies may apply depending on your plan and agreement. `)}`); } catch (error) { const { message } = parseApiErr(error); output.print(`${chalk.red(`Enabling dataset backup failed: ${message}`)} `); } } }, DEFAULT_LIST_BACKUP_LIMIT = 30, helpText$H = ` Options --limit <int> Maximum number of backups returned. Default 30. --after <string> Only return backups after this date (inclusive) --before <string> Only return backups before this date (exclusive). Cannot be younger than <after> if specified. Examples sanity backup list DATASET_NAME sanity backup list DATASET_NAME --limit 50 sanity backup list DATASET_NAME --after 2024-01-31 --limit 10 sanity backup list DATASET_NAME --after 2024-01-31 --before 2024-01-10 `; function parseCliFlags$6(args) { return yargs(hideBin(args.argv || process.argv).slice(2)).options("after", { type: "string" }).options("before", { type: "string" }).options("limit", { type: "number", default: DEFAULT_LIST_BACKUP_LIMIT, alias: "l" }).argv; } const listDatasetBackupCommand = { name: "list", group: "backup", signature: "[DATASET_NAME]", description: "List available backups for a dataset.", helpText: helpText$H, action: async (args, context) => { const { output, chalk } = context, flags = await parseCliFlags$6(args), [dataset] = args.argsWithoutOptions, { projectId, datasetName, token, client } = await resolveApiClient(context, dataset, defaultApiVersion$1), query = { limit: DEFAULT_LIST_BACKUP_LIMIT.toString() }; if (flags.limit) { if (flags.limit < 1 || flags.limit > Number.MAX_SAFE_INTEGER) throw new Error(`Parsing --limit: must be an integer between 1 and ${Number.MAX_SAFE_INTEGER}`); query.limit = flags.limit.toString(); } if (flags.before || flags.after) try { const parsedBefore = processDateFlags(flags.before), parsedAfter = processDateFlags(flags.after); if (parsedAfter && parsedBefore && isAfter(parsedAfter, parsedBefore)) throw new Error("--after date must be before --before"); query.before = flags.before, query.after = flags.after; } catch (err) { throw new Error(`Parsing date flags: ${err}`, { cause: err }); } let response; try { response = await client.request({ headers: { Authorization: `Bearer ${token}` }, uri: `/projects/${projectId}/datasets/${datasetName}/backups`, query: { ...query } }); } catch (error) { const { message } = parseApiErr(error); output.error(`${chalk.red(`List dataset backup failed: ${message}`)} `); } if (response && response.backups) { if (response.backups.length === 0) { output.print("No backups found."); return; } const table = new Table({ columns: [{ name: "resource", title: "RESOURCE", alignment: "left" }, { name: "createdAt", title: "CREATED AT", alignment: "left" }, { name: "backupId", title: "BACKUP ID", alignment: "left" }] }); response.backups.forEach((backup) => { const { id, createdAt } = backup; table.addRow({ resource: "Dataset", createdAt: lightFormat(Date.parse(createdAt), "yyyy-MM-dd HH:mm:ss"), backupId: id }); }), table.printTable(); } } }; function processDateFlags(date) { if (!date) return; const parsedDate = parse(date, "yyyy-MM-dd", /* @__PURE__ */ new Date()); if (isValid(parsedDate)) return parsedDate; throw new Error(`Invalid ${date} date format. Use YYYY-MM-DD`); } function determineIsApp(cliConfig) { return !!(cliConfig && "app" in cliConfig); } createRequire(import.meta.url); const helpText$G = ` Options --source-maps Enable source maps for built bundles (increases size of bundle) --no-minify Skip minifying built JavaScript (speeds up build, increases size of bundle) -y, --yes Unattended mode, answers "yes" to any "yes/no" prompt and otherwise uses defaults --schema-path If you are storing your schemas in a different path than the default one, you need to specify it here. Examples sanity build sanity build --no-minify --source-maps `, buildCommand = { name: "build", signature: "[OUTPUT_DIR]", description: "Builds the Sanity Studio configuration into a static bundle", action: async (args, context, overrides) => (await getBuildAction(context))(args, context, overrides), helpText: helpText$G }; async function getBuildAction(context) { return determineIsApp(context.cliConfig) ? (await import("./buildAction.js")).default : (await import("./buildAction2.js")).default; } const wildcardReplacement = "a-wild-card-r3pl4c3m3n7-a", portReplacement = ":7777777"; async function addCorsOrigin(givenOrigin, flags, context) { const { apiClient, prompt, output } = context, origin = await (givenOrigin ? filterAndValidateOrigin(givenOrigin) : promptForOrigin$1(prompt)), hasWildcard = origin.includes("*"); if (hasWildcard && !await promptForWildcardConfirmation(origin, context)) return !1; const allowCredentials = typeof flags.credentials > "u" ? await promptForCredentials(hasWildcard, context) : !!flags.credentials; return givenOrigin !== origin && output.print(`Normalized origin to ${origin}`), await apiClient({ requireUser: !0, requireProject: !0 }).request({ method: "POST", url: "/cors", body: { origin, allowCredentials }, maxRedirects: 0 }), !0; } function promptForCredentials(hasWildcard, context) { const { prompt, output, chalk } = context; return output.print(""), hasWildcard ? output.print(oneline` ${chalk.yellow(`${logSymbols.warning} Warning:`)} We ${chalk.red(chalk.underline("HIGHLY"))} recommend NOT allowing credentials on origins containing wildcards. If you are logged in to a studio, people will be able to send requests ${chalk.underline("on your behalf")} to read and modify data, from any matching origin. Please tread carefully! `) : output.print(oneline` ${chalk.yellow(`${logSymbols.warning} Warning:`)} Should this origin be allowed to send requests using authentication tokens or session cookies? Be aware that any script on this origin will be able to send requests ${chalk.underline("on your behalf")} to read and modify data if you are logged in to a Sanity studio. If this origin hosts a studio, you will need this, otherwise you should probably answer "No" (n). `), output.print(""), prompt.single({ type: "confirm", message: oneline` Allow credentials to be sent from this origin? Please read the warning above. `, default: !1 }); } function promptForWildcardConfirmation(origin, context) { const { prompt, output, chalk } = context; return output.print(""), output.print(chalk.yellow(`${logSymbols.warning} Warning: Examples of allowed origins:`)), origin === "*" ? (output.print("- http://www.some-malicious.site"), output.print("- https://not.what-you-were-expecting.com"), output.print("- https://high-traffic-site.com"), output.print("- http://192.168.1.1:8080")) : (output.print(`- ${origin.replace(/:\*/, ":1234").replace(/\*/g, "foo")}`), output.print(`- ${origin.replace(/:\*/, ":3030").replace(/\*/g, "foo.bar")}`)), output.print(""), prompt.single({ type: "confirm", message: oneline` Using wildcards can be ${chalk.red("risky")}. Are you ${chalk.underline("absolutely sure")} you want to allow this origin?`, default: !1 }); } function promptForOrigin$1(prompt) { return prompt.single({ type: "input", message: "Origin (including protocol):", filter: filterOrigin, validate: (origin) => validateOrigin(origin, origin) }); } function filterOrigin(origin) { if (origin === "*" || origin === "file:///*" || origin === "null") return origin; try { const example = origin.replace(/([^:])\*/g, `$1${wildcardReplacement}`).replace(/:\*/, portReplacement), parsed = url.parse(example); let host = parsed.host || ""; return /^https?:$/.test(parsed.protocol || "") && (host = host.replace(/:(80|443)$/, "")), host = host.replace(portReplacement, ":*").replace(new RegExp(wildcardReplacement, "g"), "*"), `${parsed.protocol}//${host}`; } catch { return null; } } function validateOrigin(origin, givenOrigin) { if (origin === "*" || origin === "file:///*" || origin === "null") return !0; try { return url.parse(origin || 0), !0; } catch { } return givenOrigin.startsWith("file://") ? "Only a local file wildcard is currently allowed: file:///*" : `Invalid origin "${givenOrigin}", must include protocol (https://some.host)`; } function filterAndValidateOrigin(givenOrigin) { const origin = filterOrigin(givenOrigin), result = validateOrigin(origin, givenOrigin); if (result !== !0) throw new Error(result); if (!origin) throw new Error("Invalid origin"); return origin; } const helpText$F = ` Options --credentials Allow credentials (token/cookie) to be sent from this origin --no-credentials Disallow credentials (token/cookie) to be sent from this origin Examples sanity cors add sanity cors add http://localhost:3000 --no-credentials `, addCorsOriginCommand = { name: "add", group: "cors", signature: "[ORIGIN]", helpText: helpText$F, description: "Allow a new origin to use your project API through CORS", action: async (args, context) => { const { output } = context, [origin] = args.argsWithoutOptions; if (!origin) throw new Error("No origin specified, use `sanity cors add <origin-url>`"); const flags = args.extOptions; fs.existsSync(path.join(process.cwd(), origin)) && output.warn(`Origin "${origin}?" Remember to quote values (sanity cors add "*")`), await addCorsOrigin(origin, flags, context) && output.print("CORS origin added successfully"); } }, corsGroup = { name: "cors", signature: "[COMMAND]", isGroupRoot: !0, description: "Configures CORS settings for Sanity projects" }, helpText$E = ` Examples sanity cors delete sanity cors delete http://localhost:3000 `, deleteCorsOriginCommand = { name: "delete", group: "cors", signature: "[ORIGIN]", helpText: helpText$E, description: "Delete an existing CORS-origin from your project", action: async (args, context) => { const { output, apiClient } = context, [origin] = args.argsWithoutOptions, client = apiClient({ requireUser: !0, requireProject: !0 }), originId = await promptForOrigin(origin, context); try { await client.request({ method: "DELETE", uri: `/cors/${originId}` }), output.print("Origin deleted"); } catch (err) { throw new Error(`Origin deletion failed: ${err.message}`, { cause: err }); } } }; async function promptForOrigin(specified, context) { const specifiedOrigin = specified && specified.toLowerCase(), { prompt, apiClient } = context, origins = await apiClient({ requireUser: !0, requireProject: !0 }).request({ url: "/cors" }); if (specifiedOrigin) { const selected = origins.filter((origin) => origin.origin.toLowerCase() === specifiedOrigin)[0]; if (!selected) throw new Error(`Origin "${specified} not found"`); return selected.id; } const choices = origins.map((origin) => ({ value: origin.id, name: origin.origin })); return prompt.single({ message: "Select origin to delete", type: "list", choices }); } const helpText$D = ` Examples sanity cors list `, listCorsOriginsCommand = { name: "list", group: "cors", signature: "", helpText: helpText$D, description: "List all origins allowed to access the API for this project", action: async (args, context) => { const { output } = context, { apiClient } = context, origins = await apiClient({ requireUser: !0, requireProject: !0 }).request({ url: "/cors" }); output.print(origins.map((origin) => origin.origin).join(` `)); } }; function validateDatasetAliasName(datasetName) { if (!datasetName) return "Alias name is missing"; const name = `${datasetName}`; return name.toLowerCase() !== name ? "Alias name must be all lowercase characters" : name.length < 2 ? "Alias name must be at least two characters long" : name.length > 64 ? "Alias name must be at most 64 characters" : /^[a-z0-9~]/.test(name) ? /^[a-z0-9~][-_a-z0-9]+$/.test(name) ? /[-_]$/.test(name) ? "Alias name must not end with a dash or an underscore" : !1 : "Alias name must only contain letters, numbers, dashes and underscores" : "Alias name must start with a letter or a number"; } function promptForDatasetAliasName(prompt, options = {}) { return prompt.single({ type: "input", message: "Alias name:", validate: (name) => validateDatasetAliasName(name) || !0, ...options }); } const ALIAS_PREFIX = "~"; function listAliases(client) { return client.request({ uri: "/aliases" }); } function createAlias(client, aliasName, datasetName) { return modify(client, "PUT", aliasName, datasetName ? { datasetName } : void 0); } function updateAlias(client, aliasName, datasetName) { return modify(client, "PATCH", aliasName, datasetName ? { datasetName } : void 0); } function unlinkAlias(client, aliasName) { return modify(client, "PATCH", `${aliasName}/unlink`, {}); } function removeAlias(client, aliasName) { return modify(client, "DELETE", aliasName); } function modify(client, method, aliasName, body) { return client.request({ method, uri: `/aliases/${aliasName}`, body }); } const createAliasHandler = async (args, context) => { const { apiClient, output, prompt } = context, [, alias, targetDataset] = args.argsWithoutOptions, client = apiClient(), nameError = alias && validateDatasetAliasName(alias); if (nameError) throw new Error(nameError); const [datasets, aliases, projectFeatures] = await Promise.all([client.datasets.list().then((sets) => sets.map((ds) => ds.name)), listAliases(client).then((sets) => sets.map((ds) => ds.name)), client.request({ uri: "/features" })]); let aliasName = await (alias || promptForDatasetAliasName(prompt)), aliasOutputName = aliasName; if (aliasName.startsWith(ALIAS_PREFIX) ? aliasName = aliasName.slice(1) : aliasOutputName = `${ALIAS_PREFIX}${aliasName}`, aliases.includes(aliasName)) throw new Error(`Dataset alias "${aliasOutputName}" already exists`); if (targetDataset) { const datasetErr = validateDatasetName(targetDataset); if (datasetErr) throw new Error(datasetErr); } const datasetName = await (targetDataset || promptForDatasetName(prompt)); if (datasetName && !datasets.includes(datasetName)) throw new Error(`Dataset "${datasetName}" does not exist `); if (!projectFeatures.includes("advancedDatasetManagement")) throw new Error("This project cannot create a dataset alias"); try { await createAlias(client, aliasName, datasetName), output.print(`Dataset alias ${aliasOutputName} created ${datasetName && `and linked to ${datasetName}`} successfully`); } catch (err) { throw new Error(`Dataset alias creation failed: ${err.message}`, { cause: err }); } }; function parseCliFlags$5(args) { return yargs(hideBin(args.argv || process.argv).slice(2)).option("force", { type: "boolean" }).argv; } const deleteAliasHandler = async (args, context) => { const { apiClient, prompt, output } = context, [, ds] = args.argsWithoutOptions, { force } = await parseCliFlags$5(args), client = apiClient(); if (!ds) throw new Error("Dataset alias name must be provided"); let aliasName = `${ds}`; const dsError = validateDatasetAliasName(aliasName); if (dsError) throw dsError; aliasName = aliasName.startsWith(ALIAS_PREFIX) ? aliasName.slice(1) : aliasName; const [fetchedAliases] = await Promise.all([listAliases(client)]), linkedAlias = fetchedAliases.find((elem) => elem.name === aliasName), message = linkedAlias && linkedAlias.datasetName ? `This dataset alias is linked to ${linkedAlias.datasetName}. ` : ""; return force ? output.warn(`'--force' used: skipping confirmation, deleting alias "${aliasName}"`) : await prompt.single({ type: "input", message: `${message}Are you ABSOLUTELY sure you want to delete this dataset alias? Type the name of the dataset alias to confirm delete: `, filter: (input) => `${input}`.trim(), validate: (input) => input === aliasName || "Incorrect dataset alias name. Ctrl + C to cancel delete." }), removeAlias(client, aliasName).then(() => { output.print("Dataset alias deleted successfully"); }); }, linkAliasHandler = async (args, context) => { const { apiClient, output, prompt } = context, [, alias, targetDataset] = args.argsWithoutOptions, flags = args.extOptions, client = apiClient(), nameError = alias && validateDatasetAliasName(alias); if (nameError) throw new Error(nameError); const [datasets, fetchedAliases] = await Promise.all([client.datasets.list().then((sets) => sets.map((ds) => ds.name)), listAliases(client)]), aliases = fetchedAliases.map((da) => da.name); let aliasName = await (alias || promptForDatasetAliasName(prompt)), aliasOutputName = aliasName; if (aliasName.startsWith(ALIAS_PREFIX) ? aliasName = aliasName.slice(1) : aliasOutputName = `${ALIAS_PREFIX}${aliasName}`, !aliases.includes(aliasName)) throw new Error(`Dataset alias "${aliasOutputName}" does not exist `); const datasetName = await (targetDataset || promptForDatasetName(prompt)), datasetErr = validateDatasetName(datasetName); if (datasetErr) throw new Error(datasetErr); if (!datasets.includes(datasetName)) throw new Error(`Dataset "${datasetName}" does not exist `); const linkedAlias = fetchedAliases.find((elem) => elem.name === aliasName); if (linkedAlias && linkedAlias.datasetName) { if (linkedAlias.datasetName === datasetName) throw new Error(`Dataset alias ${aliasOutputName} already linked to ${datasetName}`); flags.force || await prompt.single({ type: "input", message: `This alias is linked to dataset <${linkedAlias.datasetName}>. Are you ABSOLUTELY sure you want to link this dataset alias to this dataset? Type YES/NO: `, filter: (input) => `${input}`.toLowerCase(), validate: (input) => input === "yes" || "Ctrl + C to cancel dataset alias link." }); } try { await updateAlias(client, aliasName, datasetName), output.print(`Dataset alias ${aliasOutputName} linked to ${datasetName} successfully`); } catch (err) { throw new Error(`Dataset alias link failed: ${err.message}`, { cause: err }); } }; function parseCliFlags$4(args) { return yargs(hideBin(args.argv || process.argv).slice(2)).option("force", { type: "boolean" }).argv; } const unlinkAliasHandler = async (args, context) => { const { apiClient, output, prompt } = context, [, alias] = args.argsWithoutOptions, { force } = await parseCliFlags$4(args), client = apiClient(), nameError = alias && validateDatasetAliasName(alias); if (nameError) throw new Error(nameError); const fetchedAliases = await listAliases(client); let aliasName = await (alias || promptForDatasetAliasName(prompt)), aliasOutputName = aliasName; aliasName.startsWith(ALIAS_PREFIX) ? aliasName = aliasName.slice(1) : aliasOutputName = `${ALIAS_PREFIX}${aliasName}`; const linkedAlias = fetchedAliases.find((elem) => elem.name === aliasName); if (!linkedAlias) throw new Error(`Dataset alias "${aliasOutputName}" does not exist`); if (!linkedAlias.datasetName) throw new Error(`Dataset alias "${aliasOutputName}" is not linked to a dataset`); force ? output.warn(`'--force' used: skipping confirmation, unlinking alias "${aliasOutputName}"`) : await prompt.single({ type: "input", message: `Are you ABSOLUTELY sure you want to unlink this alias from the "${linkedAlias.datasetName}" dataset? Type YES/NO: `, filter: (input) => `${input}`.toLowerCase(), validate: (input) => input === "yes" || "Ctrl + C to cancel dataset alias unlink." }); try { const result = await unlinkAlias(client, aliasName); output.print(`Dataset alias ${aliasOutputName} unlinked from ${result.datasetName} successfully`); } catch (err) { throw new Error(`Dataset alias unlink failed: ${err.message}`, { cause: err }); } }, helpText$C = ` Below are examples of the alias subcommand Create Alias sanity dataset alias create sanity dataset alias create <alias-name> sanity dataset alias create <alias-name> <target-dataset> Delete Alias Options --force Skips security prompt and forces link command Usage sanity dataset alias delete <alias-name> sanity dataset alias delete <alias-name> --force Link Alias Options --force Skips security prompt and forces link command Usage sanity dataset alias link sanity dataset alias link <alias-name> sanity dataset alias link <alias-name> <target-dataset> sanity dataset alias link <alias-name> <target-dataset> --force Un-link Alias Options --force Skips security prompt and forces link command Usage sanity dataset alias unlink sanity dataset alias unlink <alias-name> sanity dataset alias unlink <alias-name> --force `, aliasCommand = { name: "alias", group: "dataset", signature: "SUBCOMMAND [ALIAS_NAME, TARGET_DATASET]", helpText: helpText$C, description: "You can manage your dataset alias using this command.", action: async (args, context) => { const [verb] = args.argsWithoutOptions; switch (verb) { case "create": await createAliasHandler(args, context); break; case "delete": await deleteAliasHandler(args, context); break; case "unlink": await unlinkAliasHandler(args, context); break; case "link": await linkAliasHandler(args, context); break; default: throw new Error(oneline` Invalid command provided. Available commands are: create, delete, link and unlink. For more guide run the help command 'sanity dataset alias --help' `); } } }; async function listDatasetCopyJobs(flags, context) { const { apiClient, output, chalk } = context, client = apiClient(), projectId = client.config().projectId, query = {}; let response; flags.offset && flags.offset >= 0 && (query.offset = `${flags.offset}`), flags.limit && flags.limit > 0 && (query.limit = `${flags.limit}`); try { response = await client.request({ method: "GET", uri: `/projects/${projectId}/datasets/copy`, query }); } catch (error) { error.statusCode ? output.error(`${chalk.red(`Dataset copy list failed: ${error.response.body.message}`)} `) : output.error(`${chalk.red(`Dataset copy list failed: ${error.message}`)} `); } if (response && response.length > 0) { const table = new Table({ title: "Dataset copy jobs for this project in descending order", columns: [{ name: "id", title: "Job ID", alignment: "left" }, { name: "sourceDataset", title: "Source Dataset", alignment: "left" }, { name: "targetDataset", title: "Target Dataset", alignment: "left" }, { name: "state", title: "State", alignment: "left" }, { name: "withHistory", title: "With history", alignment: "left" }, { name: "timeStarted", title: "Time started", alignment: "left" }, { name: "timeTaken", title: "Time taken", alignment: "left" }] }); response.forEach((job) => { const { id, state, createdAt, updatedAt, sourceDataset, targetDataset, withHistory } = job; let timeStarted = ""; createdAt !== "" && (timeStarted = formatDistanceToNow(parseISO(createdAt))); let timeTaken = ""; updatedAt !== "" && (timeTaken = formatDistance(parseISO(updatedAt), parseISO(createdAt))); let color; switch (state) { case "completed": color = "green"; break; case "failed": color = "red"; break; case "pending": color = "yellow"; break; default: color = ""; } table.addRow({ id, state, withHistory, timeStarted: `${timeStarted} ago`, timeTaken, sourceDataset, targetDataset }, { color }); }), table.printTable(); } else output.print("This project doesn't have any dataset copy jobs"); } const getClientUrl = (client, uri, useCdn = !1) => { const config = client.config(); return `${useCdn ? config.cdnUrl : config.url}/${uri.replace(/^\//, "")}`; }, helpText$B = ` Options --detach Start the copy without waiting for it to finish --attach <job-id> Attach to the running copy process to show progress --skip-history Don't preserve document history on copy --list Lists all dataset copy jobs corresponding to a certain criteria. --offset Start position in the list of jobs. Default 0. With --list. --limit Maximum number of jobs returned. Default 10. Maximum 1000. With --list. Examples sanity dataset copy sanity dataset copy <source-dataset> sanity dataset copy <source-dataset> <target-dataset> sanity dataset copy --skip-history <source-dataset> <target-dataset> sanity dataset copy --detach <source-dataset> <target-dataset> sanity dataset copy --attach <job-id> sanity dataset copy --list sanity dataset copy --list --offset=2 sanity dataset copy --list --offset=2 --limit=10 `; function parseCliFlags$3(args) { return yargs(hideBin(args.argv || process.argv).slice(2)).option("attach", { type: "string" }).option("list", { type: "boolean" }).option("limit", { type: "number" }).option("offset", { type: "number" }).option("skip-history", { type: "boolean" }).option("detach", { type: "boolean" }).argv; } const progress = (url2) => new Observable((observer) => { let progressSource = new EventSource(url2), stopped = !1; function onError(error) { progressSource && progressSource.close(), debug$2(`Error received: ${error}`), !stopped && (observer.next({ type: "reconnect" }), progressSource = new EventSource(url2)); } function onChannelError(error) { stopped = !0, progressSource.close(), observer.error(error); } function onMessage(event) { const data = JSON.parse(event.data); data.state === "failed" ? (debug$2("Job failed. Data: %o", event), observer.error(event)) : data.state === "completed" ? (debug$2("Job succeeded. Data: %o", event), onComplete()) : (debug$2("Job progressed. Data: %o", event), observer.next(data)); } function onComplete() { progressSource.removeEventListener("error", onError), progressSource.removeEventListener("channel_error", onChannelError), progressSource.removeEventListener("job", onMessage), progressSource.removeEventListener("done", onComplete), progressSource.close(), observer.complete(); } progressSource.addEventListener("error", onError), progressSource.addEventListener("channel_error", onChannelError), progressSource.addEventListener("job", onMessage), progressSource.addEventListener("done", onComplete); }), followProgress = (jobId, client, output) => { let currentProgress = 0; const spinner = output.spinner({}).start(), listenUrl = getClientUrl(client, `jobs/${jobId}/listen`); return debug$2(`Listening to ${listenUrl}`), new Promise((resolve, reject) => { progress(listenUrl).subscribe({ next: (event) => { typeof event.progress == "number" && (currentProgress = event.progress), spinner.text = `Copy in progress: ${currentProgress}%`; }, error: (err) => { spinner.fail(), reject(new Error(`${err.data}`)); }, complete: () => { spinner.succeed("Copy finished."), resolve(); } }); }); }, copyDatasetCommand = { name: "copy", group: "da