@nuxt/content
Version: 3.7.1
Write your content inside your Nuxt app
import { readFile, writeFile, mkdir, rm, stat } from 'node:fs/promises';
import { tryUseNuxt, resolveAlias, useLogger, isIgnored, updateTemplates, addVitePlugin, useNuxt, resolveModule, installModule, extendViteConfig, addTemplate, hasNuxtModule, addPlugin, addComponent, addServerHandler, addPrerenderRoutes, defineNuxtModule, createResolver, addImports, addServerImports, addTypeTemplate } from '@nuxt/kit';
import { hash } from 'ohash';
import { join, isAbsolute, dirname, extname, relative, resolve, normalize } from 'pathe';
import htmlTags from '@nuxtjs/mdc/runtime/parser/utils/html-tags-list';
import { pascalCase, camelCase, kebabCase } from 'scule';
import defu$1, { defu as defu$2, createDefu } from 'defu';
import { getOrderedSchemaKeys, getCollectionFieldsTypes, describeProperty } from '../dist/runtime/internal/schema.js';
import { withLeadingSlash, withoutTrailingSlash, isRelative, withTrailingSlash } from 'ufo';
import { glob } from 'tinyglobby';
import { createWriteStream } from 'node:fs';
import { pipeline } from 'node:stream';
import { promisify } from 'node:util';
import { extract } from 'tar';
import { readGitConfig } from 'pkg-types';
import gitUrlParse from 'git-url-parse';
import crypto from 'node:crypto';
import chokidar from 'chokidar';
import micromatch from 'micromatch';
import { WebSocketServer } from 'ws';
import { listen } from 'listhen';
import cloudflareD1Connector from 'db0/connectors/cloudflare-d1';
import { isWebContainer } from '@webcontainer/env';
import { addDependency } from 'nypm';
import { createContext } from 'unctx';
import { dirname as dirname$1 } from 'node:path';
import { parseMarkdown, createShikiHighlighter, rehypeHighlight } from '@nuxtjs/mdc/runtime';
import { createJiti } from 'jiti';
import { createOnigurumaEngine } from 'shiki/engine/oniguruma';
import { visit } from 'unist-util-visit';
import { unified } from 'unified';
import { toString } from 'mdast-util-to-string';
import { postprocess, preprocess } from 'micromark';
import { stringifyPosition } from 'unist-util-stringify-position';
import { markdownLineEnding, markdownSpace } from 'micromark-util-character';
import { push, splice } from 'micromark-util-chunked';
import { resolveAll } from 'micromark-util-resolve-all';
import { normalizeUri } from 'micromark-util-sanitize-uri';
import { fromHast } from 'minimark/hast';
import slugify from 'slugify';
import { parseFrontMatter } from 'remark-mdc';
import { destr } from 'destr';
import { gzip } from 'node:zlib';
import { genDynamicImport } from 'knitwork';
import { compile } from 'json-schema-to-typescript';
import { getComponentMeta } from 'nuxt-component-meta/parser';
import { propsToJsonSchema } from 'nuxt-component-meta/utils';
import { createDefineConfig, watchConfig, loadConfig } from 'c12';
import { zodToJsonSchema, ignoreOverride } from 'zod-to-json-schema';
import { z as z$1 } from 'zod';
const version = "3.7.1";
async function downloadRepository(url, cwd, { headers } = {}) {
const tarFile = join(cwd, ".content.clone.tar.gz");
const cacheFile = join(cwd, ".content.cache.json");
const cache = await readFile(cacheFile, "utf8").then((d) => JSON.parse(d)).catch(() => null);
if (cache) {
const response = await fetch(url, { method: "HEAD", headers });
const etag = response.headers.get("etag");
if (etag === cache.etag) {
await writeFile(cacheFile, JSON.stringify({
...cache,
updatedAt: (/* @__PURE__ */ new Date()).toISOString()
}, null, 2));
return;
}
}
await mkdir(cwd, { recursive: true });
try {
const response = await fetch(url, { headers });
const stream = createWriteStream(tarFile);
await promisify(pipeline)(response.body, stream);
await extract({
file: tarFile,
cwd,
onentry(entry) {
entry.path = entry.path.split("/").splice(1).join("/");
}
});
await writeFile(cacheFile, JSON.stringify({
url,
etag: response.headers.get("etag"),
createdAt: (/* @__PURE__ */ new Date()).toISOString(),
updatedAt: (/* @__PURE__ */ new Date()).toISOString()
}, null, 2));
} finally {
await rm(tarFile, { force: true });
}
}
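// --- Illustrative sketch (not part of the module) ---------------------------
// How downloadRepository might be invoked. The URL, destination directory and
// token below are hypothetical. GitHub serves branch tarballs under
// /archive/refs/heads/<branch>.tar.gz.
async function exampleDownloadRepository() {
  await downloadRepository(
    "https://github.com/nuxt/content/archive/refs/heads/main.tar.gz",
    "/tmp/.content-clone",
    { headers: { Authorization: "Bearer <token>" } }
  );
  // Thanks to the ETag check above, a second call with an unchanged tarball
  // only refreshes `.content.cache.json` and skips the download entirely.
}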
function parseGitHubUrl(url) {
const regex = /https:\/\/github\.com\/([^/]+)\/([^/]+)(?:\/tree\/([^/]+))?(?:\/(.+))?/;
const match = url.match(regex);
if (match) {
const org = match[1];
const repo = match[2];
let branch = match[3] || "main";
let path = match[4] || "";
if (["fix", "feat", "chore", "test", "docs"].includes(branch)) {
const pathParts = path.split("/");
branch = join(branch, pathParts[0] || "");
path = pathParts.slice(1).join("/");
}
return {
org,
repo,
branch,
path
};
}
return null;
}
function parseBitBucketUrl(url) {
const bitbucketRegex = /https:\/\/bitbucket\.org\/([^/]+)\/([^/]+)(?:\/src\/([^/]+))?(?:\/(.+))?/;
const bitbucketMatch = url.match(bitbucketRegex);
if (bitbucketMatch) {
const org = bitbucketMatch[1];
const repo = bitbucketMatch[2];
let branch = bitbucketMatch[3] || "main";
let path = bitbucketMatch[4] || "";
if (["fix", "feat", "chore", "test", "docs"].includes(branch)) {
const pathParts = path.split("/");
branch = join(branch, pathParts[0] || "");
path = pathParts.slice(1).join("/");
}
return {
org,
repo,
branch,
path
};
}
return null;
}
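// --- Illustrative sketch (not part of the module) ---------------------------
// Expected shapes returned by the two URL parsers above, with hypothetical URLs.
function exampleParseRepositoryUrls() {
  // -> { org: "nuxt", repo: "content", branch: "main", path: "docs" }
  parseGitHubUrl("https://github.com/nuxt/content/tree/main/docs");
  // Branches named after conventional-commit prefixes (fix/, feat/, ...) are
  // re-joined with the next path segment:
  // -> { org: "nuxt", repo: "content", branch: "fix/typo", path: "docs" }
  parseGitHubUrl("https://github.com/nuxt/content/tree/fix/typo/docs");
  // -> { org: "acme", repo: "site", branch: "main", path: "" }
  parseBitBucketUrl("https://bitbucket.org/acme/site/src/main");
}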
async function getLocalGitInfo(rootDir) {
const remote = await getLocalGitRemote(rootDir);
if (!remote) {
return;
}
const { name, owner, source } = gitUrlParse(remote);
const url = `https://${source}/${owner}/${name}`;
return {
name,
owner,
url
};
}
function getGitEnv() {
const envInfo = {
// Provider
provider: process.env.VERCEL_GIT_PROVIDER || (process.env.GITHUB_SERVER_URL ? "github" : void 0) || "",
// Owner
owner: process.env.VERCEL_GIT_REPO_OWNER || process.env.GITHUB_REPOSITORY_OWNER || process.env.CI_PROJECT_PATH?.split("/").shift() || "",
// Name
name: process.env.VERCEL_GIT_REPO_SLUG || process.env.GITHUB_REPOSITORY?.split("/").pop() || process.env.CI_PROJECT_PATH?.split("/").splice(1).join("/") || "",
// Url
url: process.env.REPOSITORY_URL || ""
// netlify
};
if (!envInfo.url && envInfo.provider && envInfo.owner && envInfo.name) {
envInfo.url = `https://${envInfo.provider}.com/${envInfo.owner}/${envInfo.name}`;
}
if (!envInfo.name && !envInfo.owner && envInfo.url) {
try {
const { name, owner } = gitUrlParse(envInfo.url);
envInfo.name = name;
envInfo.owner = owner;
} catch {
}
}
return {
name: envInfo.name,
owner: envInfo.owner,
url: envInfo.url
};
}
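// --- Illustrative sketch (not part of the module) ---------------------------
// getGitEnv derives repository info purely from CI environment variables. With
// a hypothetical GitHub Actions environment:
function exampleGetGitEnv() {
  process.env.GITHUB_SERVER_URL = "https://github.com";
  process.env.GITHUB_REPOSITORY = "nuxt/content";
  process.env.GITHUB_REPOSITORY_OWNER = "nuxt";
  // -> { name: "content", owner: "nuxt", url: "https://github.com/nuxt/content" }
  return getGitEnv();
}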
async function getLocalGitRemote(dir) {
try {
const parsed = await readGitConfig(dir);
if (!parsed) {
return;
}
return parsed.remote?.["origin"]?.url;
} catch {
}
}
const nuxtContentContext = {
zod3: {
toJSONSchema: (_schema, _name) => {
throw new Error(
"It seems you are using Zod version 3 for collection schema, but Zod is not installed, Nuxt Content does not ship with zod, install `zod` and `zod-to-json-schema` and it will work."
);
}
},
zod4: {
toJSONSchema: (_schema, _name) => {
throw new Error(
"It seems you are using Zod version 4 for collection schema, but Zod is not installed, Nuxt Content does not ship with zod, install `zod` and it will work."
);
}
},
valibot: {
toJSONSchema: (_schema, _name) => {
throw new Error(
"It seems you are using Valibot for collection schema, but Valibot is not installed, Nuxt Content does not ship with valibot, install `valibot` and `@valibot/to-json-schema` and it will work."
);
}
},
unknown: {
toJSONSchema: (_schema, _name) => {
throw new Error("Unknown schema vendor");
}
},
set: (key, value) => {
nuxtContentContext[key] = value;
},
get: (key) => {
return nuxtContentContext[key];
}
};
const ctx = createContext();
ctx.set(nuxtContentContext);
const nuxtContentContext$1 = ctx.use;
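// --- Illustrative sketch (not part of the module) ---------------------------
// The unctx-backed context above lets optional schema vendors be swapped in at
// runtime; initiateValidatorsContext below does exactly this for zod and
// valibot. A hypothetical replacement could be registered the same way:
function exampleRegisterSchemaVendor() {
  nuxtContentContext$1().set("valibot", {
    toJSONSchema: (schema, name) => {
      // Build and return a JSON Schema document from `schema` here.
      return { $ref: `#/definitions/${name}`, definitions: { [name]: {} } };
    }
  });
}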
async function isPackageInstalled(packageName) {
try {
await import(packageName);
return true;
} catch {
return false;
}
}
async function ensurePackageInstalled(pkg) {
if (!await isPackageInstalled(pkg)) {
logger.error(`Nuxt Content requires the \`${pkg}\` package to operate.`);
const confirm = await logger.prompt(`Do you want to install the \`${pkg}\` package?`, {
type: "confirm",
name: "confirm",
initial: true
});
if (!confirm) {
logger.error(`Nuxt Content requires the \`${pkg}\` package to operate. Please install \`${pkg}\` manually and try again: \`npm install ${pkg}\``);
process.exit(1);
}
await addDependency(pkg, {
cwd: tryUseNuxt()?.options.rootDir
});
}
}
function isNodeSqliteAvailable() {
try {
const module = globalThis.process?.getBuiltinModule?.("node:sqlite");
if (module) {
const originalEmit = process.emit;
process.emit = function(...args) {
const name = args[0];
const data = args[1];
if (name === `warning` && typeof data === `object` && data.name === `ExperimentalWarning` && data.message.includes(`SQLite is an experimental feature`)) {
return false;
}
return originalEmit.apply(process, args);
};
return true;
}
return false;
} catch {
return false;
}
}
async function initiateValidatorsContext() {
if (await isPackageInstalled("valibot") && await isPackageInstalled("@valibot/to-json-schema")) {
nuxtContentContext$1().set("valibot", await import('./chunks/valibot.mjs'));
}
if (await isPackageInstalled("zod")) {
nuxtContentContext$1().set("zod3", await Promise.resolve().then(function () { return zod3; }));
nuxtContentContext$1().set("zod4", await import('./chunks/zod4.mjs'));
}
}
const databaseVersion = "v3.5.0";
async function refineDatabaseConfig(database, opts) {
if (database.type === "d1") {
if (!("bindingName" in database)) {
database.bindingName = database.binding;
}
}
if (database.type === "sqlite") {
const path = isAbsolute(database.filename) ? database.filename : join(opts.rootDir, database.filename);
await mkdir(dirname(path), { recursive: true }).catch(() => {
});
if (opts.updateSqliteFileName) {
database.filename = path;
}
}
}
async function resolveDatabaseAdapter(adapter, opts) {
const databaseConnectors = {
nodesqlite: "db0/connectors/node-sqlite",
bunsqlite: opts.resolver.resolve("./runtime/internal/connectors/bun-sqlite"),
postgres: "db0/connectors/postgresql",
libsql: "db0/connectors/libsql/web",
d1: "db0/connectors/cloudflare-d1"
};
adapter = adapter || "sqlite";
if (adapter === "sqlite" && process.versions.bun) {
return databaseConnectors.bunsqlite;
}
if (adapter === "sqlite") {
return await findBestSqliteAdapter({ sqliteConnector: opts.sqliteConnector, resolver: opts.resolver });
}
return databaseConnectors[adapter];
}
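// --- Illustrative sketch (not part of the module) ---------------------------
// Resolution behaviour of resolveDatabaseAdapter, with a hypothetical resolver.
async function exampleResolveAdapter(resolver) {
  // Named adapters map straight to db0 connectors:
  // -> "db0/connectors/postgresql"
  await resolveDatabaseAdapter("postgres", { resolver });
  // "sqlite" is environment-dependent: bun-sqlite under Bun, otherwise
  // node:sqlite, sqlite3 or better-sqlite3 via findBestSqliteAdapter below.
  return resolveDatabaseAdapter("sqlite", { sqliteConnector: "native", resolver });
}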
async function getDatabase(database, opts) {
if (database.type === "d1") {
return cloudflareD1Connector({ bindingName: database.bindingName });
}
return import(await findBestSqliteAdapter(opts)).then((m) => {
const connector = m.default || m;
return connector({ path: database.filename });
});
}
const _localDatabase = {};
async function getLocalDatabase(database, { connector, sqliteConnector } = {}) {
const databaseLocation = database.type === "sqlite" ? database.filename : database.bindingName;
const db = _localDatabase[databaseLocation] || connector || await getDatabase(database, { sqliteConnector });
const cacheCollection = {
tableName: "_development_cache",
extendedSchema: {
$schema: "http://json-schema.org/draft-07/schema#",
$ref: "#/definitions/cache",
definitions: {
cache: {
type: "object",
properties: {
id: { type: "string" },
value: { type: "string" },
checksum: { type: "string" }
},
required: ["id", "value", "checksum"]
}
}
}};
if (!_localDatabase[databaseLocation]) {
_localDatabase[databaseLocation] = db;
let dropCacheTable = false;
try {
dropCacheTable = await db.prepare("SELECT * FROM _development_cache WHERE id = ?").get("__DATABASE_VERSION__").then((row) => row?.value !== databaseVersion);
} catch {
dropCacheTable = true;
}
const initQueries = generateCollectionTableDefinition(cacheCollection, { drop: Boolean(dropCacheTable) });
for (const query of initQueries.split("\n")) {
await db.exec(query);
}
if (dropCacheTable) {
await db.exec(generateCollectionInsert(cacheCollection, { id: "__DATABASE_VERSION__", value: databaseVersion, checksum: databaseVersion }).queries[0]);
}
}
const fetchDevelopmentCache = async () => {
const result = await db.prepare("SELECT * FROM _development_cache").all();
return result.reduce((acc, cur) => ({ ...acc, [cur.id]: cur }), {});
};
const fetchDevelopmentCacheForKey = async (id) => {
return await db.prepare("SELECT * FROM _development_cache WHERE id = ?").get(id);
};
const insertDevelopmentCache = async (id, value, checksum) => {
await deleteDevelopmentCache(id);
const insert = generateCollectionInsert(cacheCollection, { id, value, checksum });
for (const query of insert.queries) {
await db.exec(query);
}
};
const deleteDevelopmentCache = async (id) => {
await db.prepare(`DELETE FROM _development_cache WHERE id = ?`).run(id);
};
const dropContentTables = async () => {
const tables = await db.prepare("SELECT name FROM sqlite_master WHERE type = ? AND name LIKE ?").all("table", "_content_%");
for (const { name } of tables) {
await db.exec(`DROP TABLE ${name}`);
}
};
return {
database: db,
async exec(sql) {
await db.exec(sql);
},
close() {
Reflect.deleteProperty(_localDatabase, databaseLocation);
},
fetchDevelopmentCache,
fetchDevelopmentCacheForKey,
insertDevelopmentCache,
deleteDevelopmentCache,
dropContentTables
};
}
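// --- Illustrative sketch (not part of the module) ---------------------------
// Using the development-cache API returned above, against a hypothetical
// on-disk SQLite database.
async function exampleLocalDatabase() {
  const db = await getLocalDatabase({ type: "sqlite", filename: "/tmp/content.db" });
  await db.insertDevelopmentCache("docs/index.md", "{\"parsed\":true}", "checksum-1");
  const hit = await db.fetchDevelopmentCacheForKey("docs/index.md");
  // `hit?.checksum` lets callers skip re-parsing files whose content is unchanged.
  await db.dropContentTables(); // drops every `_content_*` table
  db.close();
  return hit;
}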
async function findBestSqliteAdapter(opts) {
if (process.versions.bun) {
return opts.resolver ? opts.resolver.resolve("./runtime/internal/connectors/bun-sqlite") : "db0/connectors/bun-sqlite";
}
if (opts.sqliteConnector === "native" && isNodeSqliteAvailable()) {
return opts.resolver ? opts.resolver.resolve("./runtime/internal/connectors/node-sqlite") : "db0/connectors/node-sqlite";
}
if (opts.sqliteConnector === "sqlite3") {
return "db0/connectors/sqlite3";
}
if (opts.sqliteConnector === "better-sqlite3") {
await ensurePackageInstalled("better-sqlite3");
return "db0/connectors/better-sqlite3";
}
if (isWebContainer()) {
await ensurePackageInstalled("sqlite3");
return "db0/connectors/sqlite3";
}
await ensurePackageInstalled("better-sqlite3");
return "db0/connectors/better-sqlite3";
}
const defineTransformer = (transformer) => {
return transformer;
};
function createTokenizer(parser, initialize, from) {
let point = Object.assign(
{
line: 1,
column: 1,
offset: 0
},
{
_index: 0,
_bufferIndex: -1
}
);
const columnStart = {};
const resolveAllConstructs = [];
let chunks = [];
let stack = [];
const effects = {
consume,
enter,
exit,
attempt: constructFactory(onsuccessfulconstruct),
check: constructFactory(onsuccessfulcheck),
interrupt: constructFactory(onsuccessfulcheck, {
interrupt: true
})
};
const context = {
previous: null,
code: null,
containerState: {},
events: [],
parser,
sliceStream,
sliceSerialize,
now,
defineSkip,
write
};
let state = initialize.tokenize.call(context, effects);
if (initialize.resolveAll) {
resolveAllConstructs.push(initialize);
}
return context;
function write(slice) {
chunks = push(chunks, slice);
main();
if (chunks[chunks.length - 1] !== null) {
return [];
}
addResult(initialize, 0);
context.events = resolveAll(resolveAllConstructs, context.events, context);
return context.events;
}
function sliceSerialize(token, expandTabs) {
return serializeChunks(sliceStream(token), expandTabs);
}
function sliceStream(token) {
return sliceChunks(chunks, token);
}
function now() {
return Object.assign({}, point);
}
function defineSkip(value) {
columnStart[value.line] = value.column;
accountForPotentialSkip();
}
function main() {
let chunkIndex;
while (point._index < chunks.length) {
const chunk = chunks[point._index];
if (typeof chunk === "string") {
chunkIndex = point._index;
if (point._bufferIndex < 0) {
point._bufferIndex = 0;
}
while (point._index === chunkIndex && point._bufferIndex < chunk.length) {
go(chunk.charCodeAt(point._bufferIndex));
}
} else {
go(chunk);
}
}
}
function go(code) {
state = state(code);
}
function consume(code) {
if (markdownLineEnding(code)) {
point.line++;
point.column = 1;
point.offset += code === -3 ? 2 : 1;
accountForPotentialSkip();
} else if (code !== -1) {
point.column++;
point.offset++;
}
if (point._bufferIndex < 0) {
point._index++;
} else {
point._bufferIndex++;
if (point._bufferIndex === chunks[point._index].length) {
point._bufferIndex = -1;
point._index++;
}
}
context.previous = code;
}
function enter(type, fields) {
const token = fields || {};
token.type = type;
token.start = now();
context.events.push(["enter", token, context]);
stack.push(token);
return token;
}
function exit(type) {
const token = stack.pop();
token.end = now();
context.events.push(["exit", token, context]);
return token;
}
function onsuccessfulconstruct(construct, info) {
addResult(construct, info.from);
}
function onsuccessfulcheck(_, info) {
info.restore();
}
function constructFactory(onreturn, fields) {
return hook;
function hook(constructs, returnState, bogusState) {
let listOfConstructs;
let constructIndex;
let currentConstruct;
let info;
return Array.isArray(constructs) ? (
/* c8 ignore next 1 */
handleListOfConstructs(constructs)
) : "tokenize" in constructs ? handleListOfConstructs([constructs]) : handleMapOfConstructs(constructs);
function handleMapOfConstructs(map) {
return start;
function start(code) {
const def = code !== null && map[code];
const all = code !== null && map.null;
const list = [
// To do: add more extension tests.
/* c8 ignore next 2 */
...Array.isArray(def) ? def : def ? [def] : [],
...Array.isArray(all) ? all : all ? [all] : []
];
return handleListOfConstructs(list)(code);
}
}
function handleListOfConstructs(list) {
listOfConstructs = list;
constructIndex = 0;
if (list.length === 0) {
return bogusState;
}
return handleConstruct(list[constructIndex]);
}
function handleConstruct(construct) {
return start;
function start(code) {
info = store();
currentConstruct = construct;
if (!construct.partial) {
context.currentConstruct = construct;
}
if (construct.name && context.parser.constructs.disable.null.includes(construct.name)) {
return nok();
}
return construct.tokenize.call(
// If we do have fields, create an object w/ `context` as its
// prototype.
// This allows a “live binding”, which is needed for `interrupt`.
fields ? Object.assign(Object.create(context), fields) : context,
effects,
ok,
nok
)(code);
}
}
function ok(code) {
onreturn(currentConstruct, info);
return returnState;
}
function nok(code) {
info.restore();
if (++constructIndex < listOfConstructs.length) {
return handleConstruct(listOfConstructs[constructIndex]);
}
return bogusState;
}
}
}
function addResult(construct, from2) {
if (construct.resolveAll && !resolveAllConstructs.includes(construct)) {
resolveAllConstructs.push(construct);
}
if (construct.resolve) {
splice(
context.events,
from2,
context.events.length - from2,
construct.resolve(context.events.slice(from2), context)
);
}
if (construct.resolveTo) {
context.events = construct.resolveTo(context.events, context);
}
}
function store() {
const startPoint = now();
const startPrevious = context.previous;
const startCurrentConstruct = context.currentConstruct;
const startEventsIndex = context.events.length;
const startStack = Array.from(stack);
return {
restore,
from: startEventsIndex
};
function restore() {
point = startPoint;
context.previous = startPrevious;
context.currentConstruct = startCurrentConstruct;
context.events.length = startEventsIndex;
stack = startStack;
accountForPotentialSkip();
}
}
function accountForPotentialSkip() {
if (point.line in columnStart && point.column < 2) {
point.column = columnStart[point.line];
point.offset += columnStart[point.line] - 1;
}
}
}
function sliceChunks(chunks, token) {
const startIndex = token.start._index;
const startBufferIndex = token.start._bufferIndex;
const endIndex = token.end._index;
const endBufferIndex = token.end._bufferIndex;
let view;
if (startIndex === endIndex) {
view = [chunks[startIndex].slice(startBufferIndex, endBufferIndex)];
} else {
view = chunks.slice(startIndex, endIndex);
if (startBufferIndex > -1) {
view[0] = view[0].slice(startBufferIndex);
}
if (endBufferIndex > 0) {
view.push(chunks[endIndex].slice(0, endBufferIndex));
}
}
return view;
}
function serializeChunks(chunks, expandTabs) {
let index = -1;
const result = [];
let atTab;
while (++index < chunks.length) {
const chunk = chunks[index];
let value;
if (typeof chunk === "string") {
value = chunk;
} else
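          // Negative chunks are micromark character codes:
          // -5 = CR, -4 = LF, -3 = CRLF, -2 = horizontal tab, -1 = virtual space
          // left behind by tab expansion; anything else is a real char code.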
switch (chunk) {
case -5: {
value = "\r";
break;
}
case -4: {
value = "\n";
break;
}
case -3: {
value = "\r\n";
break;
}
case -2: {
value = expandTabs ? " " : "\t";
break;
}
case -1: {
if (!expandTabs && atTab) continue;
value = " ";
break;
}
default: {
value = String.fromCharCode(chunk);
}
}
atTab = chunk === -2;
result.push(value);
}
return result.join("");
}
function initializeDocument(effects) {
const self = this;
const delimiter = (this.parser.delimiter || ",").charCodeAt(0);
return enterRow;
function enterRow(code) {
return effects.attempt(
{ tokenize: attemptLastLine },
(code2) => {
effects.consume(code2);
return enterRow;
},
(code2) => {
effects.enter("row");
return enterColumn(code2);
}
)(code);
}
function enterColumn(code) {
effects.enter("column");
return content(code);
}
function content(code) {
if (code === null) {
effects.exit("column");
effects.exit("row");
effects.consume(code);
return content;
}
if (code === 34) {
return quotedData(code);
}
if (code === delimiter) {
if (self.previous === delimiter || markdownLineEnding(self.previous) || self.previous === null) {
effects.enter("data");
effects.exit("data");
}
effects.exit("column");
effects.enter("columnSeparator");
effects.consume(code);
effects.exit("columnSeparator");
effects.enter("column");
return content;
}
if (markdownLineEnding(code)) {
effects.exit("column");
effects.enter("newline");
effects.consume(code);
effects.exit("newline");
effects.exit("row");
return enterRow;
}
return data(code);
}
function data(code) {
effects.enter("data");
return dataChunk(code);
}
function dataChunk(code) {
if (code === null || markdownLineEnding(code) || code === delimiter) {
effects.exit("data");
return content(code);
}
if (code === 92) {
return escapeCharacter(code);
}
effects.consume(code);
return dataChunk;
}
function escapeCharacter(code) {
effects.consume(code);
return function(code2) {
effects.consume(code2);
return content;
};
}
function quotedData(code) {
effects.enter("quotedData");
effects.enter("quotedDataChunk");
effects.consume(code);
return quotedDataChunk;
}
function quotedDataChunk(code) {
if (code === 92) {
return escapeCharacter(code);
}
if (code === 34) {
return effects.attempt(
{ tokenize: attemptDoubleQuote },
(code2) => {
effects.exit("quotedDataChunk");
effects.enter("quotedDataChunk");
return quotedDataChunk(code2);
},
(code2) => {
effects.consume(code2);
effects.exit("quotedDataChunk");
effects.exit("quotedData");
return content;
}
)(code);
}
effects.consume(code);
return quotedDataChunk;
}
}
function attemptDoubleQuote(effects, ok, nok) {
return startSequence;
function startSequence(code) {
if (code !== 34) {
return nok(code);
}
effects.enter("quoteFence");
effects.consume(code);
return sequence;
}
function sequence(code) {
if (code !== 34) {
return nok(code);
}
effects.consume(code);
effects.exit("quoteFence");
return (code2) => ok(code2);
}
}
function attemptLastLine(effects, ok, nok) {
return enterLine;
function enterLine(code) {
if (!markdownSpace(code) && code !== null) {
return nok(code);
}
effects.enter("emptyLine");
return continueLine(code);
}
function continueLine(code) {
if (markdownSpace(code)) {
effects.consume(code);
return continueLine;
}
if (code === null) {
effects.exit("emptyLine");
return ok(code);
}
return nok(code);
}
}
const parse = (options) => {
return createTokenizer(
{ ...options },
{ tokenize: initializeDocument });
};
const own = {}.hasOwnProperty;
const initialPoint = {
line: 1,
column: 1,
offset: 0
};
const fromCSV = function(value, encoding, options) {
if (typeof encoding !== "string") {
options = encoding;
encoding = void 0;
}
return compiler()(
postprocess(
parse(options).write(preprocess()(value, encoding, true))
)
);
};
function compiler() {
const config = {
enter: {
column: opener(openColumn),
row: opener(openRow),
data: onenterdata,
quotedData: onenterdata
},
exit: {
row: closer(),
column: closer(),
data: onexitdata,
quotedData: onexitQuotedData
}
};
return compile;
function compile(events) {
const tree = {
type: "root",
children: []
};
const stack = [tree];
const tokenStack = [];
const context = {
stack,
tokenStack,
config,
enter,
exit,
resume
};
let index = -1;
while (++index < events.length) {
const handler = config[events[index][0]];
if (own.call(handler, events[index][1].type)) {
handler[events[index][1].type].call(
Object.assign(
{
sliceSerialize: events[index][2].sliceSerialize
},
context
),
events[index][1]
);
}
}
if (tokenStack.length > 0) {
const tail = tokenStack[tokenStack.length - 1];
const handler = tail[1] || defaultOnError;
handler.call(context, void 0, tail[0]);
}
tree.position = {
start: point(
events.length > 0 ? events[0][1].start : initialPoint
),
end: point(
events.length > 0 ? events[events.length - 2][1].end : initialPoint
)
};
return tree;
}
function point(d) {
return {
line: d.line,
column: d.column,
offset: d.offset
};
}
function opener(create, and) {
return open;
function open(token) {
enter.call(this, create(token), token);
}
}
function enter(node, token, errorHandler) {
const parent = this.stack[this.stack.length - 1];
parent.children.push(node);
this.stack.push(node);
this.tokenStack.push([token, errorHandler]);
node.position = {
start: point(token.start)
};
return node;
}
function closer(and) {
return close;
function close(token) {
exit.call(this, token);
}
}
function exit(token, onExitError) {
const node = this.stack.pop();
const open = this.tokenStack.pop();
if (!open) {
throw new Error(
"Cannot close `" + token.type + "` (" + stringifyPosition({
start: token.start,
end: token.end
}) + "): it\u2019s not open"
);
} else if (open[0].type !== token.type) {
if (onExitError) {
onExitError.call(this, token, open[0]);
} else {
const handler = open[1] || defaultOnError;
handler.call(this, token, open[0]);
}
}
node.position.end = point(token.end);
return node;
}
function resume() {
return toString(this.stack.pop());
}
function onenterdata(token) {
const parent = this.stack[this.stack.length - 1];
let tail = parent.children[parent.children.length - 1];
if (!tail || tail.type !== "text") {
tail = text();
tail.position = {
start: point(token.start)
};
parent.children.push(tail);
}
this.stack.push(tail);
}
function onexitdata(token) {
const tail = this.stack.pop();
tail.value += this.sliceSerialize(token).trim().replace(/""/g, '"');
tail.position.end = point(token.end);
}
function onexitQuotedData(token) {
const tail = this.stack.pop();
const value = this.sliceSerialize(token);
tail.value += this.sliceSerialize(token).trim().substring(1, value.length - 1).replace(/""/g, '"');
tail.position.end = point(token.end);
}
function text() {
return {
type: "text",
value: ""
};
}
function openColumn() {
return {
type: "column",
children: []
};
}
function openRow() {
return {
type: "row",
children: []
};
}
}
function defaultOnError(left, right) {
if (left) {
throw new Error([
"Cannot close `",
left.type,
"` (",
stringifyPosition({
start: left.start,
end: left.end
}),
"): a different token (`",
right.type,
"`, ",
stringifyPosition({
start: right.start,
end: right.end
}),
") is open"
].join(""));
} else {
throw new Error(
"Cannot close document, a token (`" + right.type + "`, " + stringifyPosition({
start: right.start,
end: right.end
}) + ") is still open"
);
}
}
function csvParse(options) {
const parser = (doc) => {
return fromCSV(doc, options);
};
Object.assign(this, { Parser: parser });
const toJsonObject = (tree) => {
const [header, ...rows] = tree.children;
const columns = header.children.map((col) => col.children[0].value);
const data = rows.map((row) => {
return row.children.reduce((acc, col, i) => {
acc[String(columns[i])] = col.children[0]?.value;
return acc;
}, {});
});
return data;
};
const toJsonArray = (tree) => {
const data = tree.children.map((row) => {
return row.children.map((col) => col.children[0]?.value);
});
return data;
};
const compiler = (doc) => {
if (options.json) {
return toJsonObject(doc);
}
return toJsonArray(doc);
};
Object.assign(this, { Compiler: compiler });
}
const csv = defineTransformer({
name: "csv",
extensions: [".csv"],
parse: async (file, options = {}) => {
const stream = unified().use(csvParse, {
delimiter: ",",
json: true,
...options
});
const { result } = await stream.process(file.body);
return {
id: file.id,
body: result
};
}
});
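// --- Illustrative sketch (not part of the module) ---------------------------
// The csv transformer parses a file body into row objects keyed by the header
// row (or into arrays of arrays when `json: false` is passed).
async function exampleCsvTransformer() {
  const parsed = await csv.parse(
    { id: "content/team.csv", body: "name,role\nAlice,dev\nBob,docs" },
    {}
  );
  // -> { id: "content/team.csv",
  //      body: [{ name: "Alice", role: "dev" }, { name: "Bob", role: "docs" }] }
  return parsed;
}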
const SEMVER_REGEX = /^\d+(?:\.\d+)*(?:\.x)?$/;
const defaultOptions = {
slugifyOptions: {
lower: true
}
};
const pathMeta = defineTransformer({
name: "path-meta",
extensions: [".*"],
transform(content, options = {}) {
const opts = defu$1(options, defaultOptions);
const { basename, extension, stem } = describeId(content.id);
const filePath = generatePath(stem, opts);
return {
path: filePath,
...content,
title: content.title || generateTitle(refineUrlPart(basename)),
stem,
extension
};
}
});
const generatePath = (path, { forceLeadingSlash = true, slugifyOptions = {} } = {}) => {
path = path.split("/").map((part) => slugify(refineUrlPart(part), slugifyOptions)).join("/");
return forceLeadingSlash ? withLeadingSlash(withoutTrailingSlash(path)) : path;
};
const generateTitle = (path) => path.split(/[\s-]/g).map(pascalCase).join(" ");
function refineUrlPart(name) {
name = name.split(/[/:]/).pop();
if (SEMVER_REGEX.test(name)) {
return name;
}
return name.replace(/(\d+\.)?(.*)/, "$2").replace(/^index(\.draft)?$/, "").replace(/\.draft$/, "");
}
const describeId = (id) => {
const [source, ...parts] = id.split(/[:/]/);
const [, basename, extension] = parts[parts.length - 1]?.match(/(.*)\.([^.]+)$/) || [];
if (basename) {
parts[parts.length - 1] = basename;
}
const stem = (parts || []).join("/");
return {
source,
stem,
extension,
basename: basename || ""
};
};
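// --- Illustrative sketch (not part of the module) ---------------------------
// How ids are decomposed and turned into route paths. Numeric ordering
// prefixes ("1.") and "index"/".draft" parts are stripped by refineUrlPart,
// while semver-like segments such as "3.x" survive untouched.
function examplePathMeta() {
  // -> { source: "docs", stem: "1.guide/2.installation",
  //      extension: "md", basename: "2.installation" }
  describeId("docs/1.guide/2.installation.md");
  // -> "/guide/installation"
  generatePath("1.guide/2.installation");
  // -> "3.x" (matches SEMVER_REGEX, so the ordering-prefix rule is skipped)
  refineUrlPart("3.x");
}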
const markdown = defineTransformer({
name: "markdown",
extensions: [".md"],
parse: async (file, options = {}) => {
const config = { ...options };
config.rehypePlugins = await importPlugins(config.rehypePlugins);
config.remarkPlugins = await importPlugins(config.remarkPlugins);
const highlight = options.highlight ? {
...options.highlight,
// Pass only when it's a function. String values are handled by `@nuxtjs/mdc`
highlighter: typeof options.highlight?.highlighter === "function" ? options.highlight.highlighter : void 0
} : void 0;
const parsed = await parseMarkdown(file.body, {
...config,
highlight,
toc: config.toc,
remark: { plugins: config.remarkPlugins },
rehype: {
plugins: config.rehypePlugins,
options: { handlers: { link } }
}
}, {
fileOptions: file
});
if (options.compress) {
return {
...parsed.data,
excerpt: parsed.excerpt ? fromHast(parsed.excerpt) : void 0,
body: {
...fromHast(parsed.body),
toc: parsed.toc
},
id: file.id
};
}
return {
...parsed.data,
excerpt: parsed.excerpt,
body: {
...parsed.body,
toc: parsed.toc
},
id: file.id
};
}
});
async function importPlugins(plugins = {}) {
const resolvedPlugins = {};
for (const [name, plugin] of Object.entries(plugins)) {
if (plugin) {
resolvedPlugins[name] = {
instance: plugin.instance || await import(
/* @vite-ignore */
name
).then((m) => m.default || m),
options: plugin.options || {}
};
} else {
resolvedPlugins[name] = false;
}
}
return resolvedPlugins;
}
function link(state, node) {
const properties = {
...node.attributes || {},
href: normalizeUri(normaliseLink(node.url))
};
if (node.title !== null && node.title !== void 0) {
properties.title = node.title;
}
const result = {
type: "element",
tagName: "a",
properties,
children: state.all(node)
};
state.patch(node, result);
return state.applyData(node, result);
}
function normaliseLink(link2) {
const match = link2.match(/#.+$/);
const hash = match ? match[0] : "";
if (link2.replace(/#.+$/, "").endsWith(".md") && (isRelative(link2) || !/^https?/.test(link2) && !link2.startsWith("/"))) {
return generatePath(link2.replace(".md" + hash, ""), { forceLeadingSlash: false }) + hash;
} else {
return link2;
}
}
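// --- Illustrative sketch (not part of the module) ---------------------------
// Relative `.md` links are rewritten to their generated route (keeping any
// hash); absolute URLs pass through untouched.
function exampleNormaliseLink() {
  // -> "./getting-started#setup"
  normaliseLink("./1.getting-started.md#setup");
  // -> "https://example.com/guide.md" (returned as-is)
  normaliseLink("https://example.com/guide.md");
}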
const yaml = defineTransformer({
name: "Yaml",
extensions: [".yml", ".yaml"],
parse: (file) => {
const { id, body } = file;
const { data } = parseFrontMatter(`---
${body}
---`);
let parsed = data;
if (Array.isArray(data)) {
console.warn(`YAML array is not supported in ${id}, moving the array into the \`body\` key`);
parsed = { body: data };
}
return {
...parsed,
body: parsed.body || parsed,
id
};
}
});
const json = defineTransformer({
name: "Json",
extensions: [".json"],
parse: async (file) => {
const { id, body } = file;
let parsed;
if (typeof body === "string") {
parsed = destr(body);
} else {
parsed = body;
}
if (Array.isArray(parsed)) {
console.warn(`JSON array is not supported in ${id}, moving the array into the \`body\` key`);
parsed = {
body: parsed
};
}
return {
...parsed,
body: parsed.body || parsed,
id
};
}
});
const TRANSFORMERS = [
csv,
markdown,
json,
yaml,
pathMeta
];
function getParser(ext, additionalTransformers = []) {
let parser = additionalTransformers.find((p) => ext.match(new RegExp(p.extensions.join("|"), "i")) && p.parse);
if (!parser) {
parser = TRANSFORMERS.find((p) => ext.match(new RegExp(p.extensions.join("|"), "i")) && p.parse);
}
return parser;
}
function getTransformers(ext, additionalTransformers = []) {
return [
...additionalTransformers.filter((p) => ext.match(new RegExp(p.extensions.join("|"), "i")) && p.transform),
...TRANSFORMERS.filter((p) => ext.match(new RegExp(p.extensions.join("|"), "i")) && p.transform)
];
}
async function transformContent(file, options = {}) {
const { transformers = [] } = options;
const ext = file.extension || extname(file.id);
const parser = getParser(ext, transformers);
if (!parser) {
throw new Error(`\`${ext}\` files are not supported.`);
}
const parserOptions = options[camelCase(parser.name)] || {};
const parsed = await parser.parse(file, parserOptions);
const matchedTransformers = getTransformers(ext, transformers);
const result = await matchedTransformers.reduce(async (prev, cur) => {
const next = await prev || parsed;
const transformOptions = options[camelCase(cur.name)];
if (transformOptions === false) {
return next;
}
return cur.transform(next, transformOptions || {});
}, Promise.resolve(parsed));
return result;
}
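// --- Illustrative sketch (not part of the module) ---------------------------
// Running the whole pipeline on a hypothetical markdown file: the `markdown`
// transformer parses the body, then `path-meta` decorates the result.
async function exampleTransformContent() {
  const result = await transformContent(
    { id: "content/1.guide/index.md", body: "# Getting Started" },
    { markdown: { compress: false } }
  );
  // result.path -> "/guide"; result.title typically falls back to the first
  // heading ("Getting Started") or, failing that, to the file name.
  return result;
}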
let parserOptions = {
mdcConfigs: []
};
function setParserOptions(opts) {
parserOptions = defu$2(opts, parserOptions);
}
let highlightPlugin;
let highlightPluginPromise;
async function getHighlightPluginInstance(options) {
const key = hash(JSON.stringify(options || {}));
if (highlightPlugin && highlightPlugin.key !== key) {
highlightPlugin = void 0;
highlightPluginPromise = void 0;
}
if (!highlightPlugin) {
highlightPluginPromise = highlightPluginPromise || _getHighlightPlugin(key, options);
await highlightPluginPromise;
}
return highlightPlugin;
}
async function _getHighlightPlugin(key, options) {
const langs = Array.from(/* @__PURE__ */ new Set(["bash", "html", "mdc", "vue", "yml", "scss", "ts", "typescript", ...options.langs || []]));
const themesObject = typeof options.theme === "string" ? { default: options.theme } : options.theme || { default: "material-theme-palenight" };
const bundledThemes = await Promise.all(Object.entries(themesObject).map(async ([name, theme]) => [
name,
typeof theme === "string" ? await import(`shiki/themes/${theme}.mjs`).then((m) => m.default || m) : theme
]));
const bundledLangs = await Promise.all(langs.map(async (lang) => [
typeof lang === "string" ? lang : lang.name,
typeof lang === "string" ? await import(`@shikijs/langs/${lang}`).then((m) => m.default || m) : lang
]));
const highlighter = createShikiHighlighter({
bundledThemes: Object.fromEntries(bundledThemes),
// Configure the bundled languages
bundledLangs: Object.fromEntries(bundledLangs),
engine: createOnigurumaEngine(import('shiki/wasm')),
getMdcConfigs: () => Promise.resolve(parserOptions.mdcConfigs)
});
highlightPlugin = {
key,
instance: rehypeHighlight,
...options,
options: {
highlighter: async (code, lang, theme, opts) => {
const result = await highlighter(code, lang, theme, opts);
const visitTree = {
type: "element",
children: result.tree
};
if (options.compress) {
const stylesMap = {};
visit(
visitTree,
(node) => !!node.properties?.style,
(_node) => {
const node = _node;
const style = node.properties.style;
stylesMap[style] = stylesMap[style] || "s" + hash(style).substring(0, 4);
node.properties.class = `${node.properties.class || ""} ${stylesMap[style]}`.trim();
node.properties.style = void 0;
}
);
result.style = Object.entries(stylesMap).map(([style, cls]) => `html pre.shiki code .${cls}, html code.shiki .${cls}{${style}}`).join("") + result.style;
}
return result;
},
theme: Object.fromEntries(bundledThemes)
}
};
return highlightPlugin;
}
async function createParser(collection, nuxt) {
const nuxtOptions = nuxt?.options;
const mdcOptions = nuxtOptions?.mdc || {};
const { pathMeta = {}, markdown = {}, transformers = [] } = nuxtOptions?.content?.build || {};
const rehypeHighlightPlugin = markdown.highlight !== false ? await getHighlightPluginInstance(defu$2(markdown.highlight, mdcOptions.highlight, { compress: true })) : void 0;
let extraTransformers = [];
if (nuxt?.options?.rootDir) {
const jiti = createJiti(nuxt.options.rootDir);
extraTransformers = await Promise.all(transformers.map(async (transformer) => {
const resolved = resolveAlias(transformer, nuxt?.options?.alias);
return jiti.import(resolved).then((m) => m.default || m).catch((e) => {
logger.error(`Failed to load transformer ${transformer}`, e);
return false;
});
})).then((transformers2) => transformers2.filter(Boolean));
}
const parserOptions2 = {
pathMeta,
markdown: {
compress: true,
...mdcOptions,
...markdown,
rehypePlugins: {
...mdcOptions?.rehypePlugins,
...markdown?.rehypePlugins,
// keep highlight plugin last to avoid conflict with other code plugins like `rehype-katex`
highlight: rehypeHighlightPlugin
},
remarkPlugins: {
"remark-emoji": {},
...mdcOptions?.remarkPlugins,
...markdown?.remarkPlugins
},
highlight: void 0
}
};
return async function parse(file) {
if (file.path) {
file.dirname = file.dirname ?? dirname$1(file.path);
file.extension = file.extension ?? (file.path.includes(".") ? "." + file.path.split(".").pop() : void 0);
}
if (String(file.body).includes("\r\n")) {
file.body = file.body.replace(/\r\n/g, "\n");
}
const beforeParseCtx = { file, collection, parserOptions: parserOptions2 };
await nuxt?.callHook?.("content:file:beforeParse", beforeParseCtx);
const { file: hookedFile } = beforeParseCtx;
const parsedContent = await transformContent(hookedFile, {
...beforeParseCtx.parserOptions,
transformers: extraTransformers
});
const { id, __metadata, ...parsedContentFields } = parsedContent;
const result = { id };
const meta = {};
const collectionKeys = getOrderedSchemaKeys(collection.extendedSchema);
for (const key of Object.keys(parsedContentFields)) {
if (collectionKeys.includes(key)) {
result[key] = parsedContent[key];
} else {
meta[key] = parsedContent[key];
}
}
result.meta = meta;
result.__metadata = __metadata || {};
if (collectionKeys.includes("rawbody")) {
result.rawbody = result.rawbody ?? file.body;
}
if (collectionKeys.includes("seo")) {
const seo = result.seo = result.seo || {};
seo.title = seo.title || result.title;
seo.description = seo.description || result.description;
}
const afterParseCtx = { file: hookedFile, content: result, collection };
await nuxt?.callHook?.("content:file:afterParse", afterParseCtx);
return afterParseCtx.content;
};
}
const compress = (text) => {
return new Promise((resolve, reject) => gzip(text, (err, buff) => {
if (err) {
return reject(err);
}
return resolve(buff?.toString("base64"));
}));
};
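// --- Illustrative sketch (not part of the module) ---------------------------
// The gzip + base64 dumps produced by `compress` can be restored with the
// mirror-image operation:
async function exampleDecompress(base64Dump) {
  const { gunzip } = await import("node:zlib");
  return new Promise((resolve, reject) => gunzip(Buffer.from(base64Dump, "base64"), (err, buff) => {
    if (err) {
      return reject(err);
    }
    return resolve(JSON.parse(buff.toString("utf8")));
  }));
}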
function indentLines(str, indent = 2) {
return str.replace(/ {4}/g, " ".repeat(indent)).split("\n").map((line) => " ".repeat(indent) + line).join("\n");
}
const moduleTemplates = {
types: "content/types.d.ts",
preview: "content/preview.mjs",
manifest: "content/manifest.ts",
components: "content/components.ts",
fullCompressedDump: "content/database.compressed.mjs",
fullRawDump: "content/sql_dump.txt"
};
const contentTypesTemplate = (collections) => ({
filename: moduleTemplates.types,
getContents: async ({ options }) => {
const publicCollections = options.collections.filter((c) => !c.private);
const pagesCollections = publicCollections.filter((c) => c.type === "page");
const parentInterface = (c) => c.type === "page" ? "PageCollectionItemBase" : "DataCollectionItemBase";
return [
"import type { PageCollectionItemBase, DataCollectionItemBase } from '@nuxt/content'",
"",
"declare module '@nuxt/content' {",
...await Promise.all(
publicCollections.map(async (c) => {
const type = await compile(c.schema, "CLASS").then((code) => code.replace("export interface CLASS", `interface ${pascalCase(c.name)}CollectionItem extends ${parentInterface(c)}`));
return indentLines(` ${type}`);
})
),
"",
" interface PageCollections {",
...pagesCollections.map((c) => indentLines(`${c.name}: ${pascalCase(c.name)}CollectionItem`, 4)),
" }",
"",
" interface Collections {",
...publicCollections.map((c) => indentLines(`${c.name}: ${pascalCase(c.name)}CollectionItem`, 4)),
" }",
"}",
""
].join("\n");
},
options: {
collections
}
});
const fullDatabaseCompressedDumpTemplate = (manifest) => ({
filename: moduleTemplates.fullCompressedDump,
getContents: async ({ options }) => {
const result = [];
for (const [key, dump] of Object.entries(options.manifest.dump)) {
if (options.manifest.collections.find((c) => c.name === key)?.private) {
return "";
}
const compressedDump = await compress(JSON.stringify(dump));
result.push(`export const ${key} = "${compressedDump}"`);
}
return result.join("\n");
},
write: true,
options: {
manifest
}
});
const fullDatabaseRawDumpTemplate = (manifest) => ({
filename: moduleTemplates.fullRawDump,
getContents: ({ options }) => {
return Object.entries(options.manifest.dump).map(([_key, value]) => {
return value.join("\n");
}).join("\n");
},
write: true,
options: {
manifest
}
});
const collectionDumpTemplate = (collection, manifest) => ({
filename: `content/raw/dump.${collection}.sql`,
getContents: async ({ options }) => {
return compress(JSON.stringify(options.manifest.dump[collection] || []));
},
write: true,
options: {
manifest
}
});
const componentsManifestTemplate = (manifest) => {
return {
filename: moduleTemplates.components,
write: true,
getContents: ({ app, nuxt, options }) => {
const componentsMap = app.components.filter((c) => {
if (c.island) {
return false;
}
if (c.filePath.endsWith(".css")) {
return false;
}
return nuxt.options.dev || options.manifest.components.includes(c.pascalName) || c.global;
}).reduce((map, c) => {
map[c.pascalName] = map[c.pascalName] || [
c.pascalName,
`${genDynamicImport(isAbsolute(c.filePath) ? "./" + relative(join(nuxt.options.buildDir, "content"), c.filePath).replace(/\b\.(?!vue)\w+$/g, "") : c.filePath.replace(/\b\.(?!vue)\w+$/g, ""), { wrapper: false, singleQuotes: true })}`,
c.global
];
return map;
}, {});
const componentsList = Object.values(componentsMap);
const globalComponents = componentsList.filter((c) => c[2]).map((c) => c[0]);
const localComponents = componentsList.filter((c) => !c[2]);
return [
...localComponents.map(([pascalName, type]) => `export const ${pascalName} = () => ${type}`),
`export const globalComponents: string[] = ${JSON.stringify(globalComponents)}`,
`export const localComponents: string[] = ${JSON.stringify(localComponents.map((c) => c[0]))}`
].join("\n");
},
options: {
manifest
}
};
};
const manifestTemplate = (manifest) =>