@nuxt/content

Write your content inside your Nuxt app

import { isAbsolute } from "pathe";
import { decompressSQLDump } from "./dump.js";
import { fetchDatabase } from "./api.js";
import { refineContentFields } from "./collection.js";
import { tables, checksums, checksumsStructure } from "#content/manifest";
import adapter from "#content/adapter";
import localAdapter from "#content/local-adapter";

// Module-level cache: the adapter is created once and reused across calls.
let db;

// Returns a thin async wrapper (all/first/exec) around the configured database
// adapter. Dev and prerender environments use the local adapter; production
// uses the adapter selected at build time.
export default function loadDatabaseAdapter(config) {
  const { database, localDatabase } = config;
  if (!db) {
    if (import.meta.dev || ["nitro-prerender", "nitro-dev"].includes(import.meta.preset)) {
      db = localAdapter(refineDatabaseConfig(localDatabase));
    } else {
      db = adapter(refineDatabaseConfig(database));
    }
  }
  return {
    all: async (sql, params = []) => {
      return db.prepare(sql).all(...params)
        .then((result) => (result || []).map((item) => refineContentFields(sql, item)));
    },
    first: async (sql, params = []) => {
      return db.prepare(sql).get(...params)
        .then((item) => item ? refineContentFields(sql, item) : item);
    },
    exec: async (sql, params = []) => {
      return db.prepare(sql).run(...params);
    }
  };
}

// Per-collection integrity state: `false` means a check is in flight or has
// succeeded, `true` (or undefined) means a check is still required.
const checkDatabaseIntegrity = {};
const integrityCheckPromise = {};

// Ensures the collection's dump has been imported and is up to date before
// queries run. Concurrent callers share a single in-flight check per collection.
export async function checkAndImportDatabaseIntegrity(event, collection, config) {
  if (checkDatabaseIntegrity[String(collection)] !== false) {
    checkDatabaseIntegrity[String(collection)] = false;
    integrityCheckPromise[String(collection)] = integrityCheckPromise[String(collection)]
      || _checkAndImportDatabaseIntegrity(event, collection, checksums[String(collection)], checksumsStructure[String(collection)], config)
        .then((isValid) => {
          checkDatabaseIntegrity[String(collection)] = !isValid;
        })
        .catch((error) => {
          console.error("Database integrity check failed", error);
          checkDatabaseIntegrity[String(collection)] = true;
          integrityCheckPromise[String(collection)] = null;
        });
  }
  if (integrityCheckPromise[String(collection)]) {
    await integrityCheckPromise[String(collection)];
  }
}

async function _checkAndImportDatabaseIntegrity(event, collection, integrityVersion, structureIntegrityVersion, config) {
  const db2 = loadDatabaseAdapter(config);
  const before = await db2.first(`SELECT * FROM ${tables.info} WHERE id = ?`, [`checksum_${collection}`]).catch(() => null);

  // A database version mismatch invalidates the whole info table.
  if (before?.version && !String(before.version)?.startsWith(`${config.databaseVersion}--`)) {
    await db2.exec(`DROP TABLE IF EXISTS ${tables.info}`);
    before.version = "";
  }

  const unchangedStructure = before?.structureVersion === structureIntegrityVersion;
  if (before?.version) {
    if (before.version === integrityVersion) {
      // Checksum matches: the data is already imported, or another instance
      // is importing it right now.
      if (before.ready) {
        return true;
      }
      await waitUntilDatabaseIsReady(db2, collection);
      return true;
    }
    // Stale checksum: clear the info row, and drop the collection table too
    // if its structure changed.
    await db2.exec(`DELETE FROM ${tables.info} WHERE id = ?`, [`checksum_${collection}`]);
    if (!unchangedStructure) {
      await db2.exec(`DROP TABLE IF EXISTS ${tables[collection]}`);
    }
  }

  // Each dump line is a SQL statement followed by a " -- <hash>" marker.
  const dump = await loadDatabaseDump(event, collection).then(decompressSQLDump);
  const dumpLinesHash = dump.map((row) => row.split(" -- ").pop());

  let hashesInDb = new Set();
  if (unchangedStructure) {
    // Incremental update: remove rows whose hash no longer appears in the dump,
    // and remember existing hashes so unchanged statements can be skipped.
    const hashListFromTheDump = new Set(dumpLinesHash);
    const hashesInDbRecords = await db2.all(`SELECT __hash__ FROM ${tables[collection]}`).catch(() => []);
    hashesInDb = new Set(hashesInDbRecords.map((r) => r.__hash__));
    const hashesToDelete = hashesInDb.difference(hashListFromTheDump);
    if (hashesToDelete.size) {
      await db2.exec(`DELETE FROM ${tables[collection]} WHERE __hash__ IN (${Array(hashesToDelete.size).fill("?").join(",")})`, Array.from(hashesToDelete));
    }
  }

  // Replay the dump sequentially.
  await dump.reduce(async (prev, sql, index) => {
    await prev;
    const hash = dumpLinesHash[index];
    // Strip the trailing " -- <hash>" marker (4 characters for " -- ").
    const statement = sql.substring(0, sql.length - hash.length - 4);
    if (unchangedStructure) {
      if (hash === "structure") {
        return Promise.resolve();
      }
      if (hashesInDb.has(hash)) {
        return Promise.resolve();
      }
    }
    await db2.exec(statement).catch((err) => {
      const message = err.message || "Unknown error";
      console.error(`Failed to execute SQL ${sql}: ${message}`);
    });
  }, Promise.resolve());

  // Verify the import: the info row should now carry the new checksum version.
  const after = await db2.first(`SELECT version FROM ${tables.info} WHERE id = ?`, [`checksum_${collection}`]).catch(() => ({ version: "" }));
  return after?.version === integrityVersion;
}

// Maximum number of 1-second polls while waiting for another initializer.
const REQUEST_TIMEOUT = 90;

async function waitUntilDatabaseIsReady(db2, collection) {
  let iterationCount = 0;
  let interval;
  await new Promise((resolve, reject) => {
    interval = setInterval(async () => {
      const row = await db2.first(`SELECT ready FROM ${tables.info} WHERE id = ?`, [`checksum_${collection}`]).catch(() => ({ ready: true }));
      if (row?.ready) {
        clearInterval(interval);
        resolve(0);
      }
      if (iterationCount++ > REQUEST_TIMEOUT) {
        clearInterval(interval);
        reject(new Error("Waiting for another database initialization timed out"));
      }
    }, 1e3);
  }).catch((e) => {
    throw e;
  }).finally(() => {
    if (interval) {
      clearInterval(interval);
    }
  });
}

async function loadDatabaseDump(event, collection) {
  return await fetchDatabase(event, String(collection)).catch((e) => {
    console.error("Failed to fetch compressed dump", e);
    return "";
  });
}

// Normalizes adapter configuration: D1 gets a bindingName fallback, SQLite
// filenames are resolved to absolute paths (with Windows drive-letter handling)
// or mapped to the in-memory database.
function refineDatabaseConfig(config) {
  if (config.type === "d1") {
    return { ...config, bindingName: config.bindingName || config.binding };
  }
  if (config.type === "sqlite") {
    const _config = { ...config };
    if (config.filename === ":memory:") {
      return { name: "memory" };
    }
    if ("filename" in config) {
      const filename = isAbsolute(config?.filename || "") || config?.filename === ":memory:"
        ? config?.filename
        : new URL(config.filename, globalThis._importMeta_.url).pathname;
      _config.path = process.platform === "win32" && filename.startsWith("/") ? filename.slice(1) : filename;
    }
    return _config;
  }
  return config;
}
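
For context, a minimal usage sketch of the two exports above, written as a Nitro event handler. The import path, the "docs" collection name, the table name, and all config values are illustrative assumptions, not part of this module; only the function signatures and the { all, first, exec } adapter shape come from the code itself.

// Usage sketch only -- import path, collection name, table name and config
// values are assumptions for illustration, not part of @nuxt/content.
import { defineEventHandler } from "h3";
import loadDatabaseAdapter, { checkAndImportDatabaseIntegrity } from "./database.js";

export default defineEventHandler(async (event) => {
  const config = {
    databaseVersion: "v3",                                          // assumed version prefix
    database: { type: "sqlite", filename: "/tmp/contents.sqlite" }, // production adapter
    localDatabase: { type: "sqlite", filename: ":memory:" }         // dev / prerender adapter
  };

  // Make sure the collection's dump has been imported (and is current)
  // before querying it.
  await checkAndImportDatabaseIntegrity(event, "docs", config);

  // The adapter exposes `all`, `first` and `exec`; real table names come from
  // the generated #content/manifest, so the literal below is only illustrative.
  const db = loadDatabaseAdapter(config);
  return db.all("SELECT * FROM _content_docs", []);
});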