UNPKG

@knapsack/app

Version:

Build Design Systems with Knapsack

125 lines 4.89 kB
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.Db = void 0;
const types_1 = require("@knapsack/types");
const path_1 = require("path");
const file_utils_1 = require("@knapsack/file-utils");
const ks_file_utils_1 = require("@knapsack/ks-file-utils");
const immer_1 = require("immer");
const file_db_1 = require("./server/dbs/file-db");
const log_1 = require("./cli/log");
/**
 * Persists Knapsack app-client data on disk: a single `db.yml` (via the
 * inherited `FileDb` machinery) plus one JSON file per block under `blocks/`
 * and one per demo under `demos/`.
 */
class Db extends file_db_1.FileDb {
    #blocksDir;
    #demosDir;
    #dataDir;
    // Snapshot of the last data read (set by `getData()` or `hydrate()`);
    // `savePrep()` diffs against it to decide which block/demo files to delete.
    #dbData;
    /**
     * @param {{ data: string }} opts - `data` is the directory containing
     *   `db.yml`; `blocks/` and `demos/` subdirectories live inside it.
     */
    constructor({ data }) {
        super({
            filePath: (0, path_1.join)(data, 'db.yml'),
            type: 'yml',
            writeFileIfAbsent: true,
            orderAlphabetically: true,
            defaults: {
                // @ts-expect-error - `blocks.byId` is missing from `db.yml` but not from `KsAppClientData['db']` – we'll add it when we read all the files
                blocks: {
                    settings: {},
                },
                // @ts-expect-error - same as above
                demos: {
                    settings: {},
                },
                // @ts-expect-error - same as above
                tabs: {
                    settings: {},
                },
                // @ts-expect-error - same as above
                blockCollections: {
                    settings: {},
                },
                settings: {},
            },
        });
        this.#dataDir = data;
        this.#blocksDir = (0, path_1.join)(data, 'blocks');
        this.#demosDir = (0, path_1.join)(data, 'demos');
    }
    /**
     * Initializes the underlying FileDb and ensures the blocks/demos
     * directories exist, then kicks off a background sanity check that the
     * legacy `blocks.byId` data has been migrated out of `db.yml`.
     */
    init = async (opt) => {
        await Promise.all([
            super.init(opt),
            (0, file_utils_1.ensureDir)(this.#blocksDir),
            (0, file_utils_1.ensureDir)(this.#demosDir),
        ]);
        // doing it this way instead of `await` so it doesn't block the `init` lifecycle
        super.getData()
            .then((db) => {
                if ((0, types_1.isObject)(db.blocks.byId)) {
                    log_1.log.error(`db.yml should not have "blocks.byId" - it looks like you need to run 'npx @knapsack/update@latest' to migrate your data`);
                    process.exit(1);
                }
            })
            // FIX: the promise is intentionally not awaited, so without this
            // `.catch` a read failure would surface as an unhandled rejection
            // (crashing newer Node versions) instead of a logged error.
            .catch((err) => {
                log_1.log.error(`Failed to validate db.yml during init: ${err instanceof Error ? err.message : String(err)}`);
            });
    };
    /**
     * Ran only on `knapsack serve`. Passed in full contents of App Client Data, which was created as json file in cache dir from `knapsack build`.
     */
    hydrate = async ({ appClientData }) => {
        this.#dbData = appClientData.db;
    };
    /**
     * Reads the full DB (db.yml merged with the per-block/per-demo JSON files)
     * and caches the result in `#dbData` for later diffing in `savePrep`.
     */
    getData = async () => {
        const db = await (0, ks_file_utils_1.readDb)({
            dataDir: this.#dataDir,
            blocksSubDir: this.#blocksDir,
            demosSubDir: this.#demosDir,
        });
        this.#dbData = db;
        return db;
    };
    /**
     * Computes the full list of file writes/deletes needed to persist `db`:
     * one JSON file per block and per demo, deletions for entries that
     * disappeared since the last read, and the `db.yml` itself (with the
     * per-file data stripped out).
     *
     * @returns {Promise<Array<object>>} `KnapsackFile`-shaped descriptors.
     */
    savePrep = async (db) => {
        // FIX: `#dbData` is only populated by `getData()`/`hydrate()`; lazily
        // read it here so calling `savePrep` first doesn't crash on `undefined`.
        if (!this.#dbData) {
            await this.getData();
        }
        // we'll remove IDs from these sets as we go
        const blockIdsToDelete = new Set(Object.keys(this.#dbData.blocks.byId));
        const demoIdsToDelete = new Set(Object.keys(this.#dbData.demos.byId));
        const modifiedFiles = [
            ...Object.values(db.blocks.byId).map((block) => {
                blockIdsToDelete.delete(block.id);
                return {
                    path: (0, path_1.join)(this.#blocksDir, `block.${block.id}.json`),
                    contents: JSON.stringify(block, null, 2),
                    encoding: 'utf8',
                };
            }),
            ...Object.values(db.demos.byId).map((demo) => {
                demoIdsToDelete.delete(demo.id);
                return {
                    path: (0, path_1.join)(this.#demosDir, `demo.${demo.id}.json`),
                    contents: JSON.stringify(demo, null, 2),
                    encoding: 'utf8',
                };
            }),
        ];
        // Now that we've removed all the IDs that exist in the new data, we can
        // delete the rest - which is done by having a `KnapsackFile` object with
        // `isDeleted: true`
        const deletedFiles = [
            ...Array.from(blockIdsToDelete).map((id) => ({
                path: (0, path_1.join)(this.#blocksDir, `block.${id}.json`),
                isDeleted: true,
                contents: '',
                encoding: 'utf8',
            })),
            ...Array.from(demoIdsToDelete).map((id) => ({
                path: (0, path_1.join)(this.#demosDir, `demo.${id}.json`),
                isDeleted: true,
                contents: '',
                encoding: 'utf8',
            })),
        ];
        // removing the objects that come from multiple JSON files
        const cleanedDb = (0, immer_1.produce)(db, (dbDraft) => {
            delete dbDraft.blocks.byId;
            delete dbDraft.demos.byId;
        });
        // it'll be an array of one
        const dbFiles = await super.savePrep(cleanedDb);
        return [...dbFiles, ...modifiedFiles, ...deletedFiles];
    };
}
exports.Db = Db;
//# sourceMappingURL=db.js.map