UNPKG

alinea

Version:
180 lines (176 loc) 5.3 kB
import { pLimit } from "../../chunks/chunk-C53YJRET.js";
import "../../chunks/chunk-NZLE2WMY.js";

// src/core/source/FSSource.ts
import fs from "node:fs/promises";
import path from "node:path/posix";

// node_modules/p-debounce/index.js
// Debounce an async function: calls arriving within `wait` ms are coalesced
// and all share the result of a single invocation. With `options.before`,
// the function fires on the leading edge and later calls in the burst
// receive that leading result.
var pDebounce = (fn, wait, options = {}) => {
  if (!Number.isFinite(wait)) {
    throw new TypeError("Expected `wait` to be a finite number");
  }
  let leadingValue;
  let timeout;
  let resolveList = [];
  return function (...arguments_) {
    return new Promise((resolve) => {
      // Leading-edge mode only fires when no timer is pending.
      const callNow = options.before && !timeout;
      clearTimeout(timeout);
      timeout = setTimeout(() => {
        timeout = null;
        const result = options.before ? leadingValue : fn.apply(this, arguments_);
        for (const pending of resolveList) {
          pending(result);
        }
        resolveList = [];
      }, wait);
      if (callNow) {
        leadingValue = fn.apply(this, arguments_);
        resolve(leadingValue);
      } else {
        resolveList.push(resolve);
      }
    });
  };
};
// Collapse overlapping calls into one in-flight promise; a new invocation
// only starts once the previous one has settled.
pDebounce.promise = (function_) => {
  let inFlight;
  return async function (...arguments_) {
    if (inFlight) {
      return inFlight;
    }
    try {
      inFlight = function_.apply(this, arguments_);
      return await inFlight;
    } finally {
      inFlight = void 0;
    }
  };
};
var p_debounce_default = pDebounce;

// src/core/source/FSSource.ts
import { assert } from "../util/Assert.js";
import { accumulate } from "../util/Async.js";
import { hashBlob } from "./GitUtils.js";
import { ReadonlyTree, WriteableTree } from "./Tree.js";

// Serialize tree scans and file-system writes so they never interleave.
var limit = pLimit(1);

// Content source backed by a directory on disk. Files are hashed into a
// git-style tree; mtimes are tracked so unchanged files are not re-hashed.
var FSSource = class {
  #current = ReadonlyTree.EMPTY;
  #cwd;
  // blob sha -> relative file path, recorded while hashing
  #locations = /* @__PURE__ */ new Map();
  // relative file path -> mtimeMs observed when the file was last hashed
  #lastModified = /* @__PURE__ */ new Map();

  constructor(cwd) {
    this.#cwd = cwd;
  }

  /** Recursively scan the directory and compile it into a content tree. */
  async getTree() {
    return limit(async () => {
      const previous = this.#current;
      const builder = new WriteableTree();
      const names = await fs.readdir(this.#cwd, { recursive: true });
      await Promise.all(names.map((name) => this.getFile(previous, builder, name)));
      const compiled = await builder.compile(previous);
      this.#current = compiled;
      return compiled;
    });
  }

  /**
   * Add one file to the tree builder, reusing the previous sha when the
   * mtime is unchanged. Returns [sha, contents] only when the file was
   * actually (re)read from disk; otherwise returns undefined.
   */
  async getFile(current, builder, file) {
    const relative = file.replaceAll("\\", "/");
    const absolute = path.join(this.#cwd, relative);
    let stat;
    try {
      stat = await fs.stat(absolute);
      if (!stat.isFile()) return;
    } catch {
      // Entry vanished between readdir and stat — skip it.
      return;
    }
    const knownMtime = this.#lastModified.get(relative);
    if (knownMtime && stat.mtimeMs === knownMtime) {
      const entry = current.get(relative);
      if (entry && typeof entry.sha === "string") {
        builder.add(relative, entry.sha);
        return;
      }
    }
    try {
      const contents = await fs.readFile(absolute);
      const sha = await hashBlob(contents);
      this.#locations.set(sha, relative);
      this.#lastModified.set(relative, stat.mtimeMs);
      builder.add(relative, sha);
      return [sha, contents];
    } catch {
      // Best effort: an unreadable file is simply left out of the tree.
    }
  }

  /** Rescan and return the tree, or undefined when its sha matches `sha`. */
  async getTreeIfDifferent(sha) {
    const tree = await this.getTree();
    if (tree.sha === sha) return void 0;
    return tree;
  }

  /** Yield [sha, contents] pairs for blobs recorded during a prior scan. */
  async *getBlobs(shas) {
    for (const sha of shas) {
      const location = this.#locations.get(sha);
      assert(location, `Missing path for blob ${sha}`);
      yield [sha, await fs.readFile(`${this.#cwd}/${location}`)];
    }
  }

  /** Apply a batch of add/delete changes to the underlying directory. */
  async applyChanges(batch) {
    return limit(async () => {
      const tasks = batch.changes.map(async (change) => {
        if (change.op === "delete") {
          // Ignore files that are already gone.
          return fs.unlink(`${this.#cwd}/${change.path}`).catch(() => {
          });
        }
        if (change.op === "add") {
          const { contents } = change;
          assert(contents, "Missing contents");
          const dir = path.dirname(change.path);
          await fs.mkdir(`${this.#cwd}/${dir}`, { recursive: true }).catch(() => {
          });
          return fs.writeFile(`${this.#cwd}/${change.path}`, contents);
        }
      });
      await Promise.all(tasks);
    });
  }
};

// FSSource variant that caches the compiled tree (as a promise) and the
// contents of freshly read blobs, with debounced cache refreshes.
var CachedFSSource = class extends FSSource {
  // Cached promise of the compiled tree; unset until the first scan.
  #tree;
  // sha -> contents for blobs read during the latest scan / fetch.
  #blobs = /* @__PURE__ */ new Map();

  constructor(cwd) {
    super(cwd);
  }

  // Debounced rebuild: refresh calls within a 50ms window share one scan.
  refresh = p_debounce_default(async () => {
    this.#blobs = /* @__PURE__ */ new Map();
    return this.#tree = super.getTree();
  }, 50);

  /** Serve the cached tree promise, triggering a scan on first use. */
  getTree() {
    return this.#tree ?? this.refresh();
  }

  /** Delegate to FSSource, remembering the contents of any fresh read. */
  async getFile(current, builder, file) {
    const result = await super.getFile(current, builder, file);
    if (result) this.#blobs.set(result[0], result[1]);
    return result;
  }

  /** Serve blobs from cache where possible, reading the rest from disk. */
  async *getBlobs(shas) {
    const cachedEntries = [];
    const missing = [];
    for (const sha of shas) {
      if (this.#blobs.has(sha)) cachedEntries.push([sha, this.#blobs.get(sha)]);
      else missing.push(sha);
    }
    const fetched = missing.length > 0 ? await accumulate(super.getBlobs(missing)) : [];
    const entries = [...cachedEntries, ...fetched];
    // The cache is replaced with exactly the blobs just requested.
    this.#blobs = new Map(entries);
    yield* entries;
  }
};
export { CachedFSSource, FSSource };