@mintlify/previewing

Preview Mintlify docs locally

import {
    findAndRemoveImports,
    stringifyTree,
    topologicalSort,
    hasImports,
    optionallyAddLeadingSlash,
    optionallyRemoveLeadingSlash,
} from '@mintlify/common';
import { outputFile } from 'fs-extra';
import { join } from 'path';
import { NEXT_PUBLIC_PATH } from '../../constants.js';
import { getOriginalSnippets } from './getSnippets.js';
import { resolveAllImports } from './resolveAllImports.js';

// BFS over the snippet graph: starting from the changed file, collect every
// snippet that directly or transitively imports it.
const findAllDependents = async (initialFileWithSlash, allSnippets, processedDataCache) => {
    const affected = new Set([initialFileWithSlash]);
    const queue = [initialFileWithSlash];
    while (queue.length > 0) {
        const currentSourceFile = queue.shift();
        for (const snippet of allSnippets) {
            const potentialDependentFile = optionallyAddLeadingSlash(snippet.filename);
            if (potentialDependentFile === currentSourceFile)
                continue;
            // Parse each snippet's imports at most once; cache the result so the
            // caller can reuse it when assembling the affected set.
            let processedData = processedDataCache.get(potentialDependentFile);
            if (processedData == null) {
                const clonedTree = structuredClone(snippet.tree);
                processedData = await findAndRemoveImports(clonedTree);
                processedDataCache.set(potentialDependentFile, processedData);
            }
            if (processedData.importMap[currentSourceFile]) {
                if (!affected.has(potentialDependentFile)) {
                    affected.add(potentialDependentFile);
                    queue.push(potentialDependentFile);
                }
            }
        }
    }
    return affected;
};

export const generateDependentSnippets = async (changedFilename, newImportData) => {
    const processedDataCache = new Map();
    const allOriginalSnippets = await getOriginalSnippets();
    const updatedSnippetFileKey = optionallyAddLeadingSlash(changedFilename);
    const affectedSnippets = await findAllDependents(updatedSnippetFileKey, allOriginalSnippets, processedDataCache);
    // Gather the processed (imports-stripped) data for every affected snippet,
    // reusing the cache populated during dependency discovery.
    const snippetPromises = Array.from(affectedSnippets).map(async (filename) => {
        const cachedData = processedDataCache.get(filename);
        if (cachedData)
            return { filename, ...cachedData };
        const originalSnippet = allOriginalSnippets.find((s) => optionallyAddLeadingSlash(s.filename) === filename);
        if (!originalSnippet)
            return null;
        const processed = await findAndRemoveImports(structuredClone(originalSnippet.tree));
        processedDataCache.set(filename, processed);
        return { filename, ...processed };
    });
    const snippets = (await Promise.all(snippetPromises)).filter((item) => item != null);
    // Swap in the freshly processed data for the file that actually changed.
    const idx = snippets.findIndex((item) => item.filename === updatedSnippetFileKey);
    if (idx !== -1) {
        snippets[idx] = {
            ...newImportData,
            filename: updatedSnippetFileKey,
        };
    }
    else {
        snippets.push({
            ...newImportData,
            filename: updatedSnippetFileKey,
        });
    }
    // Order snippets by their import relationships (reversed topological sort)
    // before resolving imports and writing them out.
    const graph = {};
    snippets.forEach((item) => {
        graph[item.filename] = Object.keys(item.importMap).map((dep) => optionallyAddLeadingSlash(dep));
    });
    const sortedSnippets = topologicalSort(graph).reverse();
    const snippetsMap = new Map(snippets.map((item) => [item.filename, item]));
    const orderedSnippets = sortedSnippets
        .map((filename) => snippetsMap.get(filename))
        .filter((item) => item != null);
    const processedSnippets = [];
    for (const currentSnippet of orderedSnippets) {
        let processedTree = currentSnippet.tree;
        // The changed file's imports were already resolved by the caller, so
        // only re-resolve the other affected snippets.
        if (currentSnippet.filename !== updatedSnippetFileKey && hasImports(currentSnippet)) {
            processedTree = await resolveAllImports(currentSnippet);
        }
        const targetFilename = optionallyRemoveLeadingSlash(currentSnippet.filename);
        const targetPath = join(NEXT_PUBLIC_PATH, targetFilename);
        await outputFile(targetPath, stringifyTree(processedTree), { flag: 'w' });
        processedSnippets.push(targetFilename);
    }
    return processedSnippets;
};
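
A minimal sketch of how this export might be driven when a snippet file changes. The module path, the `rebuildAfterEdit` wrapper, and the `parsedTree` argument are hypothetical; the shape of `newImportData` (a tree with import nodes stripped plus an `importMap` of dependencies) is inferred from how the code above spreads the return value of `findAndRemoveImports`.

// Hypothetical caller: regenerate everything that depends on a just-edited snippet.
import { findAndRemoveImports } from '@mintlify/common';
import { generateDependentSnippets } from './generateDependentSnippets.js'; // assumed path

const rebuildAfterEdit = async (changedFilename, parsedTree) => {
    // Mirror what generateDependentSnippets expects for its second argument:
    // the changed file's processed data, as returned by findAndRemoveImports.
    const newImportData = await findAndRemoveImports(parsedTree);
    const writtenFiles = await generateDependentSnippets(changedFilename, newImportData);
    console.log('Re-rendered snippets:', writtenFiles);
};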