UNPKG

everything-dev

Version:

A consolidated product package for building Module Federation apps with oRPC APIs.

820 lines (818 loc) 29.5 kB
import { fetchBosConfigFromFastKv } from "../fastkv.mjs";
import { loadManifestNormalizationSpec, normalizePackageManifestsInTree } from "../internal/manifest-normalizer.mjs";
import { saveBosConfig } from "../utils/save-config.mjs";
import { writeSnapshot } from "./snapshot.mjs";
import { createRequire } from "node:module";
import { createWriteStream, existsSync, lstatSync, mkdirSync, mkdtempSync, readFileSync, rmSync, writeFileSync } from "node:fs";
import { dirname, join, resolve } from "node:path";
import { createHash } from "node:crypto";
import { tmpdir } from "node:os";
import { pipeline } from "node:stream/promises";
import { execa } from "execa";
import { glob } from "glob";

//#region src/cli/init.ts
const require = createRequire(import.meta.url);

/** Root-level files/globs copied into every newly initialized project. */
const INIT_ROOT_PATTERNS = [
	"bos.config.json",
	"package.json",
	".env.example",
	".gitignore",
	"biome.json",
	"bunfig.toml",
	"Dockerfile",
	"railway.json",
	".agent/**",
	"AGENTS.md",
	".opencode/skills/everything-dev/**",
	".changeset/config.json",
	".changeset/README.md",
	"README.md",
	"CONTRIBUTING.md",
	".github/templates/**"
];

/** Packages owned by the framework; removed from devDependencies and pinned via the catalog. */
const FRAMEWORK_PACKAGES = ["every-plugin", "everything-dev"];

/** Maps an override section name to the workspace package dirs it contributes. */
const OVERRIDE_WORKSPACE_MAP = {
	ui: ["ui"],
	api: ["api"],
	host: ["host"],
	plugins: []
};

/**
 * Resolve the directory to copy template files from.
 *
 * Resolution order:
 *  1. `opts.source` — an explicit local directory (must contain bos.config.json).
 *  2. The parent gateway config's `repository` field (downloaded as a tarball).
 *  3. A repository found by walking the parent's `extends` chain.
 *  4. No source at all (sourceDir is "" and only the parent config is returned).
 *
 * @returns {{ sourceDir: string, parentConfig: object, cleanup: () => Promise<void> }}
 * @throws {Error} when an explicit source directory lacks bos.config.json.
 */
async function resolveSourceDir(opts) {
	if (opts.source) {
		const sourceDir = resolve(opts.source);
		if (!existsSync(join(sourceDir, "bos.config.json"))) throw new Error(`No bos.config.json found in source directory: ${sourceDir}`);
		return {
			sourceDir,
			parentConfig: JSON.parse(readFileSync(join(sourceDir, "bos.config.json"), "utf-8")),
			cleanup: async () => {}
		};
	}
	const parentConfig = await fetchParentConfig(opts.extendsAccount, opts.extendsGateway);
	if (parentConfig.repository) {
		const { dir: sourceDir, cleanup } = await downloadTarball(parentConfig.repository);
		return { sourceDir, parentConfig, cleanup };
	}
	const chainResult = await resolveRepositoryViaExtendsChain(opts.extendsAccount, opts.extendsGateway);
	if (chainResult?.repository) {
		const { dir: sourceDir, cleanup } = await downloadTarball(chainResult.repository);
		return { sourceDir, parentConfig: chainResult.config, cleanup };
	}
	return { sourceDir: "", parentConfig, cleanup: async () => {} };
}

/**
 * Build the glob patterns to copy for an init run: the root patterns plus
 * one section glob per requested override ("ui", "api", "host"), and one
 * `plugins/<name>/**` glob per selected plugin.
 */
function buildInitPatterns(overrides, plugins) {
	const has = (section) => overrides.includes(section);
	const patterns = [...INIT_ROOT_PATTERNS];
	if (has("ui")) patterns.push("ui/**");
	if (has("api")) patterns.push("api/**");
	if (has("host")) patterns.push("host/**");
	if (has("plugins")) for (const plugin of plugins ?? []) patterns.push(`plugins/${plugin}/**`);
	return patterns;
}

/**
 * Map a source-relative path to its destination path. Files under
 * `.github/templates/` are promoted into `.github/` so the parent repo's
 * workflow templates become live workflows in the child project.
 */
function sourcePathToDestinationPath(filePath) {
	return filePath.startsWith(".github/templates/") ? filePath.replace(/^\.github\/templates\//, ".github/") : filePath;
}

/** Fetch a gateway's bos config from FastKV by its `bos://account/gateway` ref. */
async function fetchParentConfig(extendsAccount, extendsGateway) {
	return fetchBosConfigFromFastKv(`bos://${extendsAccount}/${extendsGateway}`);
}

/**
 * Walk the `extends` chain starting at the given gateway until a config with
 * a `repository` field is found. Cycles are broken via the `visited` set, and
 * any fetch/parse error terminates that branch with null (best-effort lookup).
 *
 * @returns {{ repository: string, config: object } | null}
 */
async function resolveRepositoryViaExtendsChain(extendsAccount, extendsGateway, visited = /* @__PURE__ */ new Set()) {
	const key = `bos://${extendsAccount}/${extendsGateway}`;
	if (visited.has(key)) return null;
	visited.add(key);
	try {
		const config = await fetchParentConfig(extendsAccount, extendsGateway);
		if (config.repository) return { repository: config.repository, config };
		const extendsRef = config.extends;
		if (extendsRef && typeof extendsRef === "string") {
			// Accept both "bos://account/gateway" and bare "account/gateway" refs.
			const match = (extendsRef.startsWith("bos://") ? extendsRef : `bos://${extendsRef}`).match(/^bos:\/\/([^/]+)\/(.+)$/);
			if (match) {
				const result = await resolveRepositoryViaExtendsChain(match[1], match[2], visited);
				if (result) return result;
			}
		}
		return null;
	} catch {
		return null;
	}
}

/**
 * Best-effort detection of the `origin` git remote for a directory,
 * normalized to a canonical https://github.com/... URL when possible.
 * Returns undefined when there is no remote or git is unavailable.
 */
async function detectGitRemoteUrl(directory) {
	try {
		const { stdout } = await execa("git", ["remote", "get-url", "origin"], { cwd: directory, stdio: "pipe" });
		const url = stdout.trim();
		if (!url) return void 0;
		return normalizeGitUrl(url);
	} catch {
		return;
	}
}

/**
 * Normalize SSH (`git@github.com:owner/repo.git`) and HTTPS GitHub URLs to
 * `https://github.com/owner/repo`; other URLs only have a trailing `.git` stripped.
 */
function normalizeGitUrl(url) {
	const sshMatch = url.match(/^git@github\.com:([^/]+)\/([^/]+?)(?:\.git)?$/);
	if (sshMatch) return `https://github.com/${sshMatch[1]}/${sshMatch[2]}`;
	const httpsMatch = url.match(/^https?:\/\/github\.com\/([^/]+)\/([^/]+?)(?:\.git)?(?:\/.*)?$/);
	if (httpsMatch) return `https://github.com/${httpsMatch[1]}/${httpsMatch[2]}`;
	return url.endsWith(".git") ? url.slice(0, -4) : url;
}

/**
 * Download a GitHub repository tarball (branch "main") and extract it into a
 * fresh temp directory. Prefers the bundled `tar` package and falls back to
 * the system `tar` binary.
 *
 * Fix over the original: the download temp dir is now always removed (it
 * leaked when the stream pipeline or both extraction attempts threw), and the
 * extract dir is removed if extraction fails.
 *
 * @returns {{ dir: string, cleanup: () => Promise<void> }} extracted dir + cleanup fn.
 * @throws {Error} on unparsable URLs, HTTP failures, or extraction failures.
 */
async function downloadTarball(repoUrl) {
	const parsed = parseGitHubUrl(repoUrl);
	if (!parsed) throw new Error(`Cannot parse repository URL: ${repoUrl}`);
	const { owner, repo, branch } = parsed;
	const tarballUrl = `https://api.github.com/repos/${owner}/${repo}/tarball/${branch}`;
	const tmpDir = mkTmpDir("bos-init-tarball-");
	const tarballPath = join(tmpDir, "source.tar.gz");
	try {
		const response = await fetch(tarballUrl, {
			headers: { "User-Agent": "everything-dev" },
			redirect: "follow"
		});
		if (!response.ok) throw new Error(`GitHub tarball download failed: ${response.status} ${response.statusText}`);
		if (!response.body) throw new Error("GitHub tarball download returned empty body");
		await pipeline(response.body, createWriteStream(tarballPath));
		const extractDir = mkTmpDir("bos-init-extract-");
		try {
			try {
				// `strip: 1` drops the GitHub-generated top-level "<owner>-<repo>-<sha>/" dir.
				await require("tar").extract({ cwd: extractDir, file: tarballPath, strip: 1 });
			} catch {
				// Fall back to the system tar binary when the JS implementation fails.
				await execCommand("tar", ["-xzf", tarballPath, "--strip-components=1", "-C", extractDir]);
			}
		} catch (err) {
			rmSync(extractDir, { recursive: true, force: true });
			throw err;
		}
		return {
			dir: extractDir,
			cleanup: async () => {
				rmSync(extractDir, { recursive: true, force: true });
			}
		};
	} finally {
		// The tarball itself is no longer needed once extraction succeeds or fails.
		rmSync(tmpDir, { recursive: true, force: true });
	}
}

/**
 * Parse a GitHub HTTPS or SSH URL into { owner, repo, branch }.
 * Branch is always "main" (no branch info is present in these URL forms).
 * Returns null for non-GitHub URLs.
 */
function parseGitHubUrl(url) {
	const httpsMatch = url.match(/^https?:\/\/github\.com\/([^/]+)\/([^/]+?)(?:\.git)?(?:\/.*)?$/);
	if (httpsMatch) return { owner: httpsMatch[1], repo: httpsMatch[2], branch: "main" };
	const sshMatch = url.match(/^git@github\.com:([^/]+)\/([^/]+?)(?:\.git)?$/);
	if (sshMatch) return { owner: sshMatch[1], repo: sshMatch[2], branch: "main" };
	return null;
}

/**
 * Copy every file matching `patterns` from sourceDir into destination,
 * applying sourcePathToDestinationPath to each relative path. node_modules,
 * .git, dist and .bos trees are always excluded.
 *
 * @returns {number} count of files copied.
 */
async function copyFilteredFiles(sourceDir, destination, patterns, _options) {
	if (patterns.length === 0) return 0;
	const allFiles = /* @__PURE__ */ new Set();
	for (const pattern of patterns) {
		const matches = await glob(pattern, {
			cwd: sourceDir,
			nodir: true,
			dot: true,
			absolute: false,
			ignore: ["**/node_modules/**", "**/.git/**", "**/dist/**", "**/.bos/**"]
		});
		for (const match of matches) allFiles.add(match);
	}
	mkdirSync(destination, { recursive: true });
	let count = 0;
	for (const filePath of allFiles) {
		const src = join(sourceDir, filePath);
		// Skip symlinks and anything else that is not a regular file.
		if (!lstatSync(src).isFile()) continue;
		const dest = join(destination, sourcePathToDestinationPath(filePath));
		mkdirSync(dirname(dest), { recursive: true });
		writeFileSync(dest, readFileSync(src));
		count++;
	}
	return count;
}

/** Remove deployment-only fields that must not be inherited by a child project. */
function stripProductionFields(entry) {
	delete entry.production;
	delete entry.integrity;
	delete entry.ssr;
	delete entry.ssrIntegrity;
}

/**
 * Personalize a freshly copied project in `destination`:
 *  - rewrite bos.config.json (extends/account/domain/repository, strip
 *    non-overridden app sections and unselected plugins, sync-mode key merge),
 *  - rewrite package.json (workspaces, scripts, framework deps via catalog),
 *  - prune stale entries from api/tsconfig.json `files`,
 *  - normalize workspace manifests,
 *  - seed placeholder *.gen.ts files so typechecking works before first codegen.
 *
 * `opts.mode === "sync"` additionally reconciles root config keys against
 * `opts.existingConfig`, preserving user-added keys.
 */
async function personalizeConfig(destination, opts) {
	const has = (section) => opts.overrides.includes(section);
	// Root config keys the caller set explicitly on the CLI (everything in opts
	// except the structural/meta options below).
	const explicitRootKeys = new Set(Object.entries(opts).filter(([key, value]) => value !== void 0 && ![
		"extendsAccount",
		"extendsGateway",
		"plugins",
		"overrides",
		"pluginRoutes",
		"workspaceOpts",
		"mode",
		"existingConfig"
	].includes(key)).map(([key]) => key));
	const configPath = join(destination, "bos.config.json");
	if (existsSync(configPath)) {
		const config = JSON.parse(readFileSync(configPath, "utf-8"));
		config.extends = `bos://${opts.extendsAccount}/${opts.extendsGateway}`;
		if (opts.account) config.account = opts.account;
		if (opts.domain) config.domain = opts.domain;
		if (opts.repository) config.repository = opts.repository;
		else delete config.repository;
		// These are parent-specific unless explicitly provided for the child.
		for (const field of ["title", "description", "testnet", "staging"]) if (!(field in opts)) delete config[field];
		if (config.app && typeof config.app === "object") {
			const app = config.app;
			for (const entryKey of Object.keys(app)) {
				// Drop known sections that were not selected; "auth" rides with "plugins"
				// via has(entryKey) being false for it unless overridden.
				if (!has(entryKey) && (entryKey === "host" || entryKey === "ui" || entryKey === "api" || entryKey === "auth")) {
					delete app[entryKey];
					continue;
				}
				const entry = app[entryKey];
				if (entry && typeof entry === "object") stripProductionFields(entry);
			}
		}
		if (has("plugins")) {
			if (config.plugins && typeof config.plugins === "object") {
				const plugins = config.plugins;
				if (opts.plugins !== void 0) {
					for (const pluginKey of Object.keys(plugins)) if (!opts.plugins.includes(pluginKey)) delete plugins[pluginKey];
				}
				for (const pluginKey of Object.keys(plugins)) {
					const plugin = plugins[pluginKey];
					let pluginObj;
					if (typeof plugin === "string") {
						// String shorthand → object form so production fields can be stripped.
						pluginObj = { extends: plugin };
						plugins[pluginKey] = pluginObj;
					} else if (plugin && typeof plugin === "object") {
						pluginObj = { ...plugin };
						plugins[pluginKey] = pluginObj;
					} else continue;
					stripProductionFields(pluginObj);
				}
				if (Object.keys(plugins).length === 0) delete config.plugins;
			}
		} else delete config.plugins;
		if (opts.mode === "sync" && opts.existingConfig) {
			// Keys the sync machinery owns; user keys outside these are preserved.
			const managedRootKeys = new Set(["extends", "account", "domain", "app", "plugins", "shared"]);
			const preservedRootKeys = new Set([...managedRootKeys, ...Object.keys(opts.existingConfig), ...explicitRootKeys]);
			for (const key of Object.keys(config)) if (!preservedRootKeys.has(key)) delete config[key];
			// Restore user keys from the pre-sync config that the template did not set.
			for (const [key, value] of Object.entries(opts.existingConfig)) if (!(key in config) && !managedRootKeys.has(key) && !explicitRootKeys.has(key)) config[key] = value;
		}
		await saveBosConfig(destination, config);
	}
	const pkgPath = join(destination, "package.json");
	if (existsSync(pkgPath)) {
		const pkg = JSON.parse(readFileSync(pkgPath, "utf-8"));
		if (pkg.workspaces && typeof pkg.workspaces === "object") {
			const ws = pkg.workspaces;
			if (Array.isArray(ws.packages)) {
				ws.packages = ws.packages.filter((p) => {
					if (p.startsWith("packages/")) return false;
					if (p === "host") return has("host");
					if (p.startsWith("plugins/")) return false;
					return true;
				});
				if (has("plugins")) {
					if (!ws.packages.includes("plugins/*")) ws.packages.push("plugins/*");
				}
			}
		}
		if (pkg.scripts && typeof pkg.scripts === "object") {
			const scripts = pkg.scripts;
			// Monorepo scripts call the CLI from source; child projects use the installed bin.
			const FROM = "bun packages/everything-dev/src/cli.ts";
			const TO = "node_modules/.bin/bos";
			const rewrite = (key) => {
				if (scripts[key]?.includes(FROM)) scripts[key] = scripts[key].replaceAll(FROM, TO);
			};
			for (const key of ["dev", "dev:ui", "dev:api", "dev:proxy", "build", "deploy", "publish", "start", "bos"]) rewrite(key);
			scripts.postinstall = "node_modules/.bin/bos types gen || true";
			scripts["types:gen"] = "node_modules/.bin/bos types gen";
			if (scripts.typecheck) {
				scripts.typecheck = scripts.typecheck.replace("bun run types:gen && ", "").replace(/bun run --cwd packages\/everything-dev typecheck & ?/, "");
				if (!has("ui")) scripts.typecheck = scripts.typecheck.replace(/bun run --cwd ui tsc --noEmit & ?/, "");
				if (!has("api")) scripts.typecheck = scripts.typecheck.replace(/bun run --cwd api tsc --noEmit & ?/, "");
				if (!has("host")) scripts.typecheck = scripts.typecheck.replace(/bun run --cwd host tsc --noEmit & ?/, "");
			}
		}
		if (pkg.devDependencies && typeof pkg.devDependencies === "object") {
			const deps = pkg.devDependencies;
			// Framework packages become runtime deps resolved via the catalog below.
			delete deps["every-plugin"];
			delete deps["everything-dev"];
		}
		if (!pkg.workspaces || typeof pkg.workspaces !== "object") pkg.workspaces = { packages: [], catalog: {} };
		const workspaces = pkg.workspaces;
		if (!workspaces.catalog || typeof workspaces.catalog !== "object") workspaces.catalog = {};
		if (!pkg.dependencies) pkg.dependencies = {};
		const deps = pkg.dependencies;
		const spec = opts.workspaceOpts?.sourceDir ? loadManifestNormalizationSpec(opts.workspaceOpts.sourceDir) : null;
		if (spec) {
			workspaces.catalog["everything-dev"] = spec.rootCatalog["everything-dev"];
			workspaces.catalog["every-plugin"] = spec.rootCatalog["every-plugin"];
		}
		// Locally resolvable framework versions take precedence over the spec's.
		const frameworkCatalog = resolveFrameworkCatalog();
		for (const [name, version] of Object.entries(frameworkCatalog)) workspaces.catalog[name] = version;
		if (!deps["everything-dev"]) deps["everything-dev"] = "catalog:";
		if (!deps["every-plugin"]) deps["every-plugin"] = "catalog:";
		writeFileSync(pkgPath, `${JSON.stringify(pkg, null, 2)}\n`);
	}
	const apiTsConfigPath = join(destination, "api", "tsconfig.json");
	if (existsSync(apiTsConfigPath)) {
		const apiTsConfig = JSON.parse(readFileSync(apiTsConfigPath, "utf-8"));
		if (apiTsConfig.files) {
			// Files listed in `files` that were not copied would break tsc.
			const validFiles = apiTsConfig.files.filter((f) => existsSync(join(destination, "api", f)));
			if (validFiles.length !== apiTsConfig.files.length) {
				if (validFiles.length === 0) delete apiTsConfig.files;
				else apiTsConfig.files = validFiles;
				writeFileSync(apiTsConfigPath, `${JSON.stringify(apiTsConfig, null, 2)}\n`);
			}
		}
	}
	await resolveWorkspaceRefs(destination, opts.workspaceOpts);
	// Seed empty generated-type stubs so typechecking works before first codegen.
	if (has("ui")) {
		const genContractPath = join(destination, "ui", "src", "lib", "api-types.gen.ts");
		if (!existsSync(genContractPath)) {
			mkdirSync(dirname(genContractPath), { recursive: true });
			writeFileSync(genContractPath, `export type ApiContract = Record<string, never>;\n`);
		}
	}
	if (has("api")) {
		const pluginsClientGenPath = join(destination, "api", "src", "lib", "plugins-types.gen.ts");
		if (!existsSync(pluginsClientGenPath)) {
			mkdirSync(dirname(pluginsClientGenPath), { recursive: true });
			writeFileSync(pluginsClientGenPath, `import type { ContractRouterClient, AnyContractRouter } from "@orpc/contract";\ntype ClientFactory<C extends AnyContractRouter> = (context?: Record<string, unknown>) => ContractRouterClient<C>;\nexport type PluginsClient = Record<string, never>;\n`);
		}
	}
	const authTypesContent = generateAuthTypesTemplate();
	const authTypesPaths = [];
	if (has("ui")) authTypesPaths.push(join(destination, "ui", "src", "lib", "auth-types.gen.ts"));
	if (has("api")) authTypesPaths.push(join(destination, "api", "src", "lib", "auth-types.gen.ts"));
	if (has("host") && existsSync(join(destination, "host", "src"))) authTypesPaths.push(join(destination, "host", "src", "lib", "auth-types.gen.ts"));
	for (const authTypesGenPath of authTypesPaths) if (!existsSync(authTypesGenPath)) {
		mkdirSync(dirname(authTypesGenPath), { recursive: true });
		writeFileSync(authTypesGenPath, authTypesContent);
	}
}

/**
 * Return the TypeScript source for the auth-types.gen.ts placeholder, derived
 * from better-auth's inferred session types.
 * NOTE(review): the scraped bundle had flattened this template's newlines to
 * spaces; declarations are reconstructed one per line (semantically equivalent TS).
 */
function generateAuthTypesTemplate() {
	return `import type { Auth } from "better-auth";
export type { Auth } from "better-auth";
export type AuthSessionUser = NonNullable<Auth["$Infer"]["Session"]["user"]> & { role?: string | null; isAnonymous?: boolean | null; walletAddress?: string | null; banned?: boolean | null; };
export type AuthSessionData = NonNullable<Auth["$Infer"]["Session"]["session"]> & { activeOrganizationId?: string | null; };
export type AuthSession = { user: AuthSessionUser | null; session: AuthSessionData | null; };
export interface AuthOrganizationContext { activeOrganizationId: string | null; organization: { id: string; name: string; slug: string; logo?: string | null; metadata?: Record<string, unknown> } | null; member: { id: string; role: string } | null; isPersonal: boolean; hasOrganization: boolean; }
export interface AuthRequestContext { user: AuthSessionUser | null; userId: string | null; isAuthenticated: boolean; authMethod: "session" | "apiKey" | "anonymous" | "none"; near: { primaryAccountId: string | null; linkedAccounts: Array<{ accountId: string; network: string; publicKey: string; isPrimary: boolean }>; hasNearAccount: boolean; }; organization: AuthOrganizationContext; organizations?: Array<{ id: string; role: string; name?: string; slug?: string }>; }
export type AuthActiveMember = { id: string | null; role: string | null; organizationId: string | null };
export type AuthOrganization = NonNullable<AuthOrganizationContext["organization"]>;
export type AuthOrganizationMember = NonNullable<AuthOrganizationContext["member"]>;
export type AuthOrganizationSummary = NonNullable<AuthRequestContext["organizations"]>[number];
export type AuthBaseSession = Auth["$Infer"]["Session"];
export type createAuthInstance = never;
export interface AuthServices { auth: Auth; db: unknown; driver: { close(): Promise<void> }; handler: (req: Request) => Promise<Response>; }
`;
}

/** Run `bun install --ignore-scripts` in `destination` with spinner progress. */
async function runBunInstall(destination, spinner) {
	await runWithProgress("bun", ["install", "--ignore-scripts"], destination, spinner, "Installing dependencies");
}

/** Run `bun install --force` (upgrade path) in `destination` with spinner progress. */
async function runBunInstallForUpgrade(destination, spinner) {
	await runWithProgress("bun", ["install", "--force"], destination, spinner, "Installing dependencies");
}

/**
 * Run `bos types gen` via the installed bin, falling back to the monorepo
 * source CLI when run inside the framework repo itself.
 * @throws {Error} when neither CLI location exists.
 */
async function runTypesGen(destination, spinner) {
	if (existsSync(join(destination, "node_modules", ".bin", "bos"))) {
		await runWithProgress("node_modules/.bin/bos", ["types", "gen"], destination, spinner, "Generating types");
		return;
	}
	if (existsSync(join(destination, "packages", "everything-dev", "src", "cli.ts"))) {
		await runWithProgress("bun", ["run", "--cwd", "packages/everything-dev", "src/cli.ts", "types", "gen"], destination, spinner, "Generating types");
		return;
	}
	throw new Error("Unable to locate bos CLI for types generation");
}

/** Start the project's docker-compose services and wait for them to be healthy. */
async function runDockerComposeUp(destination) {
	await execCommand("docker", ["compose", "up", "-d", "--wait"], destination, { stdio: "inherit" });
}

/**
 * Execute a command with inherited stdio and an elapsed-seconds spinner update
 * every 2s. The timeout is resolved from COMMAND_TIMEOUTS (see
 * resolveCommandTimeout), defaulting to 2 minutes.
 */
async function runWithProgress(command, args, cwd, spinner, label) {
	const child = execa(command, args, {
		cwd,
		stdio: "inherit",
		timeout: resolveCommandTimeout(command)
	});
	if (spinner) {
		const start = Date.now();
		const interval = setInterval(() => {
			const elapsed = Math.round((Date.now() - start) / 1e3);
			spinner.message(`${label}... (${elapsed}s)`);
		}, 2e3);
		try {
			await child;
		} finally {
			clearInterval(interval);
		}
	} else await child;
}

/**
 * Remove workspace entries from a bun text lockfile (JSON) that are no longer
 * listed in `allowedWorkspaces`. The root entry ("") is always kept, and
 * `dir/*` patterns keep every key under `dir/`. Unparsable lockfiles are left
 * untouched (best-effort).
 */
function stripOrphanedWorkspacesFromLockfile(lockfilePath, allowedWorkspaces) {
	if (!existsSync(lockfilePath)) return;
	const content = readFileSync(lockfilePath, "utf-8");
	let lockfile;
	try {
		lockfile = JSON.parse(content);
	} catch {
		return;
	}
	const workspaces = lockfile.workspaces;
	if (!workspaces || typeof workspaces !== "object") return;
	const workspaceMap = workspaces;
	const allowed = new Set(["", ...allowedWorkspaces]);
	const keys = Object.keys(workspaceMap);
	let changed = false;
	for (const key of keys) {
		if (allowed.has(key)) continue;
		// "plugins/*" allows any key starting with "plugins/".
		if (allowedWorkspaces.some((pattern) => pattern.endsWith("/*") && key.startsWith(pattern.slice(0, -1)))) continue;
		delete workspaceMap[key];
		changed = true;
	}
	if (changed) writeFileSync(lockfilePath, `${JSON.stringify(lockfile, null, 2)}\n`);
}

/** Delete the lockfile copied from the template, if present. */
function removeInitLockfile(lockfilePath) {
	if (!existsSync(lockfilePath)) return;
	rmSync(lockfilePath, { force: true });
}

/** Where framework packages live inside the monorepo source tree. */
const WORKSPACE_LOCAL_PATHS = {
	"everything-dev": "packages/everything-dev",
	"every-plugin": "packages/every-plugin"
};

/** Read and parse a JSON file (throws on missing file or invalid JSON). */
function readJsonFile(filePath) {
	return JSON.parse(readFileSync(filePath, "utf-8"));
}

/** Resolve a package's package.json path, or null when not installed. */
function tryResolvePackageJson(packageName) {
	try {
		return require.resolve(`${packageName}/package.json`);
	} catch {
		return null;
	}
}

/**
 * Build a name→version catalog for framework packages, merging (in priority
 * order): the monorepo root catalog two levels above the installed
 * everything-dev package, everything-dev's own version and embedded catalog,
 * and finally each framework package's own installed version (which overrides
 * earlier entries).
 */
function resolveFrameworkCatalog() {
	const catalog = {};
	const everythingDevPackageJson = tryResolvePackageJson("everything-dev");
	if (everythingDevPackageJson) {
		try {
			// When running inside the monorepo, the root package.json is two dirs up.
			const monorepoPkgPath = join(dirname(everythingDevPackageJson), "..", "..", "package.json");
			if (existsSync(monorepoPkgPath)) {
				const sourceCatalog = readJsonFile(monorepoPkgPath).workspaces?.catalog;
				if (sourceCatalog && typeof sourceCatalog === "object") {
					for (const [name, version] of Object.entries(sourceCatalog)) if (typeof version === "string") catalog[name] = version;
				}
			}
		} catch {}
		try {
			const selfPkg = readJsonFile(everythingDevPackageJson);
			if (selfPkg.version && !catalog["everything-dev"]) catalog["everything-dev"] = `^${selfPkg.version}`;
			const sourceCatalog = selfPkg.workspaces?.catalog;
			if (sourceCatalog && typeof sourceCatalog === "object") {
				for (const [name, version] of Object.entries(sourceCatalog)) if (typeof version === "string" && !catalog[name]) catalog[name] = version;
			}
		} catch {}
	}
	for (const packageName of FRAMEWORK_PACKAGES) {
		const resolved = tryResolvePackageJson(packageName);
		if (!resolved) continue;
		try {
			const pkg = readJsonFile(resolved);
			// Installed versions win over any catalog entry gathered above.
			if (pkg.version) catalog[packageName] = `^${pkg.version}`;
		} catch {}
	}
	return catalog;
}

/**
 * Scaffold a minimal child project (no template repository available):
 * bos.config.json derived from the parent config, a root package.json with
 * bos scripts and a framework catalog, .env.example and .gitignore.
 *
 * @returns {number} fixed count of scaffolded root files (4).
 */
async function scaffoldMinimalProject(destination, parentConfig, opts) {
	mkdirSync(destination, { recursive: true });
	const has = (section) => opts.overrides.includes(section);
	const config = {
		extends: `bos://${opts.extendsAccount}/${opts.extendsGateway}`,
		account: opts.account || opts.extendsAccount,
		...opts.domain ? { domain: opts.domain } : {},
		...opts.repository ? { repository: opts.repository } : {},
		...opts.title ? { title: opts.title } : {},
		...opts.description ? { description: opts.description } : {}
	};
	if (parentConfig.app && typeof parentConfig.app === "object") {
		const app = {};
		const parentApp = parentConfig.app;
		if (has("host") && parentApp.host) {
			app.host = { ...parentApp.host };
			stripProductionFields(app.host);
		}
		if (has("ui") && parentApp.ui) {
			app.ui = { ...parentApp.ui };
			stripProductionFields(app.ui);
		}
		if (has("api") && parentApp.api) {
			app.api = { ...parentApp.api };
			stripProductionFields(app.api);
		}
		// The auth section is carried by the "plugins" override.
		if (has("plugins") && parentApp.auth) {
			app.auth = { ...parentApp.auth };
			stripProductionFields(app.auth);
		}
		if (Object.keys(app).length > 0) config.app = app;
	}
	if (has("plugins") && opts.plugins && opts.plugins.length > 0 && parentConfig.plugins) {
		const plugins = {};
		for (const key of opts.plugins) {
			const parentPlugin = parentConfig.plugins?.[key];
			if (parentPlugin) if (typeof parentPlugin === "string") plugins[key] = { extends: parentPlugin };
			else {
				const pluginCopy = { ...parentPlugin };
				stripProductionFields(pluginCopy);
				plugins[key] = pluginCopy;
			}
		}
		config.plugins = plugins;
	}
	await saveBosConfig(destination, config);
	const workspacePackages = [];
	for (const section of opts.overrides) workspacePackages.push(...OVERRIDE_WORKSPACE_MAP[section]);
	if (has("plugins")) workspacePackages.push("plugins/*");
	const catalog = resolveFrameworkCatalog();
	const pkg = {
		name: opts.domain || opts.extendsGateway,
		private: true,
		type: "module",
		scripts: {
			dev: "node_modules/.bin/bos dev --host remote",
			"dev:ui": "node_modules/.bin/bos dev --ui local --api remote",
			"dev:api": "node_modules/.bin/bos dev --ui remote --api local",
			build: "node_modules/.bin/bos build",
			deploy: "node_modules/.bin/bos build --deploy",
			publish: "node_modules/.bin/bos publish",
			start: "node_modules/.bin/bos start",
			typecheck: "node_modules/.bin/bos types gen && tsc --noEmit",
			postinstall: "node_modules/.bin/bos types gen || true",
			"types:gen": "node_modules/.bin/bos types gen",
			bos: "node_modules/.bin/bos"
		},
		dependencies: {
			"everything-dev": "catalog:",
			"every-plugin": "catalog:"
		},
		devDependencies: {},
		workspaces: { packages: workspacePackages, catalog }
	};
	writeFileSync(join(destination, "package.json"), `${JSON.stringify(pkg, null, 2)}\n`);
	const envExample = generateEnvExample(parentConfig, opts.overrides);
	if (envExample) writeFileSync(join(destination, ".env.example"), envExample);
	writeFileSync(join(destination, ".gitignore"), generateGitignore());
	// bos.config.json, package.json, .env.example, .gitignore
	return 4;
}

/**
 * Normalize workspace package manifests under `destination` and, when
 * `options.localOverrides` is set, wire the framework packages to local
 * `file:` overrides copied from the source monorepo.
 */
async function resolveWorkspaceRefs(destination, options) {
	await normalizePackageManifestsInTree({
		sourceRootDir: options?.sourceDir ?? destination,
		targetDir: destination,
		resolveCatalogRefs: false,
		preserveCatalogRefs: true,
		removeWorkspaceDeps: ["host"]
	});
	if (options?.localOverrides && options.sourceDir) {
		const rootPkgPath = join(destination, "package.json");
		if (existsSync(rootPkgPath)) {
			const pkg = JSON.parse(readFileSync(rootPkgPath, "utf-8"));
			if (!pkg.overrides) pkg.overrides = {};
			const overrides = pkg.overrides;
			const rootWorkspaces = (pkg.workspaces?.packages ?? []).filter(Boolean);
			for (const [name, relPath] of Object.entries(WORKSPACE_LOCAL_PATHS)) if (!rootWorkspaces.some((ws) => ws === relPath || ws === `plugins/${name}`)) {
				if (existsSync(join(options.sourceDir, relPath, "package.json"))) {
					overrides[name] = `file:${relPath}`;
					rootWorkspaces.push(relPath);
				}
			}
			if (rootWorkspaces.length > 0) {
				if (!pkg.workspaces) pkg.workspaces = {};
				pkg.workspaces.packages = rootWorkspaces;
			}
			writeFileSync(rootPkgPath, `${JSON.stringify(pkg, null, 2)}\n`);
		}
	}
}

/**
 * Record a snapshot of the files an init run would copy: destination path →
 * short content hash, keyed by the parent `bos://` ref. Used later to detect
 * local modifications vs. upstream changes.
 */
async function writeInitSnapshot(destination, extendsAccount, extendsGateway, sourceDir, patterns, _options) {
	const allFiles = /* @__PURE__ */ new Set();
	for (const pattern of patterns) {
		const matches = await glob(pattern, {
			cwd: sourceDir,
			nodir: true,
			dot: true,
			absolute: false,
			ignore: ["**/node_modules/**", "**/.git/**", "**/dist/**", "**/.bos/**"]
		});
		for (const match of matches) allFiles.add(match);
	}
	const fileHashes = {};
	for (const filePath of allFiles) {
		const src = join(sourceDir, filePath);
		if (!lstatSync(src).isFile()) continue;
		const content = readFileSync(src);
		const destPath = sourcePathToDestinationPath(filePath);
		fileHashes[destPath] = computeHash(content);
	}
	await writeSnapshot(destination, {
		parentRef: `bos://${extendsAccount}/${extendsGateway}`,
		files: fileHashes
	});
}

/** Short (64-bit hex) sha256 digest used for snapshot change detection. */
function computeHash(data) {
	return createHash("sha256").update(data).digest("hex").substring(0, 16);
}

/** Create a unique temp directory under the OS tmpdir with the given prefix. */
function mkTmpDir(prefix) {
	return mkdtempSync(join(tmpdir(), `${prefix}-`));
}

/**
 * Run `bun run db:generate` in every workspace that has a drizzle.config.ts
 * and a db:generate script. Workspaces without the script are skipped.
 */
async function generateDatabaseMigrations(destination) {
	const drizzleConfigs = await glob("**/drizzle.config.ts", {
		cwd: destination,
		nodir: true,
		dot: false,
		absolute: false,
		ignore: ["**/node_modules/**"]
	});
	for (const configPath of drizzleConfigs) {
		const workspaceDir = dirname(configPath);
		const pkgPath = join(destination, workspaceDir, "package.json");
		if (!existsSync(pkgPath)) continue;
		if (!JSON.parse(readFileSync(pkgPath, "utf-8")).scripts?.["db:generate"]) continue;
		await execCommand("bun", ["run", "db:generate"], join(destination, workspaceDir));
	}
}

/** Per-tool subprocess timeouts in milliseconds (keyed by bare tool name). */
const COMMAND_TIMEOUTS = {
	bun: 5 * 6e4,
	docker: 5 * 6e4,
	node_modules: 2 * 6e4,
	tar: 6e4
};

/**
 * Resolve a subprocess timeout for a command. Fix over the original: commands
 * invoked by path (e.g. "node_modules/.bin/bos") never matched a
 * COMMAND_TIMEOUTS key, so the "node_modules" entry was dead; fall back to
 * matching the first path segment before using the 2-minute default.
 */
function resolveCommandTimeout(command) {
	return COMMAND_TIMEOUTS[command] ?? COMMAND_TIMEOUTS[command.split("/")[0]] ?? 2 * 6e4;
}

/**
 * Execute a command with a per-tool timeout; stdio defaults to "pipe" unless
 * overridden via `options.stdio`.
 */
async function execCommand(command, args, cwd, options) {
	const timeout = resolveCommandTimeout(command);
	await execa(command, args, {
		cwd,
		stdio: options?.stdio ?? "pipe",
		timeout
	});
}

/**
 * Build the .env.example content from the parent config: secret names (empty
 * values) and declared variables for each selected section plus selected
 * plugins, ending with a BETTER_AUTH_SECRET placeholder.
 * NOTE(review): collectSecrets is invoked on the whole `app` object once per
 * section flag, so any enabled section pulls secrets from all app entries —
 * preserved as-is; confirm against src/cli/init.ts before changing.
 */
function generateEnvExample(config, overrides) {
	const has = (section) => overrides.includes(section);
	const lines = ["# Environment variables"];
	const collectSecrets = (obj, includeSection, prefix = "") => {
		for (const [key, value] of Object.entries(obj)) {
			if (!includeSection) continue;
			if (key === "secrets" && Array.isArray(value)) {
				for (const secret of value) if (typeof secret === "string") lines.push(`${secret}=`);
			} else if (key === "variables" && isPlainObject(value)) {
				for (const [varKey, varVal] of Object.entries(value)) if (typeof varVal === "string") lines.push(`${varKey}=${varVal}`);
			} else if (isPlainObject(value) && key !== "extends") collectSecrets(value, includeSection, `${prefix}${key}.`);
		}
	};
	if (config.app && typeof config.app === "object") {
		const app = config.app;
		collectSecrets(app, has("host"), "host.");
		collectSecrets(app, has("ui"), "ui.");
		collectSecrets(app, has("api"), "api.");
		collectSecrets(app, has("plugins"), "auth.");
	}
	if (has("plugins") && config.plugins && typeof config.plugins === "object") {
		for (const [pluginKey, pluginVal] of Object.entries(config.plugins)) if (isPlainObject(pluginVal)) collectSecrets(pluginVal, true);
		else if (typeof pluginVal === "string") lines.push(`# Plugin '${pluginKey}' extends ${pluginVal}`);
	}
	lines.push("BETTER_AUTH_SECRET=generate-a-secret-here");
	return `${lines.join("\n")}\n`;
}

/** True for non-null objects that are not arrays. */
function isPlainObject(value) {
	return Boolean(value) && typeof value === "object" && !Array.isArray(value);
}

/** Default .gitignore content for scaffolded projects (one pattern per line). */
function generateGitignore() {
	return `node_modules/
dist/
.env
.bos/
*.gen.ts
*.gen.tsx
`;
}

//#endregion
export { INIT_ROOT_PATTERNS, buildInitPatterns, copyFilteredFiles, detectGitRemoteUrl, downloadTarball, execCommand, fetchParentConfig, generateDatabaseMigrations, personalizeConfig, removeInitLockfile, resolveRepositoryViaExtendsChain, resolveSourceDir, runBunInstall, runBunInstallForUpgrade, runDockerComposeUp, runTypesGen, scaffoldMinimalProject, sourcePathToDestinationPath, stripOrphanedWorkspacesFromLockfile, writeInitSnapshot };
//# sourceMappingURL=init.mjs.map