dotagent

Multi-file AI agent configuration manager with .agent directory support
Source map (version 3) with embedded sourcesContent for: ../src/index.ts, ../src/parser.ts, ../src/importers.ts, ../src/yaml-parser.ts, ../src/exporters.ts

// ../src/index.ts

export {
  parseAgentMarkdown,
  parseFenceEncodedMarkdown
} from './parser.js'

export {
  importAll,
  importAgent,
  importCopilot,
  importCursor,
  importCursorLegacy,
  importCline,
  importWindsurf,
  importZed,
  importCodex,
  importAider,
  importClaudeCode,
  importGemini,
  importQodo,
  importAmazonQ
} from './importers.js'

export {
  toAgentMarkdown,
  exportToAgent,
  exportToCopilot,
  exportToCursor,
  exportToCline,
  exportToWindsurf,
  exportToZed,
  exportToCodex,
  exportToAider,
  exportToClaudeCode,
  exportToGemini,
  exportToQodo,
  exportToAmazonQ,
  exportAll
} from './exporters.js'

export type {
  RuleMetadata,
  RuleBlock,
  ImportResult,
  ImportResults,
  ExportOptions,
  ParserOptions
} from './types.js'
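A minimal usage sketch of this API (paths are hypothetical, and the package is assumed to be importable under its published name dotagent): read an existing .cursor/ rules directory and re-emit it as a .agent/ directory.

// example (sketch)
import { importCursor, exportToAgent } from 'dotagent'

const result = importCursor('/path/to/repo/.cursor')  // ImportResult: { format: 'cursor', filePath, rules }
exportToAgent(result.rules, '/path/to/repo')          // writes /path/to/repo/.agent/<id>.md per rule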
// ../src/parser.ts

import { unified } from 'unified'
import remarkParse from 'remark-parse'
import { toMarkdown } from 'mdast-util-to-markdown'
import yaml from 'js-yaml'
import type { Root, RootContent } from 'mdast'
import type { RuleBlock, RuleMetadata, ParserOptions } from './types.js'

/**
 * @deprecated Use importAgent() instead. Single-file .agentconfig format is deprecated.
 */
export function parseAgentMarkdown(
  markdown: string,
  options: ParserOptions = {}
): RuleBlock[] {
  console.warn('Warning: parseAgentMarkdown() is deprecated. Use importAgent() to import from .agent/ directory instead.')
  const processor = unified().use(remarkParse)
  const tree = processor.parse(markdown) as Root

  const rules: RuleBlock[] = []
  let currentMetadata: RuleMetadata | null = null
  let currentContent: RootContent[] = []
  let currentPosition: RuleBlock['position'] | undefined

  for (let i = 0; i < tree.children.length; i++) {
    const node = tree.children[i]

    // Check for HTML comment with @<id> directive
    if (node.type === 'html' && isRuleComment(node.value)) {
      // If we have accumulated content, save the previous rule
      if (currentMetadata && currentContent.length > 0) {
        rules.push({
          metadata: currentMetadata,
          content: nodesToMarkdown(currentContent),
          position: currentPosition
        })
      }

      // Parse the new metadata
      currentMetadata = parseRuleComment(node.value)
      currentContent = []
      currentPosition = node.position ? {
        start: { ...node.position.start },
        end: { ...node.position.end }
      } : undefined
    }
    // Accumulate content
    else if (currentMetadata) {
      currentContent.push(node)
      if (currentPosition && node.position) {
        currentPosition.end = { ...node.position.end }
      }
    }
  }

  // Don't forget the last rule
  if (currentMetadata && currentContent.length > 0) {
    rules.push({
      metadata: currentMetadata,
      content: nodesToMarkdown(currentContent),
      position: currentPosition
    })
  }

  return rules
}

function isRuleComment(html: string): boolean {
  // Check if it contains @<id> pattern (@ followed by alphanumeric and hyphens)
  return /<!--\s*@[a-zA-Z0-9-]+(\s|$)/.test(html)
}

function parseRuleComment(html: string): RuleMetadata {
  // Extract @<id> and any additional metadata
  const match = html.match(/<!--\s*@([a-zA-Z0-9-]+)\s*([\s\S]*?)\s*-->/)
  if (!match) {
    throw new Error('Invalid rule comment format')
  }

  const id = match[1]
  const metaContent = match[2].trim()

  // Start with the ID from the @<id> pattern
  const metadata: RuleMetadata = { id }

  // If there's no additional content, return just the ID
  if (!metaContent) {
    return metadata
  }

  // Check if it looks like YAML (has newlines or starts with a YAML indicator)
  if (metaContent.includes('\n') || metaContent.startsWith('-') || metaContent.includes(': ')) {
    // Try to parse as YAML
    try {
      const parsed = yaml.load(metaContent) as Record<string, unknown>
      if (typeof parsed === 'object' && parsed !== null) {
        // Merge with existing metadata, but preserve the ID from @<id>
        return { ...parsed, id } as RuleMetadata
      }
    } catch {
      // Fall through to key:value parsing
    }
  }

  // Parse as key:value pairs
  // First check if it's all on one line (inline format)
  if (!metaContent.includes('\n')) {
    // Inline format: key:value pairs separated by spaces
    const pairs = metaContent.matchAll(/(\w+):(\S+)(?:\s|$)/g);
    for (const [, key, value] of pairs) {
      // Skip 'id' since we already have it from @<id>
      if (key === 'scope' && value.includes(',')) {
        metadata[key] = value.split(',').map(s => s.trim())
      } else if (key === 'alwaysApply' || key === 'manual') {
        metadata[key] = value === 'true'
      } else if (key !== 'id') {
        metadata[key] = value
      }
    }
  } else {
    // Multi-line format: one key:value per line
    const lines = metaContent.split('\n');
    for (const line of lines) {
      const colonIndex = line.indexOf(':');
      if (colonIndex > 0) {
        const key = line.substring(0, colonIndex).trim();
        const value = line.substring(colonIndex + 1).trim();

        // Skip 'id' since we already have it from @<id>
        if (key === 'scope' && value.includes(',')) {
          metadata[key] = value.split(',').map(s => s.trim())
        } else if (key === 'alwaysApply' || key === 'manual') {
          metadata[key] = value === 'true'
        } else if (key !== 'id' && value) {
          metadata[key] = value
        }
      }
    }
  }

  return metadata
}

function nodesToMarkdown(nodes: RootContent[]): string {
  const tree: Root = {
    type: 'root',
    children: nodes
  }

  return toMarkdown(tree, {
    bullet: '-',
    emphasis: '*',
    rule: '-'
  }).trim()
}

// Alternative parser for fence-encoded format
export function parseFenceEncodedMarkdown(
  markdown: string,
  options: ParserOptions = {}
): RuleBlock[] {
  const processor = unified().use(remarkParse)
  const tree = processor.parse(markdown) as Root

  const rules: RuleBlock[] = []
  let currentMetadata: RuleMetadata | null = null
  let currentContent: RootContent[] = []
  let currentPosition: RuleBlock['position'] | undefined

  for (let i = 0; i < tree.children.length; i++) {
    const node = tree.children[i]

    // Check for code block with 'rule' language
    if (node.type === 'code' && node.lang === 'rule') {
      // Save previous rule if exists
      if (currentMetadata && currentContent.length > 0) {
        rules.push({
          metadata: currentMetadata,
          content: nodesToMarkdown(currentContent),
          position: currentPosition
        })
      }

      // Parse the rule metadata
      try {
        currentMetadata = yaml.load(node.value) as RuleMetadata
        if (!currentMetadata.id) {
          currentMetadata.id = `rule-${Date.now()}`
        }
        currentContent = []
        currentPosition = node.position ? {
          start: { ...node.position.start },
          end: { ...node.position.end }
        } : undefined
      } catch (e) {
        if (options.strict) {
          throw new Error(`Failed to parse rule metadata: ${e}`)
        }
        // Skip invalid rule blocks in non-strict mode
        currentMetadata = null
      }
    }
    // Accumulate content after rule block
    else if (currentMetadata) {
      currentContent.push(node)
      if (currentPosition && node.position) {
        currentPosition.end = { ...node.position.end }
      }
    }
  }

  // Don't forget the last rule
  if (currentMetadata && currentContent.length > 0) {
    rules.push({
      metadata: currentMetadata,
      content: nodesToMarkdown(currentContent),
      position: currentPosition
    })
  }

  return rules
}
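A sketch of the deprecated single-file format that parseAgentMarkdown() accepts (assuming the package import name dotagent): each rule opens with an HTML comment carrying @<id> plus optional inline key:value metadata, and everything up to the next such comment becomes the rule body. The results shown in comments follow the parsing logic above.

// example (sketch)
import { parseAgentMarkdown } from 'dotagent'

const doc = [
  '<!-- @typescript-style alwaysApply:true scope:src/**,test/** -->',
  '',
  'Use strict TypeScript.'
].join('\n')

const [rule] = parseAgentMarkdown(doc)  // logs a deprecation warning
// rule.metadata ≈ { id: 'typescript-style', alwaysApply: true, scope: ['src/**', 'test/**'] }
// rule.content  ≈ 'Use strict TypeScript.'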
// ../src/importers.ts

import { readFileSync, existsSync, readdirSync, statSync, Dirent } from 'fs'
import { join, basename } from 'path'
import matter from 'gray-matter'
import yaml from 'js-yaml'
import type { ImportResult, ImportResults, RuleBlock } from './types.js'
import { grayMatterOptions } from './yaml-parser.js'

// Helper function to detect if a file/path indicates a private rule
function isPrivateRule(filePath: string): boolean {
  const lowerPath = filePath.toLowerCase()
  return lowerPath.includes('.local.') || lowerPath.includes('/private/') || lowerPath.includes('\\private\\')
}

export async function importAll(repoPath: string): Promise<ImportResults> {
  const results: ImportResult[] = []
  const errors: Array<{ file: string; error: string }> = []

  // Check for Agent directory (.agent/)
  const agentDir = join(repoPath, '.agent')
  if (existsSync(agentDir)) {
    try {
      results.push(importAgent(agentDir))
    } catch (e) {
      errors.push({ file: agentDir, error: String(e) })
    }
  }

  // Check for VS Code Copilot instructions
  const copilotPath = join(repoPath, '.github', 'copilot-instructions.md')
  if (existsSync(copilotPath)) {
    try {
      results.push(importCopilot(copilotPath))
    } catch (e) {
      errors.push({ file: copilotPath, error: String(e) })
    }
  }

  // Check for local VS Code Copilot instructions
  const copilotLocalPath = join(repoPath, '.github', 'copilot-instructions.local.md')
  if (existsSync(copilotLocalPath)) {
    try {
      results.push(importCopilot(copilotLocalPath))
    } catch (e) {
      errors.push({ file: copilotLocalPath, error: String(e) })
    }
  }

  // Check for Cursor directory (.cursor/)
  const cursorDir = join(repoPath, '.cursor')
  if (existsSync(cursorDir)) {
    try {
      results.push(importCursor(cursorDir))
    } catch (e) {
      errors.push({ file: cursorDir, error: String(e) })
    }
  }

  // Legacy single .cursorrules file
  const cursorRulesFile = join(repoPath, '.cursorrules')
  if (existsSync(cursorRulesFile)) {
    try {
      results.push(importCursorLegacy(cursorRulesFile))
    } catch (e) {
      errors.push({ file: cursorRulesFile, error: String(e) })
    }
  }

  // Check for Cline rules
  const clinerules = join(repoPath, '.clinerules')
  if (existsSync(clinerules)) {
    try {
      results.push(importCline(clinerules))
    } catch (e) {
      errors.push({ file: clinerules, error: String(e) })
    }
  }

  // Check for local Cline rules
  const clinerulesLocal = join(repoPath, '.clinerules.local')
  if (existsSync(clinerulesLocal)) {
    try {
      results.push(importCline(clinerulesLocal))
    } catch (e) {
      errors.push({ file: clinerulesLocal, error: String(e) })
    }
  }

  // Check for Windsurf rules
  const windsurfRules = join(repoPath, '.windsurfrules')
  if (existsSync(windsurfRules)) {
    try {
      results.push(importWindsurf(windsurfRules))
    } catch (e) {
      errors.push({ file: windsurfRules, error: String(e) })
    }
  }

  // Check for local Windsurf rules
  const windsurfRulesLocal = join(repoPath, '.windsurfrules.local')
  if (existsSync(windsurfRulesLocal)) {
    try {
      results.push(importWindsurf(windsurfRulesLocal))
    } catch (e) {
      errors.push({ file: windsurfRulesLocal, error: String(e) })
    }
  }

  // Check for Zed rules
  const zedRules = join(repoPath, '.rules')
  if (existsSync(zedRules)) {
    try {
      results.push(importZed(zedRules))
    } catch (e) {
      errors.push({ file: zedRules, error: String(e) })
    }
  }

  // Check for local Zed rules
  const zedRulesLocal = join(repoPath, '.rules.local')
  if (existsSync(zedRulesLocal)) {
    try {
      results.push(importZed(zedRulesLocal))
    } catch (e) {
      errors.push({ file: zedRulesLocal, error: String(e) })
    }
  }

  // Check for OpenAI Codex AGENTS.md
  const agentsMd = join(repoPath, 'AGENTS.md')
  if (existsSync(agentsMd)) {
    try {
      results.push(importCodex(agentsMd))
    } catch (e) {
      errors.push({ file: agentsMd, error: String(e) })
    }
  }

  // Check for local AGENTS.md
  const agentsLocalMd = join(repoPath, 'AGENTS.local.md')
  if (existsSync(agentsLocalMd)) {
    try {
      results.push(importCodex(agentsLocalMd))
    } catch (e) {
      errors.push({ file: agentsLocalMd, error: String(e) })
    }
  }

  // Check for CLAUDE.md (Claude Code)
  const claudeMd = join(repoPath, 'CLAUDE.md')
  if (existsSync(claudeMd)) {
    try {
      results.push(importClaudeCode(claudeMd))
    } catch (e) {
      errors.push({ file: claudeMd, error: String(e) })
    }
  }

  // Check for GEMINI.md (Gemini CLI)
  const geminiMd = join(repoPath, 'GEMINI.md')
  if (existsSync(geminiMd)) {
    try {
      results.push(importGemini(geminiMd))
    } catch (e) {
      errors.push({ file: geminiMd, error: String(e) })
    }
  }

  // Check for best_practices.md (Qodo)
  const bestPracticesMd = join(repoPath, 'best_practices.md')
  if (existsSync(bestPracticesMd)) {
    try {
      results.push(importQodo(bestPracticesMd))
    } catch (e) {
      errors.push({ file: bestPracticesMd, error: String(e) })
    }
  }

  // Check for local CLAUDE.md
  const claudeLocalMd = join(repoPath, 'CLAUDE.local.md')
  if (existsSync(claudeLocalMd)) {
    try {
      results.push(importClaudeCode(claudeLocalMd))
    } catch (e) {
      errors.push({ file: claudeLocalMd, error: String(e) })
    }
  }

  // Check for local GEMINI.md
  const geminiLocalMd = join(repoPath, 'GEMINI.local.md')
  if (existsSync(geminiLocalMd)) {
    try {
      results.push(importGemini(geminiLocalMd))
    } catch (e) {
      errors.push({ file: geminiLocalMd, error: String(e) })
    }
  }

  // Check for CONVENTIONS.md (Aider)
  const conventionsMd = join(repoPath, 'CONVENTIONS.md')
  if (existsSync(conventionsMd)) {
    try {
      results.push(importAider(conventionsMd))
    } catch (e) {
      errors.push({ file: conventionsMd, error: String(e) })
    }
  }

  // Check for local CONVENTIONS.md
  const conventionsLocalMd = join(repoPath, 'CONVENTIONS.local.md')
  if (existsSync(conventionsLocalMd)) {
    try {
      results.push(importAider(conventionsLocalMd))
    } catch (e) {
      errors.push({ file: conventionsLocalMd, error: String(e) })
    }
  }

  // Check for Amazon Q rules
  const amazonqRulesDir = join(repoPath, '.amazonq', 'rules')
  if (existsSync(amazonqRulesDir)) {
    try {
      results.push(importAmazonQ(amazonqRulesDir))
    } catch (e) {
      errors.push({ file: amazonqRulesDir, error: String(e) })
    }
  }

  return { results, errors }
}
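// Usage sketch (hypothetical path): importAll() probes the repo root for every
// known format above; a file that fails to import is recorded in `errors`
// instead of aborting the scan.
//
//   const { results, errors } = await importAll('/path/to/repo')
//   for (const e of errors) console.warn(`skipped ${e.file}: ${e.error}`)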
export function importCopilot(filePath: string): ImportResult {
  const content = readFileSync(filePath, 'utf-8')
  const isPrivate = isPrivateRule(filePath)

  const metadata: any = {
    id: 'copilot-instructions',
    alwaysApply: true,
    description: 'GitHub Copilot custom instructions'
  }

  if (isPrivate) {
    metadata.private = true
  }

  const rules: RuleBlock[] = [{
    metadata,
    content: content.trim()
  }]

  return {
    format: 'copilot',
    filePath,
    rules,
    raw: content
  }
}

export function importAgent(agentDir: string): ImportResult {
  const rules: RuleBlock[] = []

  // Recursively find all .md files in the agent directory
  function findMarkdownFiles(dir: string, relativePath = ''): void {
    const entries = readdirSync(dir, { withFileTypes: true })

    // Ensure deterministic ordering: process directories before files, then sort alphabetically
    entries.sort((a: Dirent, b: Dirent) => {
      if (a.isDirectory() && !b.isDirectory()) return -1;
      if (!a.isDirectory() && b.isDirectory()) return 1;
      return a.name.localeCompare(b.name);
    })

    for (const entry of entries) {
      const fullPath = join(dir, entry.name)
      const relPath = relativePath ? join(relativePath, entry.name) : entry.name

      if (entry.isDirectory()) {
        // Recursively search subdirectories
        findMarkdownFiles(fullPath, relPath)
      } else if (entry.isFile() && entry.name.endsWith('.md')) {
        const content = readFileSync(fullPath, 'utf-8')
        const { data, content: body } = matter(content, grayMatterOptions)

        // Remove any leading numeric ordering prefixes (e.g., "001-" or "12-") from each path segment
        let segments = relPath
          .replace(/\.md$/, '')
          .replace(/\\/g, '/')
          .split('/')
          .map((s: string) => s.replace(/^\d{2,}-/, '').replace(/\.local$/, ''))
        if (segments[0] === 'private') segments = segments.slice(1)
        const defaultId = segments.join('/')

        // Check if this is a private rule (either by path or frontmatter)
        const isPrivateFile = isPrivateRule(fullPath)

        const metadata: any = {
          id: data.id || defaultId,
          ...data
        }

        // Set default alwaysApply to false if not specified
        if (metadata.alwaysApply === undefined) {
          metadata.alwaysApply = false
        }

        // Only set private if it's true (from file pattern or frontmatter)
        if (data.private === true || (data.private === undefined && isPrivateFile)) {
          metadata.private = true
        }

        rules.push({
          metadata,
          content: body.trim()
        })
      }
    }
  }

  findMarkdownFiles(agentDir)

  return {
    format: 'agent',
    filePath: agentDir,
    rules
  }
}

export function importCursor(cursorDir: string): ImportResult {
  const rules: RuleBlock[] = []

  // Recursively find all .mdc and .md files in the .cursor directory
  function findCursorFiles(dir: string, relativePath = ''): void {
    const entries = readdirSync(dir, { withFileTypes: true })

    // Ensure deterministic ordering: process directories before files, then sort alphabetically
    entries.sort((a: Dirent, b: Dirent) => {
      if (a.isDirectory() && !b.isDirectory()) return -1;
      if (!a.isDirectory() && b.isDirectory()) return 1;
      return a.name.localeCompare(b.name);
    })

    for (const entry of entries) {
      const fullPath = join(dir, entry.name)
      const relPath = relativePath ? join(relativePath, entry.name) : entry.name

      if (entry.isDirectory()) {
        // Recursively search subdirectories
        findCursorFiles(fullPath, relPath)
      } else if (entry.isFile() && (entry.name.endsWith('.mdc') || entry.name.endsWith('.md'))) {
        const content = readFileSync(fullPath, 'utf-8')
        const { data, content: body } = matter(content, grayMatterOptions)

        // Remove any leading numeric ordering prefixes (e.g., "001-" or "12-") from each path segment
        let segments = relPath
          .replace(/\.(mdc|md)$/, '')
          .replace(/\\/g, '/')
          .split('/')
          .map((s: string) => s.replace(/^\d{2,}-/, '').replace(/\.local$/, ''))

        // Special handling for backward compatibility
        if (segments[0] === 'private') segments = segments.slice(1)
        // If the file is directly in the 'rules' directory, don't include 'rules' in the ID
        if (segments[0] === 'rules' && segments.length === 2) segments = segments.slice(1)

        const defaultId = segments.join('/')

        // Check if this is a private rule
        const isPrivateFile = isPrivateRule(fullPath)

        const metadata: any = {
          id: data.id || defaultId,
          ...data
        }

        // Set default alwaysApply to false if not specified
        if (metadata.alwaysApply === undefined) {
          metadata.alwaysApply = false
        }

        // Only set private if it's true (from file pattern or frontmatter)
        if (data.private === true || (data.private === undefined && isPrivateFile)) {
          metadata.private = true
        }

        rules.push({
          metadata,
          content: body.trim()
        })
      }
    }
  }

  findCursorFiles(cursorDir)

  return {
    format: 'cursor',
    filePath: cursorDir,
    rules
  }
}

export function importCursorLegacy(filePath: string): ImportResult {
  const content = readFileSync(filePath, 'utf-8')
  const rules: RuleBlock[] = [{
    metadata: {
      id: 'cursor-rules-legacy',
      alwaysApply: true,
      description: 'Legacy Cursor rules'
    },
    content: content.trim()
  }]

  return {
    format: 'cursor',
    filePath,
    rules,
    raw: content
  }
}

export function importCline(rulesPath: string): ImportResult {
  const rules: RuleBlock[] = []

  // Check if it's a directory
  if (existsSync(rulesPath) && statSync(rulesPath).isDirectory()) {
    // Recursively find all .md files
    function findMdFiles(dir: string, relativePath = ''): void {
      const entries = readdirSync(dir, { withFileTypes: true })

      // Ensure deterministic ordering: process directories before files, then sort alphabetically
      entries.sort((a: Dirent, b: Dirent) => {
        if (a.isDirectory() && !b.isDirectory()) return -1;
        if (!a.isDirectory() && b.isDirectory()) return 1;
        return a.name.localeCompare(b.name);
      })

      for (const entry of entries) {
        const fullPath = join(dir, entry.name)
        const relPath = relativePath ? join(relativePath, entry.name) : entry.name

        if (entry.isDirectory()) {
          findMdFiles(fullPath, relPath)
        } else if (entry.isFile() && entry.name.endsWith('.md')) {
          const content = readFileSync(fullPath, 'utf-8')
          const isPrivateFile = isPrivateRule(fullPath)
          // Remove any leading numeric ordering prefixes (e.g., "001-" or "12-") from each path segment
          let segments = relPath
            .replace(/\.md$/, '')
            .replace(/\\/g, '/')
            .split('/')
            .map((s: string) => s.replace(/^\d{2,}-/, '').replace(/\.local$/, ''))
          if (segments[0] === 'private') segments = segments.slice(1)
          const defaultId = segments.join('/')

          const metadata: any = {
            id: defaultId,
            alwaysApply: true,
            description: `Cline rules from ${relPath}`
          }

          if (isPrivateFile) {
            metadata.private = true
          }

          rules.push({
            metadata,
            content: content.trim()
          })
        }
      }
    }

    findMdFiles(rulesPath)
  } else {
    // Single .clinerules file
    const content = readFileSync(rulesPath, 'utf-8')
    const isPrivateFile = isPrivateRule(rulesPath)

    const metadata: any = {
      id: 'cline-rules',
      alwaysApply: true,
      description: 'Cline project rules'
    }

    if (isPrivateFile) {
      metadata.private = true
    }

    rules.push({
      metadata,
      content: content.trim()
    })
  }

  return {
    format: 'cline',
    filePath: rulesPath,
    rules
  }
}

export function importWindsurf(filePath: string): ImportResult {
  const content = readFileSync(filePath, 'utf-8')
  const isPrivateFile = isPrivateRule(filePath)

  const metadata: any = {
    id: 'windsurf-rules',
    alwaysApply: true,
    description: 'Windsurf AI rules'
  }

  if (isPrivateFile) {
    metadata.private = true
  }

  const rules: RuleBlock[] = [{
    metadata,
    content: content.trim()
  }]

  return {
    format: 'windsurf',
    filePath,
    rules,
    raw: content
  }
}

export function importZed(filePath: string): ImportResult {
  const content = readFileSync(filePath, 'utf-8')
  const isPrivateFile = isPrivateRule(filePath)

  const metadata: any = {
    id: 'zed-rules',
    alwaysApply: true,
    description: 'Zed editor rules'
  }

  if (isPrivateFile) {
    metadata.private = true
  }

  const rules: RuleBlock[] = [{
    metadata,
    content: content.trim()
  }]

  return {
    format: 'zed',
    filePath,
    rules,
    raw: content
  }
}

export function importCodex(filePath: string): ImportResult {
  const content = readFileSync(filePath, 'utf-8')
  const format = basename(filePath) === 'AGENTS.md' || basename(filePath) === 'AGENTS.local.md' ? 'codex' : 'unknown'
  const isPrivateFile = isPrivateRule(filePath)

  const metadata: any = {
    id: format === 'codex' ? 'codex-agents' : 'claude-rules',
    alwaysApply: true,
    description: format === 'codex' ? 'OpenAI Codex agent instructions' : 'Claude AI instructions'
  }

  if (isPrivateFile) {
    metadata.private = true
  }

  const rules: RuleBlock[] = [{
    metadata,
    content: content.trim()
  }]

  return {
    format,
    filePath,
    rules,
    raw: content
  }
}

export function importAider(filePath: string): ImportResult {
  const content = readFileSync(filePath, 'utf-8')
  const isPrivateFile = isPrivateRule(filePath)

  const metadata: any = {
    id: 'aider-conventions',
    alwaysApply: true,
    description: 'Aider CLI conventions'
  }

  if (isPrivateFile) {
    metadata.private = true
  }

  const rules: RuleBlock[] = [{
    metadata,
    content: content.trim()
  }]

  return {
    format: 'aider',
    filePath,
    rules,
    raw: content
  }
}

export function importClaudeCode(filePath: string): ImportResult {
  const content = readFileSync(filePath, 'utf-8')
  const isPrivateFile = isPrivateRule(filePath)

  const metadata: any = {
    id: 'claude-code-instructions',
    alwaysApply: true,
    description: 'Claude Code context and instructions'
  }

  if (isPrivateFile) {
    metadata.private = true
  }

  const rules: RuleBlock[] = [{
    metadata,
    content: content.trim()
  }]

  return {
    format: 'claude',
    filePath,
    rules,
    raw: content
  }
}

export function importGemini(filePath: string): ImportResult {
  const content = readFileSync(filePath, 'utf-8')
  const isPrivateFile = isPrivateRule(filePath)

  const metadata: any = {
    id: 'gemini-instructions',
    alwaysApply: true,
    description: 'Gemini CLI context and instructions'
  }

  if (isPrivateFile) {
    metadata.private = true
  }

  const rules: RuleBlock[] = [{
    metadata,
    content: content.trim()
  }]

  return {
    format: 'gemini',
    filePath,
    rules,
    raw: content
  }
}

export function importQodo(filePath: string): ImportResult {
  const content = readFileSync(filePath, 'utf-8')
  const rules: RuleBlock[] = [{
    metadata: {
      id: 'qodo-best-practices',
      alwaysApply: true,
      description: 'Qodo best practices and coding standards',
      scope: '**/*',
      priority: 'high'
    },
    content: content.trim()
  }]

  return {
    format: 'qodo',
    filePath,
    rules,
    raw: content
  }
}

export function importAmazonQ(rulesDir: string): ImportResult {
  const rules: RuleBlock[] = []

  // Recursively find all .md files in the Amazon Q rules directory
  function findMdFiles(dir: string, relativePath = ''): void {
    const entries = readdirSync(dir, { withFileTypes: true })

    // Ensure deterministic ordering: process directories before files, then sort alphabetically
    entries.sort((a: Dirent, b: Dirent) => {
      if (a.isDirectory() && !b.isDirectory()) return -1;
      if (!a.isDirectory() && b.isDirectory()) return 1;
      return a.name.localeCompare(b.name);
    })

    for (const entry of entries) {
      const fullPath = join(dir, entry.name)
      const relPath = relativePath ? join(relativePath, entry.name) : entry.name

      if (entry.isDirectory()) {
        // Recursively search subdirectories
        findMdFiles(fullPath, relPath)
      } else if (entry.isFile() && entry.name.endsWith('.md')) {
        const content = readFileSync(fullPath, 'utf-8')
        const isPrivateFile = isPrivateRule(fullPath)

        // Remove any leading numeric ordering prefixes (e.g., "001-" or "12-") from each path segment
        let segments = relPath
          .replace(/\.md$/, '')
          .replace(/\\/g, '/')
          .split('/')
          .map((s: string) => s.replace(/^\d{2,}-/, '').replace(/\.local$/, ''))
        if (segments[0] === 'private') segments = segments.slice(1)
        const defaultId = segments.join('/')

        const metadata: any = {
          id: `amazonq-${defaultId}`,
          alwaysApply: true,
          description: `Amazon Q rules from ${relPath}`
        }

        if (isPrivateFile) {
          metadata.private = true
        }

        rules.push({
          metadata,
          content: content.trim()
        })
      }
    }
  }

  findMdFiles(rulesDir)

  return {
    format: 'amazonq',
    filePath: rulesDir,
    rules
  }
}
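How importAgent() derives rule IDs, as a sketch over a hypothetical layout: numeric ordering prefixes and .local suffixes are stripped per path segment, a leading private/ segment is dropped, and privacy is inferred from the path when the frontmatter doesn't say otherwise.

// example (sketch)
import { importAgent } from 'dotagent'

// .agent/001-style.md          → id 'style'
// .agent/api/020-auth.local.md → id 'api/auth', private: true ('.local.' in the name)
// .agent/private/keys.md       → id 'keys', private: true (under private/)
const { rules } = importAgent('/path/to/repo/.agent')
console.log(rules.map(r => r.metadata.id))  // ['api/auth', 'keys', 'style'] — directories are visited first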
// ../src/yaml-parser.ts

import yaml from 'js-yaml'
import type { GrayMatterOption } from 'gray-matter'

/**
 * Custom YAML parser that handles glob patterns starting with *
 * by pre-processing the YAML to quote unquoted strings that start with *
 */
export function createSafeYamlParser() {
  return {
    parse: (str: string): object => {
      // Pre-process the YAML string to quote glob patterns
      // This regex looks for unquoted strings starting with * in YAML values
      const processedStr = str.replace(
        /^(\s*\w+:\s*)(\*[^\n\r"']*?)(\s*(?:\r?\n|$))/gm,
        (match, prefix, value, suffix) => {
          // Check if the value is already quoted
          if (value.startsWith('"') || value.startsWith("'")) {
            return match
          }
          // Quote the value to prevent it from being interpreted as a YAML alias
          return `${prefix}"${value}"${suffix}`
        }
      )

      // Also handle array items that start with *
      const fullyProcessedStr = processedStr.replace(
        /^(\s*-\s+)(\*[^\n\r"']*?)(\s*(?:\r?\n|$))/gm,
        (match, prefix, value, suffix) => {
          // Check if the value is already quoted
          if (value.startsWith('"') || value.startsWith("'")) {
            return match
          }
          // Quote the value
          return `${prefix}"${value}"${suffix}`
        }
      )

      try {
        return yaml.load(fullyProcessedStr) as object
      } catch (error) {
        // If preprocessing fails, try the original string
        return yaml.load(str) as object
      }
    },
    stringify: (data: object) => yaml.dump(data)
  }
}

/**
 * Gray-matter options with custom YAML parser for handling glob patterns
 */
export const grayMatterOptions: GrayMatterOption<string, object> = {
  engines: {
    yaml: createSafeYamlParser()
  }
}
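Why the custom engine exists, as a sketch: in YAML an unquoted leading * starts an alias, so frontmatter such as globs: *.ts makes js-yaml throw; the pre-processor quotes the value so it parses as a plain string. (createSafeYamlParser is imported here by its module path; it is not re-exported from the package index.)

// example (sketch)
import yaml from 'js-yaml'
import { createSafeYamlParser } from './yaml-parser.js'

try {
  yaml.load('globs: *.ts')  // throws: `*` begins an (undefined) alias
} catch { /* YAMLException */ }

const parser = createSafeYamlParser()
console.log(parser.parse('globs: *.ts'))  // { globs: '*.ts' }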
// ../src/exporters.ts

import { writeFileSync, mkdirSync, existsSync } from 'fs'
import { join, dirname, relative } from 'path'
import yaml from 'js-yaml'
import matter from 'gray-matter'
import type { RuleBlock, ExportOptions } from './types.js'
import { grayMatterOptions } from './yaml-parser.js'

/**
 * Generate conditional rules section for single-file formats
 */
function generateConditionalRulesSection(rules: RuleBlock[], repoPath: string): string {
  const sections: string[] = []

  // Separate rules by type
  const alwaysApplyRules = rules.filter(r => r.metadata.alwaysApply !== false)
  const conditionalRules = rules.filter(r => r.metadata.alwaysApply === false)

  if (conditionalRules.length === 0) {
    return ''
  }

  // Group rules by folder (e.g., workflows, components, etc.)
  const rulesByFolder: Record<string, RuleBlock[]> = {}
  const rulesWithScope: RuleBlock[] = []
  const rulesWithDescription: RuleBlock[] = []

  conditionalRules.forEach(rule => {
    // Extract folder from ID if it contains a slash
    if (rule.metadata.id && rule.metadata.id.includes('/')) {
      const folder = rule.metadata.id.split('/')[0]
      if (!rulesByFolder[folder]) {
        rulesByFolder[folder] = []
      }
      rulesByFolder[folder].push(rule)
    }

    // Categorize by scope/description
    if (rule.metadata.scope) {
      rulesWithScope.push(rule)
    } else if (rule.metadata.description && !rule.metadata.scope && !rule.metadata.id?.includes('/')) {
      // Only treat as description-based if it's not in a folder
      rulesWithDescription.push(rule)
    }
  })

  sections.push('## Context-Specific Rules')
  sections.push('')

  // Add rules with scope patterns
  if (rulesWithScope.length > 0) {
    rulesWithScope.forEach(rule => {
      const scopes = Array.isArray(rule.metadata.scope) ? rule.metadata.scope : [rule.metadata.scope]
      scopes.forEach(scope => {
        const rulePath = `.agent/${rule.metadata.id}.md`
        const description = rule.metadata.description ? ` - ${rule.metadata.description}` : ''
        sections.push(`When working with files matching \`${scope}\`, also apply:`)
        sections.push(`→ [${rule.metadata.id}](${rulePath})${description}`)
        sections.push('')
      })
    })
  }

  // Add rules with description keywords
  if (rulesWithDescription.length > 0) {
    rulesWithDescription.forEach(rule => {
      const rulePath = `.agent/${rule.metadata.id}.md`
      sections.push(`When working with ${rule.metadata.description}, also apply:`)
      sections.push(`→ [${rule.metadata.id}](${rulePath})`)
      sections.push('')
    })
  }

  // Add folder-based sections (e.g., Workflows)
  Object.entries(rulesByFolder).forEach(([folder, folderRules]) => {
    // Skip if already handled above
    const unhandledRules = folderRules.filter(r =>
      !rulesWithScope.includes(r) && !rulesWithDescription.includes(r)
    )

    if (unhandledRules.length > 0) {
      // Capitalize folder name for section title
      const sectionTitle = folder.charAt(0).toUpperCase() + folder.slice(1)
      sections.push(`## ${sectionTitle}`)
      sections.push('')

      unhandledRules.forEach(rule => {
        const rulePath = `.agent/${rule.metadata.id}.md`
        const description = rule.metadata.description ? ` - ${rule.metadata.description}` : ''
        sections.push(`→ [${rule.metadata.id}](${rulePath})${description}`)
      })
      sections.push('')
    }
  })

  return sections.join('\n')
}
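// Output sketch: for a conditional rule such as
//   { id: 'api/auth', scope: 'src/api/**', description: 'Auth rules', alwaysApply: false }
// the section above renders roughly as:
//
//   ## Context-Specific Rules
//
//   When working with files matching `src/api/**`, also apply:
//   → [api/auth](.agent/api/auth.md) - Auth rules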
/**
 * @deprecated Use exportToAgent() instead. Single-file .agentconfig format is deprecated.
 */
export function toAgentMarkdown(rules: RuleBlock[]): string {
  console.warn('Warning: toAgentMarkdown() is deprecated. Use exportToAgent() to export to .agent/ directory instead.')

  const sections: string[] = []

  for (const rule of rules) {
    const { metadata, content } = rule

    // Extract id and other metadata
    const { id, ...otherMetadata } = metadata

    // Build the comment starting with @<id>
    let metaComment = `<!-- @${id}`

    // If there are other metadata properties, add them
    if (Object.keys(otherMetadata).length > 0) {
      // Format remaining metadata as YAML
      const metaYaml = yaml.dump(otherMetadata, {
        flowLevel: 1,
        lineWidth: -1
      }).trim()

      metaComment += `\n${metaYaml}`
    }

    metaComment += ' -->'

    // Add section
    sections.push(`${metaComment}\n\n${content}`)
  }

  return sections.join('\n\n')
}

export function exportToCopilot(rules: RuleBlock[], outputPath: string, options?: ExportOptions): void {
  // Filter out private rules unless includePrivate is true
  const filteredRules = rules.filter(rule => !rule.metadata.private || options?.includePrivate)

  // Separate always-apply rules from conditional rules
  const alwaysApplyRules = filteredRules.filter(r => r.metadata.alwaysApply !== false)
  const conditionalSection = generateConditionalRulesSection(filteredRules, dirname(outputPath))

  // Combine always-apply rules into main content
  const mainContent = alwaysApplyRules
    .map(rule => rule.content)
    .join('\n\n---\n\n')

  // Add conditional rules section if there are any
  const fullContent = conditionalSection
    ? `${mainContent}\n\n---\n\n${conditionalSection}`
    : mainContent

  ensureDirectoryExists(outputPath)
  writeFileSync(outputPath, fullContent, 'utf-8')
}

export function exportToAgent(rules: RuleBlock[], outputDir: string, options?: ExportOptions): void {
  const agentDir = join(outputDir, '.agent')
  mkdirSync(agentDir, { recursive: true })

  let topIndex = 1;
  rules.forEach(rule => {
    // Support nested folders based on rule ID (e.g., "api/auth" -> "api/auth.md")
    let filename: string
    let filePath: string

    if (rule.metadata.id && rule.metadata.id.includes('/')) {
      // Create nested structure based on ID
      const parts = rule.metadata.id.split('/')
      const fileName = parts.pop() + '.md'
      const subDir = join(agentDir, ...parts)
      mkdirSync(subDir, { recursive: true })
      filePath = join(subDir, fileName)
    } else {
      if (rule.metadata.private) {
        const prefix = String(topIndex).padStart(3, '0') + '-'
        topIndex++
        filename = `${prefix}${rule.metadata.id || 'rule'}.md`
        const privDir = join(agentDir, 'private')
        mkdirSync(privDir, { recursive: true })
        filePath = join(privDir, filename)
      } else {
        filename = `${rule.metadata.id || 'rule'}.md`
        filePath = join(agentDir, filename)
      }
    }

    // Prepare front matter data - filter out undefined and null values
    const frontMatterBase: Record<string, unknown> = {}

    if (rule.metadata.description !== undefined && rule.metadata.description !== null) frontMatterBase.description = rule.metadata.description
    if (rule.metadata.alwaysApply !== undefined) frontMatterBase.alwaysApply = rule.metadata.alwaysApply
    if (rule.metadata.globs !== undefined && rule.metadata.globs !== null) frontMatterBase.globs = rule.metadata.globs
    if (rule.metadata.manual !== undefined && rule.metadata.manual !== null) frontMatterBase.manual = rule.metadata.manual
    if (rule.metadata.scope !== undefined && rule.metadata.scope !== null) frontMatterBase.scope = rule.metadata.scope
    if (rule.metadata.priority !== undefined && rule.metadata.priority !== null) frontMatterBase.priority = rule.metadata.priority
    if (rule.metadata.triggers !== undefined && rule.metadata.triggers !== null) frontMatterBase.triggers = rule.metadata.triggers

    // Add other metadata fields but exclude 'private' if it's false or null
    for (const [key, value] of Object.entries(rule.metadata)) {
      if (!['id', 'description', 'alwaysApply', 'globs', 'manual', 'scope', 'priority', 'triggers'].includes(key) && value !== undefined && value !== null) {
        // Don't include private: false in frontmatter
        if (key === 'private' && value === false) continue
        frontMatterBase[key] = value
      }
    }

    const frontMatter = frontMatterBase

    // Create Markdown content with frontmatter
    const mdContent = matter.stringify(rule.content, frontMatter, grayMatterOptions)
    writeFileSync(filePath, mdContent, 'utf-8')
  })
}

export function exportToCursor(rules: RuleBlock[], outputDir: string, options?: ExportOptions): void {
  const rulesDir = join(outputDir, '.cursor', 'rules')
  mkdirSync(rulesDir, { recursive: true })

  // Filter out private rules unless includePrivate is true
  const filteredRules = rules.filter(rule => !rule.metadata.private || options?.includePrivate)

  for (const rule of filteredRules) {
    // Support nested folders based on rule ID
    let filePath: string

    if (rule.metadata.id && rule.metadata.id.includes('/')) {
      // Create nested structure based on ID
      const parts = rule.metadata.id.split('/')
      const fileName = parts.pop() + '.mdc'
      const subDir = join(rulesDir, ...parts)
      mkdirSync(subDir, { recursive: true })
      filePath = join(subDir, fileName)
    } else {
      const filename = `${rule.metadata.id || 'rule'}.mdc`
      filePath = join(rulesDir, filename)
    }

    // Prepare front matter data - filter out undefined and null values
    const frontMatterBase: Record<string, unknown> = {}

    if (rule.metadata.description !== undefined && rule.metadata.description !== null) frontMatterBase.description = rule.metadata.description
    if (rule.metadata.alwaysApply !== undefined) frontMatterBase.alwaysApply = rule.metadata.alwaysApply
    if (rule.metadata.globs !== undefined && rule.metadata.globs !== null) frontMatterBase.globs = rule.metadata.globs
    if (rule.metadata.manual !== undefined && rule.metadata.manual !== null) frontMatterBase.manual = rule.metadata.manual
    if (rule.metadata.scope !== undefined && rule.metadata.scope !== null) frontMatterBase.scope = rule.metadata.scope
    if (rule.metadata.priority !== undefined && rule.metadata.priority !== null) frontMatterBase.priority = rule.metadata.priority
    if (rule.metadata.triggers !== undefined && rule.metadata.triggers !== null) frontMatterBase.triggers = rule.metadata.triggers

    // Add other metadata fields but exclude 'private' if it's false or null
    for (const [key, value] of Object.entries(rule.metadata)) {
      if (!['id', 'description', 'alwaysApply', 'globs', 'manual', 'scope', 'priority', 'triggers'].includes(key) && value !== undefined && value !== null) {
        // Don't include private: false in frontmatter
        if (key === 'private' && value === false) continue
        frontMatterBase[key] = value
      }
    }

    const frontMatter = frontMatterBase

    // Create MDC content
    const mdcContent = matter.stringify(rule.content, frontMatter, grayMatterOptions)
    writeFileSync(filePath, mdcContent, 'utf-8')
  }
}

export function exportToCline(rules: RuleBlock[], outputPath: string, options?: ExportOptions): void {
  // Filter out private rules unless includePrivate is true
  const filteredRules = rules.filter(rule => !rule.metadata.private || options?.includePrivate)

  if (outputPath.endsWith('.clinerules')) {
    // Single file mode
    const alwaysApplyRules = filteredRules.filter(r => r.metadata.alwaysApply !== false)
    const conditionalSection = generateConditionalRulesSection(filteredRules, dirname(outputPath))

    const mainContent = alwaysApplyRules
      .map(rule => {
        const header = rule.metadata.description ? `## ${rule.metadata.description}\n\n` : ''
        return header + rule.content
      })
      .join('\n\n')

    const fullContent = conditionalSection
      ? `${mainContent}\n\n${conditionalSection}`
      : mainContent

    ensureDirectoryExists(outputPath)
    writeFileSync(outputPath, fullContent, 'utf-8')
  } else {
    // Directory mode
    const rulesDir = join(outputPath, '.clinerules')
    mkdirSync(rulesDir, { recursive: true })

    filteredRules.forEach((rule, index) => {
      const filename = `${String(index + 1).padStart(2, '0')}-${rule.metadata.id || 'rule'}.md`
      const filePath = join(rulesDir, filename)
      writeFileSync(filePath, rule.content, 'utf-8')
    })
  }
}

export function exportToWindsurf(rules: RuleBlock[], outputPath: string, options?: ExportOptions): void {
  // Filter out private rules unless includePrivate is true
  const filteredRules = rules.filter(rule => !rule.metadata.private || options?.includePrivate)

  const alwaysApplyRules = filteredRules.filter(r => r.metadata.alwaysApply !== false)
  const conditionalSection = generateConditionalRulesSection(filteredRules, dirname(outputPath))

  const mainContent = alwaysApplyRules
    .map(rule => rule.content)
    .join('\n\n')

  const fullContent = conditionalSection
    ? `${mainContent}\n\n${conditionalSection}`
    : mainContent

  ensureDirectoryExists(outputPath)
  writeFileSync(outputPath, fullContent, 'utf-8')
}

export function exportToZed(rules: RuleBlock[], outputPath: string, options?: ExportOptions): void {
  // Filter out private rules unless includePrivate is true
  const filteredRules = rules.filter(rule => !rule.metadata.private || options?.includePrivate)

  const alwaysApplyRules = filteredRules.filter(r => r.metadata.alwaysApply !== false)
  const conditionalSection = generateConditionalRulesSection(filteredRules, dirname(outputPath))

  const mainContent = alwaysApplyRules
    .map(rule => rule.content)
    .join('\n\n')

  const fullContent = conditionalSection
    ? `${mainContent}\n\n${conditionalSection}`
    : mainContent

  ensureDirectoryExists(outputPath)
  writeFileSync(outputPath, fullContent, 'utf-8')
}

export function exportToCodex(rules: RuleBlock[], outputPath: string, options?: ExportOptions): void {
  // Filter out private rules unless includePrivate is true
  const filteredRules = rules.filter(rule => !rule.metadata.private || options?.includePrivate)

  const alwaysApplyRules = filteredRules.filter(r => r.metadata.alwaysApply !== false)
  const conditionalSection = generateConditionalRulesSection(filteredRules, dirname(outputPath))

  const mainContent = alwaysApplyRules
    .map(rule => {
      const header = rule.metadata.description ? `# ${rule.metadata.description}\n\n` : ''
      return header + rule.content
    })
    .join('\n\n')

  const fullContent = conditionalSection
    ? `${mainContent}\n\n${conditionalSection}`
    : mainContent

  ensureDirectoryExists(outputPath)
  writeFileSync(outputPath, fullContent, 'utf-8')
}

export function exportToAider(rules: RuleBlock[], outputPath: string, options?: ExportOptions): void {
  // Filter out private rules unless includePrivate is true
  const filteredRules = rules.filter(rule => !rule.metadata.private || options?.includePrivate)

  const alwaysApplyRules = filteredRules.filter(r => r.metadata.alwaysApply !== false)
  const conditionalSection = generateConditionalRulesSection(filteredRules, dirname(outputPath))

  const mainContent = alwaysApplyRules
    .map(rule => rule.content)
    .join('\n\n')

  const fullContent = conditionalSection
    ? `${mainContent}\n\n${conditionalSection}`
    : mainContent

  ensureDirectoryExists(outputPath)
  writeFileSync(outputPath, fullContent, 'utf-8')
}

export function exportToClaudeCode(rules: RuleBlock[], outputPath: string, options?: ExportOptions): void {
  // Filter out private rules unless includePrivate is true
  const filteredRules = rules.filter(rule => !rule.metadata.private || options?.includePrivate)

  const alwaysApplyRules = filteredRules.filter(r => r.metadata.alwaysApply !== false)
  const conditionalSection = generateConditionalRulesSection(filteredRules, dirname(outputPath))

  const mainContent = alwaysApplyRules
    .map(rule => {
      const header = rule.metadata.description ? `# ${rule.metadata.description}\n\n` : ''
      return header + rule.content
    })
    .join('\n\n')

  const fullContent = conditionalSection
    ? `${mainContent}\n\n${conditionalSection}`
    : mainContent

  ensureDirectoryExists(outputPath)
  writeFileSync(outputPath, fullContent, 'utf-8')
}

export function exportToGemini(rules: RuleBlock[], outputPath: string, options?: ExportOptions): void {
  // Filter out private rules unless includePrivate is true
  const filteredRules = rules.filter(rule => !rule.metadata.private || options?.includePrivate)

  const content = filteredRules
    .map(rule => {
      const header = rule.metadata.description ? `# ${rule.metadata.description}\n\n` : ''
      return header + rule.content
    })
    .join('\n\n')

  ensureDirectoryExists(outputPath)
  writeFileSync(outputPath, content, 'utf-8')
}

export function exportToQodo(rules: RuleBlock[], outputPath: string, options?: ExportOptions): void {
  // Filter out private rules unless includePrivate is true
  const filteredRules = rules.filter(rule => !rule.metadata.private || options?.includePrivate)

  const alwaysApplyRules = filteredRules.filter(r => r.metadata.alwaysApply !== false)
  const conditionalSection = generateConditionalRulesSection(filteredRules, dirname(outputPath))

  const mainContent = alwaysApplyRules
    .map(rule => {
      const header = rule.metadata.description ? `# ${rule.metadata.description}\n\n` : ''
      return header + rule.content
    })
    .join('\n\n---\n\n')

  const fullContent = conditionalSection
    ? `${mainContent}\n\n---\n\n${conditionalSection}`
    : mainContent

  ensureDirectoryExists(outputPath)
  writeFileSync(outputPath, fullContent, 'utf-8')
}
`${mainContent}\\n\\n---\\n\\n${conditionalSection}`\n : mainContent\n\n ensureDirectoryExists(outputPath)\n writeFileSync(outputPath, fullContent, 'utf-8')\n}\n\nexport function exportToAmazonQ(rules: RuleBlock[], outputDir: string, options?: ExportOptions): void {\n const rulesDir = join(outputDir, '.amazonq', 'rules')\n mkdirSync(rulesDir, { recursive: true })\n\n // Filter out private rules unless includePrivate is true\n const filteredRules = rules.filter(rule => !rule.metadata.private || options?.includePrivate)\n \n for (const rule of filteredRules) {\n // Support nested folders based on rule ID\n let filePath: string\n \n if (rule.metadata.id && rule.met