rxcc
Version:
A tool to pack repository contents to single file for AI consumption
1,863 lines (1,669 loc) • 1.12 MB
text/xml
This file is a merged representation of the entire codebase, combined into a single document by Repomix.
<file_summary>
This section contains a summary of this file.
<purpose>
This file contains a packed representation of the entire repository's contents.
It is designed to be easily consumable by AI systems for analysis, code review,
or other automated processes.
</purpose>
<file_format>
The content is organized as follows:
1. This summary section
2. Repository information
3. Directory structure
4. Repository files, each consisting of:
- File path as an attribute
- Full contents of the file
</file_format>
<usage_guidelines>
- This file should be treated as read-only. Any changes should be made to the
original repository files, not this packed version.
- When processing this file, use the file path to distinguish
between different files in the repository.
- Be aware that this file may contain sensitive information. Handle it with
the same level of security as you would the original repository.
- Pay special attention to the Repository Description. It contains important context and guidelines specific to this project.
- Pay special attention to the Repository Instruction. It contains important context and guidelines specific to this project.
</usage_guidelines>
<notes>
- Some files may have been excluded based on .gitignore rules and Repomix's configuration
- Binary files are not included in this packed representation. Please refer to the Directory Structure section for a complete list of file paths, including binary files
- Files matching patterns in .gitignore are excluded
- Files matching default ignore patterns are excluded
- Files are sorted by Git change count (files with more changes are at the bottom)
</notes>
<additional_info>
<user_provided_header>
This repository contains the source code for the Repomix tool.
Repomix is designed to pack repository contents into a single file,
making it easier for AI systems to analyze and process the codebase.
Key Features:
- Configurable ignore patterns
- Custom header text support
- Efficient file processing and packing
Please refer to the README.md file for more detailed information on usage and configuration.
</user_provided_header>
</additional_info>
</file_summary>
<directory_structure>
.devcontainer/
devcontainer.json
.github/
workflows/
ci.yml
codeql.yml
docker.yml
homebrew.yml
CODEOWNERS
copilot-instructions.md
dependabot.yml
FUNDING.yml
pull_request_template.md
renovate.json5
bin/
repomix.cjs
src/
cli/
actions/
defaultAction.ts
initAction.ts
mcpAction.ts
migrationAction.ts
remoteAction.ts
versionAction.ts
cliPrint.ts
cliRun.ts
cliSpinner.ts
types.ts
config/
configLoad.ts
configSchema.ts
defaultIgnore.ts
globalDirectory.ts
core/
file/
workers/
fileCollectWorker.ts
fileProcessWorker.ts
fileCollect.ts
fileManipulate.ts
filePathSort.ts
fileProcess.ts
fileSearch.ts
fileTreeGenerate.ts
fileTypes.ts
gitCommand.ts
packageJsonParse.ts
permissionCheck.ts
metrics/
workers/
fileMetricsWorker.ts
outputMetricsWorker.ts
types.ts
calculateAllFileMetrics.ts
calculateMetrics.ts
calculateOutputMetrics.ts
output/
outputStyles/
markdownStyle.ts
plainStyle.ts
xmlStyle.ts
outputGenerate.ts
outputGeneratorTypes.ts
outputSort.ts
outputStyleDecorate.ts
packager/
copyToClipboardIfEnabled.ts
writeOutputToDisk.ts
security/
workers/
securityCheckWorker.ts
filterOutUntrustedFiles.ts
securityCheck.ts
validateFileSafety.ts
tokenCount/
tokenCount.ts
treeSitter/
parseStrategies/
CssParseStrategy.ts
DefaultParseStrategy.ts
GoParseStrategy.ts
ParseStrategy.ts
PythonParseStrategy.ts
TypeScriptParseStrategy.ts
VueParseStrategy.ts
queries/
queryC.ts
queryCpp.ts
queryCSharp.ts
queryCss.ts
queryGo.ts
queryJava.ts
queryJavascript.ts
queryPhp.ts
queryPython.ts
queryRuby.ts
queryRust.ts
querySolidity.ts
querySwift.ts
queryTypescript.ts
queryVue.ts
README.md
ext2Lang.ts
lang2Query.ts
languageParser.ts
loadLanguage.ts
parseFile.ts
packager.ts
mcp/
prompts/
packRemoteRepositoryPrompts.ts
tools/
fileSystemReadDirectoryTool.ts
fileSystemReadFileTool.ts
mcpToolRuntime.ts
packCodebaseTool.ts
packRemoteRepositoryTool.ts
readRepomixOutputTool.ts
mcpServer.ts
shared/
constants.ts
errorHandle.ts
logger.ts
processConcurrency.ts
types.ts
index.ts
tests/
cli/
actions/
defaultAction.test.ts
initAction.test.ts
mcpAction.test.ts
migrationAction.test.ts
remoteAction.test.ts
versionAction.test.ts
cliPrint.test.ts
cliRun.test.ts
config/
configLoad.test.ts
configSchema.test.ts
globalDirectory.test.ts
core/
file/
fileCollect.test.ts
fileManipulate.test.ts
filePathSort.test.ts
fileProcess.test.ts
fileSearch.test.ts
gitCommand.test.ts
packageJsonParse.test.ts
permissionCheck.test.ts
metrics/
calculateAllFileMetrics.test.ts
calculateMetrics.test.ts
calculateOutputMetrics.test.ts
output/
outputStyles/
markdownStyle.test.ts
plainStyle.test.ts
xmlStyle.test.ts
outputGenerate.test.ts
outputSort.test.ts
outputStyleDecorate.test.ts
packager/
copyToClipboardIfEnabled.test.ts
writeOutputToDisk.test.ts
security/
workers/
securityCheckWorker.test.ts
filterOutUntrustedFiles.test.ts
securityCheck.test.ts
validateFileSafety.test.ts
tokenCount/
tokenCount.test.ts
treeSitter/
LanguageParser.test.ts
loadLanguage.test.ts
parseFile.c.test.ts
parseFile.comments.test.ts
parseFile.cpp.test.ts
parseFile.csharp.test.ts
parseFile.css.test.ts
parseFile.go.test.ts
parseFile.java.test.ts
parseFile.javascript.test.ts
parseFile.php.test.ts
parseFile.python.test.ts
parseFile.ruby.test.ts
parseFile.rust.test.ts
parseFile.solidity.test.ts
parseFile.swift.test.ts
parseFile.test.ts
parseFile.typescript.test.ts
parseFile.vue.test.ts
packager.test.ts
integration-tests/
packager.test.ts
mcp/
tools/
fileSystemReadDirectoryTool.test.ts
fileSystemReadFileTool.test.ts
packCodebaseTool.test.ts
mcpServer.test.ts
shared/
logger.test.ts
processConcurrency.test.ts
testing/
testUtils.ts
website/
client/
.vitepress/
config/
configDe.ts
configEnUs.ts
configEs.ts
configFr.ts
configJa.ts
configKo.ts
configPtBr.ts
configShard.ts
configZhCn.ts
theme/
component.d.ts
custom.css
index.ts
style.css
config.ts
components/
api/
client.ts
Home/
Hero.vue
PackButton.vue
TryIt.vue
TryItFileUpload.vue
TryItFolderUpload.vue
TryItPackOptions.vue
TryItResult.vue
TryItResultContent.vue
TryItResultErrorContent.vue
TryItUrlInput.vue
utils/
analytics.ts
requestHandlers.ts
resultViewer.ts
validation.ts
Home.vue
src/
de/
guide/
development/
index.md
setup.md
tips/
best-practices.md
code-compress.md
command-line-options.md
comment-removal.md
configuration.md
custom-instructions.md
index.md
installation.md
mcp-server.md
output.md
prompt-examples.md
remote-repository-processing.md
security.md
usage.md
index.md
en/
guide/
development/
index.md
setup.md
tips/
best-practices.md
code-compress.md
command-line-options.md
comment-removal.md
configuration.md
custom-instructions.md
index.md
installation.md
mcp-server.md
output.md
prompt-examples.md
remote-repository-processing.md
security.md
usage.md
index.md
es/
guide/
development/
index.md
setup.md
tips/
best-practices.md
code-compress.md
command-line-options.md
comment-removal.md
configuration.md
custom-instructions.md
index.md
installation.md
mcp-server.md
output.md
prompt-examples.md
remote-repository-processing.md
security.md
usage.md
index.md
fr/
guide/
development/
index.md
setup.md
tips/
best-practices.md
code-compress.md
command-line-options.md
comment-removal.md
configuration.md
custom-instructions.md
index.md
installation.md
mcp-server.md
output.md
prompt-examples.md
remote-repository-processing.md
security.md
usage.md
index.md
ja/
guide/
development/
index.md
setup.md
tips/
best-practices.md
code-compress.md
command-line-options.md
comment-removal.md
configuration.md
custom-instructions.md
index.md
installation.md
mcp-server.md
output.md
prompt-examples.md
remote-repository-processing.md
security.md
usage.md
index.md
ko/
guide/
development/
index.md
setup.md
tips/
best-practices.md
code-compress.md
command-line-options.md
comment-removal.md
configuration.md
custom-instructions.md
index.md
installation.md
mcp-server.md
output.md
prompt-examples.md
remote-repository-processing.md
security.md
usage.md
index.md
pt-br/
guide/
development/
index.md
setup.md
tips/
best-practices.md
code-compress.md
command-line-options.md
comment-removal.md
configuration.md
custom-instructions.md
index.md
installation.md
mcp-server.md
output.md
prompt-examples.md
remote-repository-processing.md
security.md
usage.md
index.md
public/
images/
repomix-logo.svg
zh-cn/
guide/
development/
index.md
setup.md
tips/
best-practices.md
code-compress.md
command-line-options.md
comment-removal.md
configuration.md
custom-instructions.md
index.md
installation.md
mcp-server.md
output.md
prompt-examples.md
remote-repository-processing.md
security.md
usage.md
index.md
.gitignore
.tool-versions
Dockerfile
package.json
tsconfig.json
tsconfig.node.json
server/
src/
schemas/
request.ts
utils/
cache.ts
errorHandler.ts
fileUtils.ts
logger.ts
network.ts
processConcurrency.ts
rateLimit.ts
sharedInstance.ts
time.ts
validation.ts
constants.ts
index.ts
processZipFile.ts
remoteRepo.ts
types.ts
.dockerignore
.gitignore
cloudbuild.yaml
Dockerfile
package.json
tsconfig.json
compose.yml
README.md
.clinerules
.codecov.yml
.cursorrules
.dockerignore
.editorconfig
.gitignore
.npmignore
.repomixignore
.secretlintrc.json
biome.json
CLAUDE.md
CODE_OF_CONDUCT.md
CONTRIBUTING.md
Dockerfile
LICENSE
llms-install.md
package.json
README.md
repomix-instruction.md
repomix.config.json
SECURITY.md
tsconfig.build.json
tsconfig.json
typos.toml
vitest.config.ts
</directory_structure>
<files>
This section contains the contents of the repository's files.
<file path=".devcontainer/devcontainer.json">
{
"name": "Repomix",
"image": "mcr.microsoft.com/devcontainers/typescript-node:1-22-bullseye",
"runArgs": ["--name", "repomix-devcontainer"],
"postCreateCommand": "npm install"
}
</file>
<file path=".github/workflows/ci.yml">
name: CI
on:
push:
branches: [ main ]
pull_request:
branches: [ main ]
workflow_dispatch:
jobs:
lint-biome:
name: Lint Biome
runs-on: ubuntu-latest
timeout-minutes: 10
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version-file: .tool-versions
cache: npm
- run: npm ci
- run: npm run lint-biome && git diff --exit-code
lint-ts:
name: Lint TypeScript
runs-on: ubuntu-latest
timeout-minutes: 10
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version-file: .tool-versions
cache: npm
- run: npm ci
- run: npm run lint-ts
lint-secretlint:
name: Lint Secretlint
runs-on: ubuntu-latest
timeout-minutes: 10
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version-file: .tool-versions
cache: npm
- run: npm ci
- run: npm run lint-secretlint
lint-action:
name: Lint GitHub Actions
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: docker://rhysd/actionlint:latest
with:
args: "-color"
check-typos:
name: Check typos
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: crate-ci/typos@master
test:
  name: Test
  strategy:
    matrix:
      os: [ubuntu-latest, windows-latest, macos-latest]
      node-version: [18.0.0, 18.x, 19.x, 20.x, 21.x, 22.x, 23.x]
  runs-on: ${{ matrix.os }}
  steps:
    - uses: actions/checkout@v4
    - name: Use Node.js ${{ matrix.node-version }}
      uses: actions/setup-node@v4
      with:
        node-version: ${{ matrix.node-version }}
    - run: npm ci
    # "--" is required so npm forwards --reporter=verbose to the test runner
    # instead of consuming it itself (matches the test-coverage job).
    - run: npm run test -- --reporter=verbose
      env:
        CI_OS: ${{ runner.os }}
test-coverage:
name: Test coverage
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version-file: .tool-versions
cache: npm
- run: npm ci
- run: npm run test-coverage -- --reporter=verbose
env:
CI_OS: ${{ runner.os }}
- uses: actions/upload-artifact@v4
with:
name: test-coverage
path: coverage/
- uses: codecov/codecov-action@v5
with:
fail_ci_if_error: true
directory: ./coverage
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
build-and-run:
name: Build and run
strategy:
matrix:
os: [ubuntu-latest, windows-latest, macos-latest]
node-version: [18.0.0, 18.x, 19.x, 20.x, 21.x, 22.x, 23.x]
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v4
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v4
with:
node-version: ${{ matrix.node-version }}
- run: npm ci
- run: npm run build
- name: Install only production dependencies
run: npm ci --omit=dev
- run: node bin/repomix.cjs
- run: node bin/repomix.cjs --version
- run: node bin/repomix.cjs --help
- name: Upload build artifact
uses: actions/upload-artifact@v4
with:
name: repomix-output-${{ matrix.os }}-${{ matrix.node-version }}.txt
path: repomix-output.txt
</file>
<file path=".github/workflows/codeql.yml">
name: "CodeQL"
on:
push:
branches: [ "main" ]
pull_request:
branches: [ "main" ]
schedule:
- cron: '25 11 * * 0'
jobs:
analyze:
name: Analyze (${{ matrix.language }})
runs-on: ${{ (matrix.language == 'swift' && 'macos-latest') || 'ubuntu-latest' }}
permissions:
security-events: write
packages: read
actions: read
contents: read
strategy:
fail-fast: false
matrix:
include:
- language: javascript-typescript
build-mode: none
steps:
- name: Checkout repository
uses: actions/checkout@v4
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v3
with:
languages: ${{ matrix.language }}
build-mode: ${{ matrix.build-mode }}
- if: matrix.build-mode == 'manual'
shell: bash
run: |
echo 'If you are using a "manual" build mode for one or more of the' \
'languages you are analyzing, replace this with the commands to build' \
'your code, for example:'
echo ' make bootstrap'
echo ' make release'
exit 1
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3
with:
category: "/language:${{matrix.language}}"
</file>
<file path=".github/workflows/docker.yml">
name: Docker
on:
push:
branches:
- "main"
paths-ignore:
- "**.md"
- LICENSE
pull_request:
branches:
- "*"
paths:
- "Dockerfile"
- ".dockerignore"
- ".github/workflows/docker.yml"
workflow_dispatch:
release:
types: [published, edited]
permissions:
contents: read
packages: write
jobs:
build-and-publish-image:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Docker metadata
id: meta
uses: docker/metadata-action@v5
with:
images: |
ghcr.io/yamadashy/repomix
tags: |
type=ref,event=branch
type=ref,event=pr
type=semver,pattern={{version}}
type=semver,pattern={{major}}
type=semver,pattern={{major}}.{{minor}}
type=raw,value=latest,enable=${{ github.event_name == 'release' }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Login to GitHub Container Registry
if: github.event_name != 'pull_request'
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and Publish Docker Image
uses: docker/build-push-action@v6
with:
context: .
push: ${{ github.event_name != 'pull_request' }}
cache-from: type=gha
cache-to: type=gha,mode=max
platforms: linux/amd64,linux/arm64,linux/arm/v7
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
</file>
<file path=".github/workflows/homebrew.yml">
name: Homebrew
on:
release:
types:
- created
jobs:
homebrew:
runs-on: macos-latest
steps:
- name: Set up Homebrew
uses: Homebrew/actions/setup-homebrew@master
with:
test-bot: false
- name: Configure Git user
uses: Homebrew/actions/git-user-config@master
- name: Bump packages
uses: Homebrew/actions/bump-packages@master
with:
token: ${{ secrets.COMMITTER_TOKEN }}
formulae: repomix
</file>
<file path=".github/CODEOWNERS">
# Default owner for everything in the repo
* @yamadashy
</file>
<file path=".github/dependabot.yml">
version: 2
updates:
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "daily"
</file>
<file path=".github/FUNDING.yml">
github: yamadashy
</file>
<file path=".github/pull_request_template.md">
<!-- Please include a summary of the changes -->
## Checklist
- [ ] Run `npm run test`
- [ ] Run `npm run lint`
</file>
<file path=".github/renovate.json5">
{
"$schema": "https://docs.renovatebot.com/renovate-schema.json",
"extends": [
"config:recommended",
"schedule:weekly",
'group:allNonMajor'
],
"rangeStrategy": "bump",
"dependencyDashboard": false,
"labels": ["dependencies", "renovate"],
"packageRules": [
{
matchDepTypes: ['peerDependencies'],
enabled: false,
},
],
"ignoreDeps": [
"node",
]
}
</file>
<file path="bin/repomix.cjs">
#!/usr/bin/env node
// CLI bootstrap: checks the Node.js version, installs process-level error
// handlers, then dynamically imports and runs the compiled CLI entry point.
const nodeVersion = process.versions.node;
const [major] = nodeVersion.split('.').map(Number);
// Process exit codes used by this launcher.
const EXIT_CODES = {
SUCCESS: 0,
ERROR: 1,
};
// Bail out early on unsupported runtimes before loading any of the CLI code.
if (major < 16) {
console.error(`Repomix requires Node.js version 16 or higher. Current version: ${nodeVersion}\n`);
process.exit(EXIT_CODES.ERROR);
}
// Installs last-resort handlers so unexpected failures are reported instead
// of the process dying silently, and SIGINT/SIGTERM exit cleanly.
function setupErrorHandlers() {
process.on('uncaughtException', (error) => {
console.error('Uncaught Exception:', error);
process.exit(EXIT_CODES.ERROR);
});
process.on('unhandledRejection', (reason) => {
console.error('Unhandled Promise Rejection:', reason);
process.exit(EXIT_CODES.ERROR);
});
// Both termination signals exit with the success code.
function shutdown() {
process.exit(EXIT_CODES.SUCCESS);
}
process.on('SIGINT', () => {
console.log('\nReceived SIGINT. Shutting down...');
shutdown();
});
process.on('SIGTERM', shutdown);
}
// Entry point: set up handlers, then load and run the CLI.
(async () => {
try {
setupErrorHandlers();
const { run } = await import('../lib/cli/cliRun.js');
await run();
} catch (error) {
// Surface structured details for Error instances; fall back to the raw value.
if (error instanceof Error) {
console.error('Fatal Error:', {
name: error.name,
message: error.message,
stack: error.stack,
});
} else {
console.error('Fatal Error:', error);
}
process.exit(EXIT_CODES.ERROR);
}
})();
</file>
<file path="src/cli/actions/migrationAction.ts">
import * as fs from 'node:fs/promises';
import path from 'node:path';
import * as prompts from '@clack/prompts';
import pc from 'picocolors';
import { getGlobalDirectory } from '../../config/globalDirectory.js';
import { logger } from '../../shared/logger.js';
/**
 * Absolute file paths involved in a Repopack -> Repomix migration.
 * "old*" fields point at legacy Repopack files; "new*" fields are the
 * Repomix locations they are migrated to.
 */
interface MigrationPaths {
oldConfigPath: string;
newConfigPath: string;
oldIgnorePath: string;
newIgnorePath: string;
oldInstructionPath: string;
newInstructionPath: string;
oldOutputPaths: string[];
newOutputPaths: string[];
oldGlobalConfigPath: string;
newGlobalConfigPath: string;
}
/**
 * Per-step outcome flags for a migration run; `error` is set when the
 * overall migration aborted with a failure.
 */
interface MigrationResult {
configMigrated: boolean;
ignoreMigrated: boolean;
instructionMigrated: boolean;
outputFilesMigrated: string[];
globalConfigMigrated: boolean;
error?: Error;
}
/** Returns true when the given path is accessible on disk, false otherwise. */
const fileExists = async (filePath: string): Promise<boolean> =>
  fs.access(filePath).then(
    () => true,
    () => false,
  );
/**
 * Rewrites every occurrence of the legacy product name in `content`:
 * "repopack" -> "repomix" and "Repopack" -> "Repomix" (case-sensitive).
 */
const replaceRepopackString = (content: string): string => {
  const lowerFixed = content.replace(/repopack/g, 'repomix');
  return lowerFixed.replace(/Repopack/g, 'Repomix');
};
/**
 * Rewrites legacy "repopack" references inside the file at `filePath`.
 * Returns true when the file was modified, false when no change was needed.
 */
const updateFileContent = async (filePath: string): Promise<boolean> => {
  const original = await fs.readFile(filePath, 'utf8');
  const rewritten = replaceRepopackString(original);
  if (rewritten === original) {
    // Nothing to do; leave the file untouched.
    return false;
  }
  await fs.writeFile(filePath, rewritten, 'utf8');
  const relativePath = path.relative(process.cwd(), filePath);
  logger.log(`Updated repopack references in ${pc.cyan(relativePath)}`);
  return true;
};
/**
 * Rewrites legacy "repopack" path segments inside a JSON config string.
 *
 * Updates `output.instructionFilePath` and `output.filePath` when present,
 * replacing every "repopack" occurrence (the previous string-pattern
 * `String.replace` only rewrote the first occurrence, so a path containing
 * the name twice was only half-migrated). Returns the re-serialized JSON
 * with 2-space indentation, or the input unchanged when it is not valid JSON.
 */
const updateInstructionPath = (content: string): string => {
  try {
    const config = JSON.parse(content);
    if (config.output?.instructionFilePath) {
      config.output.instructionFilePath = config.output.instructionFilePath.replace(/repopack/g, 'repomix');
    }
    // Also update output.filePath if it exists
    if (config.output?.filePath) {
      config.output.filePath = config.output.filePath.replace(/repopack/g, 'repomix');
    }
    return JSON.stringify(config, null, 2);
  } catch {
    // Not JSON (or malformed) — hand the content back untouched.
    return content;
  }
};
/**
 * Builds the candidate Repopack output file paths under `rootDir` together
 * with their Repomix replacements, one pair per supported extension
 * (.txt, .xml, .md), in that order.
 */
const getOutputFilePaths = (rootDir: string): { oldPaths: string[]; newPaths: string[] } => {
  const buildPaths = (prefix: string): string[] =>
    ['.txt', '.xml', '.md'].map((ext) => path.join(rootDir, `${prefix}-output${ext}`));
  return {
    oldPaths: buildPaths('repopack'),
    newPaths: buildPaths('repomix'),
  };
};
/**
 * Migrate a single file from old path to new path
 *
 * Returns false when the old file is missing, when the user declines to
 * overwrite an existing destination, or when the copy fails; true on
 * success. On success the rewritten content is written to the new path and
 * the old file is deleted.
 */
const migrateFile = async (
oldPath: string,
newPath: string,
description: string,
isConfig = false,
): Promise<boolean> => {
if (!(await fileExists(oldPath))) {
return false;
}
// Ask before clobbering an existing destination file.
const exists = await fileExists(newPath);
if (exists) {
const shouldOverwrite = await prompts.confirm({
message: `${description} already exists at ${newPath}. Do you want to overwrite it?`,
});
// A cancelled prompt (Ctrl+C) is treated the same as an explicit "no".
if (prompts.isCancel(shouldOverwrite) || !shouldOverwrite) {
logger.info(`Skipping migration of ${description}`);
return false;
}
}
try {
// Read and update content
let content = await fs.readFile(oldPath, 'utf8');
content = replaceRepopackString(content);
// For config files, also update instructionFilePath and output.filePath
if (isConfig) {
content = updateInstructionPath(content);
}
// Ensure the target directory exists
await fs.mkdir(path.dirname(newPath), { recursive: true });
// Write to new file
await fs.writeFile(newPath, content, 'utf8');
// Remove old file
await fs.unlink(oldPath);
const relativeOldPath = path.relative(process.cwd(), oldPath);
const relativeNewPath = path.relative(process.cwd(), newPath);
logger.log(`Renamed ${description} from ${relativeOldPath} to ${relativeNewPath}`);
return true;
} catch (error) {
// Failures are logged but not rethrown so the caller can continue
// migrating the remaining files.
logger.error(`Failed to migrate ${description}:`, error);
return false;
}
};
/**
 * Rewrites legacy "repopack" references inside the project's .gitignore and
 * .repomixignore files, when those files exist. Files without any reference
 * are left untouched (logged at debug level only).
 */
const updateIgnoreFiles = async (rootDir: string): Promise<void> => {
  const candidates: Array<[string, string]> = [
    ['.gitignore', path.join(rootDir, '.gitignore')],
    ['.repomixignore', path.join(rootDir, '.repomixignore')],
  ];
  for (const [name, ignorePath] of candidates) {
    if (!(await fileExists(ignorePath))) {
      continue;
    }
    const updated = await updateFileContent(ignorePath);
    if (!updated) {
      logger.debug(`No changes needed in ${name}`);
    }
  }
};
/**
 * Get all migration related file paths
 *
 * Builds every old (Repopack) / new (Repomix) path pair the migration
 * touches: local config, ignore file, instruction file, output files, and
 * the global config file.
 */
const getMigrationPaths = (rootDir: string): MigrationPaths => {
const { oldPaths: oldOutputPaths, newPaths: newOutputPaths } = getOutputFilePaths(rootDir);
// NOTE(review): process.env.HOME is typically unset on Windows, which would
// make this resolve to a relative ".config/repopack" path — consider
// os.homedir() instead; confirm intended platform support.
const oldGlobalDirectory = path.join(process.env.HOME || '', '.config', 'repopack');
const newGlobalDirectory = getGlobalDirectory();
return {
oldConfigPath: path.join(rootDir, 'repopack.config.json'),
newConfigPath: path.join(rootDir, 'repomix.config.json'),
oldIgnorePath: path.join(rootDir, '.repopackignore'),
newIgnorePath: path.join(rootDir, '.repomixignore'),
oldInstructionPath: path.join(rootDir, 'repopack-instruction.md'),
newInstructionPath: path.join(rootDir, 'repomix-instruction.md'),
oldOutputPaths,
newOutputPaths,
oldGlobalConfigPath: path.join(oldGlobalDirectory, 'repopack.config.json'),
newGlobalConfigPath: path.join(newGlobalDirectory, 'repomix.config.json'),
};
};
/**
 * Migrates each old Repopack output file to its Repomix counterpart, one at
 * a time (sequentially, since each migration may prompt the user). Returns
 * the list of new paths that were actually migrated.
 */
const migrateOutputFiles = async (oldPaths: string[], newPaths: string[]): Promise<string[]> => {
  const migratedFiles: string[] = [];
  for (const [index, oldPath] of oldPaths.entries()) {
    const newPath = newPaths[index];
    const description = `Output file (${path.extname(oldPath)})`;
    if (await migrateFile(oldPath, newPath, description)) {
      migratedFiles.push(newPath);
    }
  }
  return migratedFiles;
};
/**
 * Entry point for the Repopack -> Repomix migration flow.
 *
 * Detects legacy Repopack files under `rootDir` (plus the old global config
 * directory), asks the user for confirmation, then migrates each detected
 * file to its Repomix equivalent and rewrites references in the ignore
 * files. Errors are captured on the returned result rather than thrown, so
 * callers always receive a MigrationResult.
 */
export const runMigrationAction = async (rootDir: string): Promise<MigrationResult> => {
const result: MigrationResult = {
configMigrated: false,
ignoreMigrated: false,
instructionMigrated: false,
outputFilesMigrated: [],
globalConfigMigrated: false,
};
try {
const paths = getMigrationPaths(rootDir);
// Check if migration is needed
const hasOldConfig = await fileExists(paths.oldConfigPath);
const hasOldIgnore = await fileExists(paths.oldIgnorePath);
const hasOldInstruction = await fileExists(paths.oldInstructionPath);
const hasOldGlobalConfig = await fileExists(paths.oldGlobalConfigPath);
// True when at least one legacy output file (.txt/.xml/.md) exists.
const hasOldOutput = await Promise.all(paths.oldOutputPaths.map(fileExists)).then((results) =>
results.some((exists) => exists),
);
if (!hasOldConfig && !hasOldIgnore && !hasOldInstruction && !hasOldOutput && !hasOldGlobalConfig) {
logger.debug('No Repopack files found to migrate.');
return result;
}
// Show migration notice based on what needs to be migrated
let migrationMessage = `Found ${pc.green('Repopack')} `;
const items = [];
if (hasOldConfig || hasOldIgnore || hasOldInstruction || hasOldOutput) items.push('local configuration');
if (hasOldGlobalConfig) items.push('global configuration');
migrationMessage += `${items.join(' and ')}. Would you like to migrate to ${pc.green('Repomix')}?`;
// Confirm migration with user
const shouldMigrate = await prompts.confirm({
message: migrationMessage,
});
// A cancelled prompt is treated the same as declining.
if (prompts.isCancel(shouldMigrate) || !shouldMigrate) {
logger.info('Migration cancelled.');
return result;
}
// Show migration notice
logger.info(pc.cyan('\nMigrating from Repopack to Repomix...'));
logger.log('');
// Migrate config file
if (hasOldConfig) {
result.configMigrated = await migrateFile(paths.oldConfigPath, paths.newConfigPath, 'Configuration file', true);
}
// Migrate global config file
if (hasOldGlobalConfig) {
result.globalConfigMigrated = await migrateFile(
paths.oldGlobalConfigPath,
paths.newGlobalConfigPath,
'Global configuration file',
true,
);
}
// Migrate ignore file
if (hasOldIgnore) {
result.ignoreMigrated = await migrateFile(paths.oldIgnorePath, paths.newIgnorePath, 'Ignore file');
}
// Migrate instruction file
if (hasOldInstruction) {
result.instructionMigrated = await migrateFile(
paths.oldInstructionPath,
paths.newInstructionPath,
'Instruction file',
);
}
// Migrate output files
if (hasOldOutput) {
result.outputFilesMigrated = await migrateOutputFiles(paths.oldOutputPaths, paths.newOutputPaths);
}
// Update content in gitignore and repomixignore
await updateIgnoreFiles(rootDir);
// Show success message
if (
result.configMigrated ||
result.ignoreMigrated ||
result.instructionMigrated ||
result.outputFilesMigrated.length > 0 ||
result.globalConfigMigrated
) {
logger.log('');
logger.success('✔ Migration completed successfully!');
logger.log('');
logger.info(
'You can now use Repomix commands as usual. The old Repopack files have been migrated to the new format.',
);
logger.log('');
}
return result;
} catch (error) {
// Normalize non-Error throwables so result.error is always an Error.
if (error instanceof Error) {
result.error = error;
} else {
result.error = new Error(String(error));
}
logger.error('An error occurred during migration:', error);
return result;
}
};
</file>
<file path="src/cli/actions/remoteAction.ts">
import * as fs from 'node:fs/promises';
import os from 'node:os';
import path from 'node:path';
import GitUrlParse, { type GitUrl } from 'git-url-parse';
import pc from 'picocolors';
import { execGitShallowClone, isGitInstalled } from '../../core/file/gitCommand.js';
import { RepomixError } from '../../shared/errorHandle.js';
import { logger } from '../../shared/logger.js';
import { Spinner } from '../cliSpinner.js';
import type { CliOptions } from '../types.js';
import { type DefaultActionRunnerResult, runDefaultAction } from './defaultAction.js';
/**
 * git-url-parse result extended with a `commit` field read by
 * parseRemoteValue. NOTE(review): presumed to be populated by the library
 * for commit-pinned URLs but absent from its type declarations — confirm
 * against the git-url-parse version in package.json.
 */
interface IGitUrl extends GitUrl {
commit: string | undefined;
}
/**
 * Clones a remote repository into a temporary directory, runs the default
 * pack action there, and copies the generated output file back into the
 * caller's working directory. The temporary clone is removed in all cases
 * (success or failure) via the finally block.
 *
 * `deps` exists so tests can inject the git helpers and the default action.
 *
 * @throws RepomixError when git is unavailable or `repoUrl` cannot be parsed.
 */
export const runRemoteAction = async (
repoUrl: string,
cliOptions: CliOptions,
deps = {
isGitInstalled,
execGitShallowClone,
runDefaultAction,
},
): Promise<DefaultActionRunnerResult> => {
if (!(await deps.isGitInstalled())) {
throw new RepomixError('Git is not installed or not in the system PATH.');
}
const parsedFields = parseRemoteValue(repoUrl);
const spinner = new Spinner('Cloning repository...', cliOptions);
const tempDirPath = await createTempDirectory();
let result: DefaultActionRunnerResult;
try {
spinner.start();
// Clone the repository
// A branch passed on the CLI takes precedence over one embedded in the URL.
await cloneRepository(parsedFields.repoUrl, tempDirPath, cliOptions.remoteBranch || parsedFields.remoteBranch, {
execGitShallowClone: deps.execGitShallowClone,
});
spinner.succeed('Repository cloned successfully!');
logger.log('');
// Run the default action on the cloned repository
result = await deps.runDefaultAction([tempDirPath], tempDirPath, cliOptions);
await copyOutputToCurrentDirectory(tempDirPath, process.cwd(), result.config.output.filePath);
} catch (error) {
spinner.fail('Error during repository cloning. cleanup...');
throw error;
} finally {
// Cleanup the temporary directory
await cleanupTempDirectory(tempDirPath);
}
return result;
};
// GitHub shorthand ("owner/repo", e.g. yamadashy/repomix): each segment must
// start and end with an alphanumeric character and may contain ".", "_" or
// "-" in between.
const SEGMENT_PATTERN = '[a-zA-Z0-9](?:[a-zA-Z0-9._-]*[a-zA-Z0-9])?';
const SHORTHAND_REGEX = new RegExp(`^${SEGMENT_PATTERN}/${SEGMENT_PATTERN}$`);
/** Returns true when `remoteValue` looks like a GitHub "owner/repo" shorthand. */
export const isValidShorthand = (remoteValue: string): boolean => SHORTHAND_REGEX.test(remoteValue);
/**
 * Resolves a user-supplied remote value into a clonable repository URL plus
 * an optional branch/ref.
 *
 * Accepts either GitHub "owner/repo" shorthand (expanded to an HTTPS URL) or
 * a full git URL parsable by git-url-parse. When the parsed URL carries a
 * ref, that ref becomes the remote branch (with any filepath suffix
 * appended); otherwise a parsed commit value is used as the branch.
 *
 * @throws RepomixError when the value is neither a valid shorthand nor a
 * parsable git URL.
 */
export const parseRemoteValue = (remoteValue: string): { repoUrl: string; remoteBranch: string | undefined } => {
if (isValidShorthand(remoteValue)) {
logger.trace(`Formatting GitHub shorthand: ${remoteValue}`);
return {
repoUrl: `https://github.com/${remoteValue}.git`,
remoteBranch: undefined,
};
}
try {
const parsedFields = GitUrlParse(remoteValue) as IGitUrl;
// This will make parsedFields.toString() automatically append '.git' to the returned url
parsedFields.git_suffix = true;
// Validate the trailing owner/repo segment of the parsed full name.
const ownerSlashRepo =
parsedFields.full_name.split('/').length > 1 ? parsedFields.full_name.split('/').slice(-2).join('/') : '';
if (ownerSlashRepo !== '' && !isValidShorthand(ownerSlashRepo)) {
// NOTE(review): this throw happens inside the try, so the catch below
// replaces this specific message with the generic one — confirm intended.
throw new RepomixError('Invalid owner/repo in repo URL');
}
const repoUrl = parsedFields.toString(parsedFields.protocol);
// A ref (branch/tag) in the URL becomes the remote branch; a filepath
// suffix (if any) is appended after it.
if (parsedFields.ref) {
return {
repoUrl: repoUrl,
remoteBranch: parsedFields.filepath ? `${parsedFields.ref}/${parsedFields.filepath}` : parsedFields.ref,
};
}
// Commit-pinned URLs use the commit hash as the branch value.
if (parsedFields.commit) {
return {
repoUrl: repoUrl,
remoteBranch: parsedFields.commit,
};
}
return {
repoUrl: repoUrl,
remoteBranch: undefined,
};
} catch (error) {
throw new RepomixError('Invalid remote repository URL or repository shorthand (owner/repo)');
}
};
/** Returns true when `remoteValue` parses as a remote repo URL or shorthand. */
export const isValidRemoteValue = (remoteValue: string): boolean => {
  try {
    parseRemoteValue(remoteValue);
  } catch {
    return false;
  }
  return true;
};
/** Creates a unique "repomix-" prefixed directory under the OS temp root. */
export const createTempDirectory = async (): Promise<string> => {
  const prefix = path.join(os.tmpdir(), 'repomix-');
  const tempDir = await fs.mkdtemp(prefix);
  logger.trace(`Created temporary directory. (path: ${pc.dim(tempDir)})`);
  return tempDir;
};
/**
 * Shallow-clones `url` into `directory`, optionally checking out
 * `remoteBranch`. Clone failures are wrapped in a RepomixError carrying the
 * underlying message. `deps` allows tests to inject the git helper.
 */
export const cloneRepository = async (
  url: string,
  directory: string,
  remoteBranch?: string,
  deps = {
    execGitShallowClone,
  },
): Promise<void> => {
  logger.log(`Clone repository: ${url} to temporary directory. ${pc.dim(`path: ${directory}`)}`);
  logger.log('');
  try {
    await deps.execGitShallowClone(url, directory, remoteBranch);
  } catch (error) {
    const reason = (error as Error).message;
    throw new RepomixError(`Failed to clone repository: ${reason}`);
  }
};
/** Recursively removes the temporary clone directory, ignoring missing paths. */
export const cleanupTempDirectory = async (directory: string): Promise<void> => {
  logger.trace(`Cleaning up temporary directory: ${directory}`);
  await fs.rm(directory, { force: true, recursive: true });
};
/**
 * Copies the generated output file from the temporary clone directory into
 * the user's working directory, creating parent directories as needed.
 * Failures are wrapped in a RepomixError with the underlying message.
 */
export const copyOutputToCurrentDirectory = async (
  sourceDir: string,
  targetDir: string,
  outputFileName: string,
): Promise<void> => {
  const resolveOutput = (dir: string): string => path.resolve(dir, outputFileName);
  const sourcePath = resolveOutput(sourceDir);
  const targetPath = resolveOutput(targetDir);
  try {
    logger.trace(`Copying output file from: ${sourcePath} to: ${targetPath}`);
    // Ensure the destination directory exists before copying.
    await fs.mkdir(path.dirname(targetPath), { recursive: true });
    await fs.copyFile(sourcePath, targetPath);
  } catch (error) {
    throw new RepomixError(`Failed to copy output file: ${(error as Error).message}`);
  }
};
</file>
<file path="src/cli/actions/versionAction.ts">
import { getVersion } from '../../core/file/packageJsonParse.js';
import { logger } from '../../shared/logger.js';
export const runVersionAction = async (): Promise<void> => {
const version = await getVersion();
logger.log(version);
};
</file>
<file path="src/cli/cliPrint.ts">
import path from "node:path";
import pc from "picocolors";
import type { RepomixConfigMerged } from "../config/configSchema.js";
import type { SuspiciousFileResult } from "../core/security/securityCheck.js";
import { logger } from "../shared/logger.js";
/**
 * Prints the end-of-run summary: pack statistics, output location and the
 * result of the security scan.
 *
 * @param totalFiles - Number of files included in the output.
 * @param totalTokens - Total token count of the packed content.
 * @param totalLines - Total line count of the packed content.
 * @param outputPath - Path of the generated output file.
 * @param suspiciousFilesResults - Files flagged by the security check.
 * @param config - Merged configuration (controls whether the check ran).
 */
export const printSummary = (
  totalFiles: number,
  totalTokens: number,
  totalLines: number,
  outputPath: string,
  suspiciousFilesResults: SuspiciousFileResult[],
  config: RepomixConfigMerged
) => {
  let securityCheckMessage = "";
  if (config.security.enableSecurityCheck) {
    if (suspiciousFilesResults.length > 0) {
      securityCheckMessage = pc.yellow(
        `${suspiciousFilesResults.length.toLocaleString()} suspicious file(s) detected and excluded`
      );
    } else {
      // Previously an empty branch: a clean scan left the message blank and
      // the security line was silently skipped. Report the passing result.
      securityCheckMessage = pc.white("✔ no suspicious files detected");
    }
  } else {
    securityCheckMessage = pc.dim("security check disabled");
  }
  // "files:" now matches the "lines:"/"tokens:" label style below.
  logger.log(`${pc.white("files:")} ${pc.white(totalFiles.toLocaleString())}`);
  logger.log(`${pc.white("lines:")} ${pc.white(totalLines.toLocaleString())}`);
  logger.log(
    `${pc.white("tokens:")} ${pc.white(totalTokens.toLocaleString())}`
  );
  logger.log("");
  logger.log(`${pc.white("output:")} ${pc.white(outputPath)}`);
  if (securityCheckMessage)
    logger.log(`${pc.white("security:")} ${pc.white(securityCheckMessage)}`);
};
/**
 * Lists the files excluded by the security check, showing each flagged path
 * (relative to rootDir) together with the reasons it was flagged.
 * No-op when the check is disabled or nothing was flagged.
 */
export const printSecurityCheck = (
  rootDir: string,
  suspiciousFilesResults: SuspiciousFileResult[],
  config: RepomixConfigMerged
) => {
  if (!config.security.enableSecurityCheck) {
    return;
  }
  // Nothing flagged — nothing to report (the original kept a dead empty
  // branch here; an early return is clearer).
  if (suspiciousFilesResults.length === 0) {
    return;
  }
  logger.log(
    pc.yellow(
      `${suspiciousFilesResults.length} suspicious file(s) detected and excluded from the output:`
    )
  );
  suspiciousFilesResults.forEach((suspiciousFilesResult, index) => {
    const relativeFilePath = path.relative(
      rootDir,
      suspiciousFilesResult.filePath
    );
    logger.log(`${pc.white(`${index + 1}.`)} ${pc.white(relativeFilePath)}`);
    logger.log(
      pc.dim(` - ${suspiciousFilesResult.messages.join("\n - ")}`)
    );
  });
  logger.log(
    pc.yellow(
      "\nThese files have been excluded from the output for security reasons."
    )
  );
  logger.log(
    pc.yellow(
      "Please review these files for potential sensitive information."
    )
  );
};
/**
 * Prints the top-N largest files ranked by line count, annotated with line
 * and token counts.
 *
 * @param fileLineCounts - Line counts keyed by file path.
 * @param fileTokenCounts - Token counts keyed by file path.
 * @param topFilesLength - Number of files to display.
 */
export const printTopFiles = (
  fileLineCounts: Record<string, number>,
  fileTokenCounts: Record<string, number>,
  topFilesLength: number
) => {
  // Widen the divider slightly with the digit count of N.
  const topFilesLengthStrLen = topFilesLength.toString().length;
  logger.log(pc.white(`top ${topFilesLength} files`));
  logger.log(
    pc.dim(
      `─────────────────────────────────────────────────${"─".repeat(
        topFilesLengthStrLen
      )}`
    )
  );
  const topFiles = Object.entries(fileLineCounts)
    .sort((a, b) => b[1] - a[1])
    .slice(0, topFilesLength);
  topFiles.forEach(([filePath, lineCount], index) => {
    // Guard against a path missing from the token map; the original code
    // threw TypeError on `undefined.toLocaleString()` in that case.
    const tokenCount = fileTokenCounts[filePath] ?? 0;
    const indexString = `${index + 1}.`.padEnd(3, " ");
    logger.log(
      `${pc.white(indexString)} ${pc.white(filePath)} ${pc.dim(
        `(${lineCount.toLocaleString()} lines, ${tokenCount.toLocaleString()} tokens)`
      )}`
    );
  });
};
</file>
<file path="src/cli/cliSpinner.ts">
import cliSpinners from 'cli-spinners';
import logUpdate from 'log-update';
import pc from 'picocolors';
import type { CliOptions } from './types.js';
/**
 * Minimal terminal spinner built on cli-spinners + log-update.
 * All rendering is suppressed in quiet or verbose mode so spinner frames
 * never interleave with log output.
 */
export class Spinner {
  private readonly animation = cliSpinners.dots;
  private text: string;
  private frameIndex = 0;
  private timer: ReturnType<typeof setInterval> | null = null;
  private readonly suppressed: boolean;

  constructor(message: string, cliOptions: CliOptions) {
    this.text = message;
    // If the user has specified the verbose flag, don't show the spinner
    this.suppressed = cliOptions.quiet || cliOptions.verbose || false;
  }

  /** Begins the frame animation; no-op when suppressed. */
  start(): void {
    if (this.suppressed) {
      return;
    }
    const { frames } = this.animation;
    this.timer = setInterval(() => {
      this.frameIndex = (this.frameIndex + 1) % frames.length;
      logUpdate(`${pc.cyan(frames[this.frameIndex])} ${this.text}`);
    }, this.animation.interval);
  }

  /** Replaces the message shown next to the spinner. */
  update(message: string): void {
    if (this.suppressed) {
      return;
    }
    this.text = message;
  }

  /** Halts the animation and leaves `finalMessage` on screen. */
  stop(finalMessage: string): void {
    if (this.suppressed) {
      return;
    }
    if (this.timer) {
      clearInterval(this.timer);
      this.timer = null;
    }
    logUpdate(finalMessage);
    logUpdate.done();
  }

  /** Stops with a green check mark. */
  succeed(message: string): void {
    if (this.suppressed) {
      return;
    }
    this.stop(`${pc.green('✔')} ${message}`);
  }

  /** Stops with a red cross. */
  fail(message: string): void {
    if (this.suppressed) {
      return;
    }
    this.stop(`${pc.red('✖')} ${message}`);
  }
}
</file>
<file path="src/config/configLoad.ts">
import * as fs from 'node:fs/promises';
import path from 'node:path';
import JSON5 from 'json5';
import pc from 'picocolors';
import { RepomixError, rethrowValidationErrorIfZodError } from '../shared/errorHandle.js';
import { logger } from '../shared/logger.js';
import {
type RepomixConfigCli,
type RepomixConfigFile,
type RepomixConfigMerged,
defaultConfig,
defaultFilePathMap,
repomixConfigFileSchema,
repomixConfigMergedSchema,
} from './configSchema.js';
import { getGlobalDirectory } from './globalDirectory.js';
// Config file name looked up relative to the target directory (and the
// global config directory) when no explicit --config path is given.
const defaultConfigPath = 'repomix.config.json';

/** Absolute path of the per-user (global) config file. */
const getGlobalConfigPath = () => path.join(getGlobalDirectory(), 'repomix.config.json');
/**
 * Resolves and loads the repomix config file.
 *
 * Search order: the explicit --config path (must exist), then the local
 * repomix.config.json under rootDir, then the per-user global config.
 * When no explicit path was given and nothing is found, an empty config
 * is returned.
 *
 * @throws RepomixError when an explicitly requested config file is missing.
 */
export const loadFileConfig = async (rootDir: string, argConfigPath: string | null): Promise<RepomixConfigFile> => {
  // Falsy check mirrors the CLI contract: null/empty means "use defaults".
  const useDefaultConfig = !argConfigPath;
  const configPath = argConfigPath || defaultConfigPath;
  const fullPath = path.resolve(rootDir, configPath);
  logger.trace('Loading local config from:', fullPath);

  // Existence probe that treats any stat failure as "not a file".
  const isFile = (candidate: string): Promise<boolean> =>
    fs
      .stat(candidate)
      .then((stats) => stats.isFile())
      .catch(() => false);

  if (await isFile(fullPath)) {
    return await loadAndValidateConfig(fullPath);
  }
  if (!useDefaultConfig) {
    // The user asked for a specific file and it is missing: hard error.
    throw new RepomixError(`Config file not found at ${configPath}`);
  }

  // Fall back to the per-user global config.
  const globalConfigPath = getGlobalConfigPath();
  logger.trace('Loading global config from:', globalConfigPath);
  if (await isFile(globalConfigPath)) {
    return await loadAndValidateConfig(globalConfigPath);
  }
  logger.log(
    pc.dim(
      `No custom config found at ${configPath} or global config at ${globalConfigPath}.\nYou can add a config file for additional settings. Please check https://github.com/yamadashy/repomix for more information.`,
    ),
  );
  return {};
};
/**
 * Reads a config file, parses it as JSON5 and validates it against the
 * repomix config file schema.
 *
 * @throws RepomixError on unreadable files, malformed JSON5, or schema
 *   violations (the latter rethrown with schema context).
 */
const loadAndValidateConfig = async (filePath: string): Promise<RepomixConfigFile> => {
  try {
    const rawContent = await fs.readFile(filePath, 'utf-8');
    const parsed = JSON5.parse(rawContent);
    return repomixConfigFileSchema.parse(parsed);
  } catch (error) {
    // Zod validation failures get schema context; everything else is wrapped.
    rethrowValidationErrorIfZodError(error, 'Invalid config schema');
    if (error instanceof SyntaxError) {
      throw new RepomixError(`Invalid JSON5 in config file ${filePath}: ${error.message}`);
    }
    if (error instanceof Error) {
      throw new RepomixError(`Error loading config from ${filePath}: ${error.message}`);
    }
    throw new RepomixError(`Error loading config from ${filePath}`);
  }
};
/**
 * Merges configuration from three layers — built-in defaults, the config
 * file, and CLI flags — with CLI taking highest precedence, then validates
 * the result against the merged-config schema.
 *
 * @param cwd - Working directory recorded on the merged config.
 * @param fileConfig - Options loaded from repomix.config.json (may be empty).
 * @param cliConfig - Options supplied on the command line.
 * @returns The validated merged configuration.
 * @throws Validation error (via rethrowValidationErrorIfZodError) when the
 *   merged result violates the schema.
 */
export const mergeConfigs = (
  cwd: string,
  fileConfig: RepomixConfigFile,
  cliConfig: RepomixConfigCli,
): RepomixConfigMerged => {
  logger.trace('Default config:', defaultConfig);
  // BUG FIX: the original aliased defaultConfig directly, so assigning
  // output.filePath below mutated the shared module-level default object,
  // leaking state into every subsequent call. Clone the output section
  // (the only part mutated) before touching it.
  const baseConfig = {
    ...defaultConfig,
    output: { ...defaultConfig.output },
  };
  // If the output file path is not provided in the config file or CLI, use the default file path for the style
  if (cliConfig.output?.filePath == null && fileConfig.output?.filePath == null) {
    const style = cliConfig.output?.style || fileConfig.output?.style || baseConfig.output.style;
    baseConfig.output.filePath = defaultFilePathMap[style];
    logger.trace('Default output file path is set to:', baseConfig.output.filePath);
  }
  const mergedConfig = {
    cwd,
    output: {
      ...baseConfig.output,
      ...fileConfig.output,
      ...cliConfig.output,
    },
    include: [...(baseConfig.include || []), ...(fileConfig.include || []), ...(cliConfig.include || [])],
    ignore: {
      ...baseConfig.ignore,
      ...fileConfig.ignore,
      ...cliConfig.ignore,
      customPatterns: [
        ...(baseConfig.ignore.customPatterns || []),
        ...(fileConfig.ignore?.customPatterns || []),
        ...(cliConfig.ignore?.customPatterns || []),
      ],
    },
    security: {
      ...baseConfig.security,
      ...fileConfig.security,
      ...cliConfig.security,
    },
  };
  try {
    return repomixConfigMergedSchema.parse(mergedConfig);
  } catch (error) {
    rethrowValidationErrorIfZodError(error, 'Invalid merged config');
    throw error;
  }
};
</file>
<file path="src/config/globalDirectory.ts">
import os from 'node:os';
import path from 'node:path';
/**
 * Returns the per-user directory where repomix stores global configuration:
 * %LOCALAPPDATA%\Repomix on Windows, otherwise $XDG_CONFIG_HOME/repomix,
 * falling back to ~/.config/repomix.
 */
export const getGlobalDirectory = () => {
  if (process.platform === 'win32') {
    // `||` (not `??`) on purpose: an empty LOCALAPPDATA also falls back.
    const localAppData = process.env.LOCALAPPDATA || path.join(os.homedir(), 'AppData', 'Local');
    return path.join(localAppData, 'Repomix');
  }
  const xdgConfigHome = process.env.XDG_CONFIG_HOME;
  return xdgConfigHome
    ? path.join(xdgConfigHome, 'repomix')
    : path.join(os.homedir(), '.config', 'repomix');
};
</file>
<file path="src/core/file/workers/fileCollectWorker.ts">
import * as fs from 'node:fs/promises';
import path from 'node:path';
import iconv from 'iconv-lite';
import { isBinary } from 'istextorbinary';
import jschardet from 'jschardet';
import pc from 'picocolors';
import { logger } from '../../../shared/logger.js';
// Maximum file size to process (50MB)
// This prevents out-of-memory errors when processing very large files
export const MAX_FILE_SIZE = 50 * 1024 * 1024;
// Work item handed to the file-collection worker: one file path plus the
// root directory it is resolved against.
export interface FileCollectTask {
  // Path of the file to read, relative to rootDir
  filePath: string;
  // Base directory used to resolve filePath to an absolute path
  rootDir: string;
}
/**
 * Worker entry point: reads one file and returns `{ path, content }`,
 * or null when the file is skipped (oversized, binary, or unreadable).
 */
export default async ({ filePath, rootDir }: FileCollectTask) => {
  const fullPath = path.resolve(rootDir, filePath);
  const content = await readRawFile(fullPath);
  return content ? { path: filePath, content } : null;
};
const readRawFile = async (filePath: string): Promise<string | null> => {
try {
const stats = await fs.stat(filePath);
if (stats.size > MAX_FILE_SIZE) {
const sizeMB = (stats.size / 1024 / 1024).toFixed(1);
logger.log('');
logger.log('⚠️ Large File Warning:');
logger.log('──────────────────────');
logger.log(`File exceeds size limit: ${sizeMB}MB > ${MAX_FILE_SIZE / 1024 / 1024}MB (${filePath})`);
logger.log(pc.dim('Add this file to .repomixignore if you want to exclude it permanently'));
logger.log('');
return null;
}
if (isBinary(filePath)) {
logger.debug(`Skipping binary file: ${filePath}`);
return null;
}
logger.trace(`Reading file: ${filePath}`);
const buffer = await fs.readFile(filePath);
if (isBinary(null, buffer)) {
logger.debug(`Skipping binary file (content check): ${filePath}`);
return null;
}
const encoding = jschardet.detect(buffer).encoding || 'utf-8';
const content = iconv.decode(buffer, encoding);
return content;
} catch (error) {
logger.warn(`Failed to re