context1000
Version: 0.1.8
**context1000** is a documentation format for software systems, designed for integration with artificial intelligence tools. The key artifacts are ADRs and RFCs, enriched with formalized links between documents.
934 lines (927 loc) • 35 kB
JavaScript
// esbuild-style module helpers.
var __defProp = Object.defineProperty;
var __getOwnPropNames = Object.getOwnPropertyNames;
// Wrap a lazily-initialized module: the returned __init runs the module body
// exactly once (clearing `fn` afterwards so it can be GC'd) and returns the
// cached result on every subsequent call.
var __esm = (fn, res) => function __init() {
  if (fn) {
    const moduleBody = fn[__getOwnPropNames(fn)[0]];
    res = moduleBody(fn = 0);
  }
  return res;
};
// Define live, enumerable getter bindings on `target` for every export in `all`.
var __export = (target, all) => {
  for (const name of __getOwnPropNames(all)) {
    __defProp(target, name, { get: all[name], enumerable: true });
  }
};
// src/document-processor.ts
import "dotenv/config";
import fs from "fs-extra";
import path from "path";
import matter from "gray-matter";
var DocumentProcessor;
var init_document_processor = __esm({
  "src/document-processor.ts"() {
    "use strict";
    /**
     * Walks a directory tree of markdown documents (ADRs, RFCs, guides, rules,
     * project docs), parses their frontmatter, and splits the content into
     * embedding-ready chunks with normalized metadata.
     */
    DocumentProcessor = class {
      // Soft upper bound on estimated tokens per chunk.
      MAX_CHUNK_TOKENS = 800;
      // Estimated token overlap carried between consecutive chunks of a section.
      OVERLAP_TOKENS = 150;
      /** Parses every markdown file under docsPath into document objects. */
      async processDocuments(docsPath) {
        const documents = [];
        await this.processDirectory(docsPath, documents);
        return documents;
      }
      /** Convenience wrapper that flattens all parsed documents into chunks. */
      async processDocumentsToChunks(docsPath) {
        const documents = await this.processDocuments(docsPath);
        const chunks = [];
        for (const doc of documents) {
          chunks.push(...doc.chunks);
        }
        return chunks;
      }
      /**
       * Recursively collects documents from dirPath. Files starting with "_"
       * are skipped; a file that fails to parse is logged and skipped so one
       * bad document does not abort the whole run.
       */
      async processDirectory(dirPath, documents) {
        const entries = await fs.readdir(dirPath, { withFileTypes: true });
        for (const entry of entries) {
          const fullPath = path.join(dirPath, entry.name);
          if (entry.isDirectory()) {
            await this.processDirectory(fullPath, documents);
          } else if (entry.name.endsWith(".md") && !entry.name.startsWith("_")) {
            try {
              const doc = await this.processMarkdownFile(fullPath);
              if (doc) {
                documents.push(doc);
              }
            } catch (error) {
              console.warn(`Error processing ${fullPath}:`, error);
            }
          }
        }
      }
      /**
       * Parses one markdown file (frontmatter + body) into a document with
       * chunks and normalized metadata. Returns null for files whose body is
       * empty after stripping frontmatter.
       */
      async processMarkdownFile(filePath) {
        const content = await fs.readFile(filePath, "utf-8");
        const { data: frontmatter, content: markdownContent } = matter(content);
        if (!markdownContent.trim()) {
          return null;
        }
        const type = this.inferDocumentType(filePath, frontmatter);
        const id = this.generateDocumentId(filePath);
        const baseMetadata = {
          title: this.extractTitle(frontmatter, filePath),
          type,
          tags: Array.isArray(frontmatter.tags) ? frontmatter.tags : [],
          projects: this.extractProjectsArray(frontmatter, filePath),
          status: this.validateStatus(frontmatter.status, type),
          filePath,
          related: this.validateRelatedMetadata(frontmatter.related || {})
        };
        const chunks = this.createDocumentChunks(id, markdownContent.trim(), baseMetadata);
        return {
          id,
          content: markdownContent.trim(),
          chunks,
          metadata: baseMetadata
        };
      }
      /**
       * Splits content into heading-delimited sections, chunks each one, and
       * back-fills totalChunks once the final count is known.
       */
      createDocumentChunks(documentId, content, baseMetadata) {
        const sections = this.extractSections(content);
        const chunks = [];
        let chunkIndex = 0;
        for (const section of sections) {
          const sectionChunks = this.chunkSection(section, documentId, chunkIndex, baseMetadata);
          chunks.push(...sectionChunks);
          chunkIndex += sectionChunks.length;
        }
        chunks.forEach((chunk) => {
          chunk.metadata.totalChunks = chunks.length;
        });
        return chunks;
      }
      /**
       * Splits markdown into sections at ATX headings (# .. ######). The
       * heading line itself stays at the top of its section. Falls back to a
       * single "Content" section when nothing accumulates any text.
       */
      extractSections(content) {
        const sections = [];
        const lines = content.split("\n");
        let currentSection = { title: "", content: "", type: "content" };
        for (const line of lines) {
          const headingMatch = line.match(/^(#{1,6})\s+(.+)$/);
          if (headingMatch) {
            if (currentSection.content.trim()) {
              sections.push(currentSection);
            }
            const title = headingMatch[2];
            const type = this.inferSectionType(title);
            currentSection = {
              title,
              content: line + "\n",
              type
            };
          } else {
            currentSection.content += line + "\n";
          }
        }
        if (currentSection.content.trim()) {
          sections.push(currentSection);
        }
        return sections.length > 0 ? sections : [{ title: "Content", content, type: "content" }];
      }
      /** Maps a heading title to a coarse section category by keyword. */
      inferSectionType(title) {
        const lowerTitle = title.toLowerCase();
        if (lowerTitle.includes("context")) return "context";
        if (lowerTitle.includes("decision")) return "decision";
        if (lowerTitle.includes("consequence")) return "consequences";
        if (lowerTitle.includes("summary")) return "summary";
        if (lowerTitle.includes("background")) return "background";
        if (lowerTitle.includes("implementation")) return "implementation";
        return "content";
      }
      /**
       * Emits a single chunk when the section fits within MAX_CHUNK_TOKENS,
       * otherwise defers to splitLargeSection. totalChunks is a placeholder
       * (0) until createDocumentChunks back-fills it.
       */
      chunkSection(section, documentId, startIndex, baseMetadata) {
        const tokens = this.estimateTokens(section.content);
        if (tokens <= this.MAX_CHUNK_TOKENS) {
          return [
            {
              id: `${documentId}_chunk_${startIndex}`,
              content: section.content.trim(),
              metadata: {
                ...baseMetadata,
                chunkIndex: startIndex,
                totalChunks: 0,
                sectionType: section.type,
                sectionTitle: section.title,
                tokens
              }
            }
          ];
        }
        return this.splitLargeSection(section, documentId, startIndex, baseMetadata);
      }
      /**
       * Splits an oversized section into overlapping word-window chunks. The
       * 0.75 factor converts the token budgets into approximate word counts
       * (inverse of the ~1.3 tokens-per-word estimate).
       */
      splitLargeSection(section, documentId, startIndex, baseMetadata) {
        const chunks = [];
        const words = section.content.split(/\s+/);
        const wordsPerChunk = Math.floor(this.MAX_CHUNK_TOKENS * 0.75);
        const overlapWords = Math.floor(this.OVERLAP_TOKENS * 0.75);
        // Guard against an infinite loop should the constants ever make the
        // stride non-positive.
        const stride = Math.max(1, wordsPerChunk - overlapWords);
        let currentIndex = startIndex;
        for (let i = 0; i < words.length; i += stride) {
          const chunkWords = words.slice(i, i + wordsPerChunk);
          const chunkContent = chunkWords.join(" ");
          const tokens = this.estimateTokens(chunkContent);
          chunks.push({
            id: `${documentId}_chunk_${currentIndex}`,
            content: chunkContent,
            metadata: {
              ...baseMetadata,
              chunkIndex: currentIndex,
              totalChunks: 0,
              sectionType: section.type,
              sectionTitle: section.title,
              tokens
            }
          });
          currentIndex++;
        }
        return chunks;
      }
      /** Cheap token estimate: ~1.3 tokens per whitespace-separated word. */
      estimateTokens(text) {
        const trimmed = text.trim();
        if (!trimmed) return 0; // "".split(/\s+/) yields [""], not []
        return Math.ceil(trimmed.split(/\s+/).length * 1.3);
      }
      /**
       * Infers the document type from the filename suffix first, then from
       * the directory layout. Defaults to "guide". Frontmatter is currently
       * unused (kept for interface compatibility).
       */
      inferDocumentType(filePath, _frontmatter) {
        const fileName = path.basename(filePath);
        const normalizedPath = filePath.replace(/\\/g, "/");
        if (fileName.endsWith(".adr.md")) return "adr";
        if (fileName.endsWith(".rfc.md")) return "rfc";
        if (fileName.endsWith(".guide.md")) return "guide";
        if (fileName.endsWith(".rules.md")) return "rule";
        if (normalizedPath.match(/\/decisions\/adr\//)) return "adr";
        if (normalizedPath.match(/\/decisions\/rfc\//)) return "rfc";
        if (normalizedPath.match(/\/guides\//)) return "guide";
        if (normalizedPath.match(/\/rules\//)) return "rule";
        if (fileName === "project.md" && normalizedPath.includes("/projects/")) return "project";
        if (normalizedPath.match(/\/projects\/[^\/]+\/[^\/]*\.md$/)) return "project";
        return "guide";
      }
      /**
       * Resolves the projects a document belongs to: explicit
       * frontmatter.related.projects wins, otherwise the owning
       * /projects/<name>/ directory. Separators are normalized so Windows
       * paths resolve the same way as in inferDocumentType.
       */
      extractProjectsArray(frontmatter, filePath) {
        if (frontmatter.related?.projects && Array.isArray(frontmatter.related.projects)) {
          return frontmatter.related.projects;
        }
        const normalizedPath = filePath.replace(/\\/g, "/");
        const projectMatch = normalizedPath.match(/\/projects\/([^\/]+)/);
        if (projectMatch) {
          return [projectMatch[1]];
        }
        return [];
      }
      /** Keeps only the known related-document keys that hold arrays. */
      validateRelatedMetadata(related) {
        const validatedRelated = {};
        const allowedKeys = ["adrs", "rfcs", "guides", "rules", "projects"];
        for (const key of allowedKeys) {
          if (related[key] && Array.isArray(related[key])) {
            validatedRelated[key] = related[key];
          }
        }
        return validatedRelated;
      }
      /** Title resolution order: frontmatter.title, frontmatter.name, filename. */
      extractTitle(frontmatter, filePath) {
        if (frontmatter.title && typeof frontmatter.title === "string") {
          return frontmatter.title;
        }
        if (frontmatter.name && typeof frontmatter.name === "string") {
          return frontmatter.name;
        }
        return path.basename(filePath, ".md");
      }
      /**
       * Returns the status when it is valid for the document type, undefined
       * otherwise. Types with an empty allow-list accept any string.
       */
      validateStatus(status, type) {
        if (!status || typeof status !== "string") {
          return void 0;
        }
        const validStatuses = {
          adr: ["draft", "accepted", "rejected"],
          rfc: ["draft", "accepted", "rejected"],
          guide: [],
          rule: [],
          project: ["active", "inactive", "archived"]
        };
        const allowedStatuses = validStatuses[type] || [];
        if (allowedStatuses.length === 0) {
          return status;
        }
        return allowedStatuses.includes(status) ? status : void 0;
      }
      /**
       * Derives a stable id from the path relative to the working directory,
       * e.g. "docs/adr/001.adr.md" -> "docs_adr_001.adr". Only the TRAILING
       * ".md" is stripped: a plain replace(".md", "") removes the first
       * occurrence anywhere and corrupts names like "a.mdx.md". Windows
       * separators are normalized before being collapsed into "_".
       */
      generateDocumentId(filePath) {
        return path.relative(process.cwd(), filePath)
          .replace(/\\/g, "/")
          .replace(/\//g, "_")
          .replace(/\.md$/, "");
      }
    };
  }
});
// src/chroma-client.ts
import "dotenv/config";
import { ChromaClient as ChromaApi } from "chromadb";
import { OpenAIEmbeddingFunction } from "@chroma-core/openai";
var ChromaClient;
var init_chroma_client = __esm({
  "src/chroma-client.ts"() {
    "use strict";
    init_document_processor();
    /**
     * Thin wrapper around the Chroma HTTP client managing a single collection
     * of document chunks embedded via OpenAI "text-embedding-3-small".
     */
    ChromaClient = class {
      client;
      collection = null;
      embeddingFunction;
      constructor() {
        this.client = new ChromaApi({
          path: process.env.CHROMA_URL || "http://localhost:8000"
        });
        this.embeddingFunction = new OpenAIEmbeddingFunction({
          apiKey: process.env.OPENAI_API_KEY || "",
          modelName: "text-embedding-3-small"
        });
      }
      /**
       * Connects to an existing collection, or creates it and (when docsPath
       * is given) indexes the documents found there.
       * NOTE(review): ANY getCollection failure -- including the server being
       * unreachable -- falls through to the create path; confirm this is the
       * intended recovery behavior.
       */
      async initialize(collectionName = "context1000", docsPath) {
        try {
          this.collection = await this.client.getCollection({
            name: collectionName,
            embeddingFunction: this.embeddingFunction
          });
          console.log(`Connected to existing collection: ${collectionName}`);
        } catch (error) {
          this.collection = await this.client.createCollection({
            name: collectionName,
            embeddingFunction: this.embeddingFunction,
            metadata: { description: "Documentation RAG collection for code review" }
          });
          console.log(`Created new collection: ${collectionName}`);
          if (docsPath) {
            console.log(`Processing documents from: ${docsPath}`);
            const processor = new DocumentProcessor();
            const chunks = await processor.processDocumentsToChunks(docsPath);
            if (chunks.length > 0) {
              await this.addDocuments(chunks);
              console.log(`Processed and added ${chunks.length} document chunks from ${docsPath}`);
            }
          }
        }
      }
      /**
       * Adds chunks (or whole documents) to the collection. Array/object
       * metadata values are JSON-stringified and numeric chunk fields are
       * stored as strings, since Chroma metadata holds scalar values only.
       */
      async addDocuments(items) {
        if (!this.collection) {
          throw new Error("Collection not initialized");
        }
        const ids = items.map((item) => item.id);
        const texts = items.map((item) => item.content);
        const metadatas = items.map((item) => {
          const baseMetadata = {
            title: item.metadata.title,
            type: item.metadata.type,
            tags: JSON.stringify(item.metadata.tags),
            projects: JSON.stringify(item.metadata.projects),
            status: item.metadata.status || "",
            filePath: item.metadata.filePath,
            related: JSON.stringify(item.metadata.related || {})
          };
          // Chunk-level items carry extra positional/section fields.
          if ("chunkIndex" in item.metadata) {
            return {
              ...baseMetadata,
              chunkIndex: item.metadata.chunkIndex.toString(),
              totalChunks: item.metadata.totalChunks.toString(),
              sectionType: item.metadata.sectionType || "",
              sectionTitle: item.metadata.sectionTitle || "",
              tokens: item.metadata.tokens.toString()
            };
          }
          return baseMetadata;
        });
        await this.collection.add({
          ids,
          documents: texts,
          metadatas
        });
        console.log(`Added ${items.length} items to collection`);
      }
      /**
       * Runs a semantic query and returns the first result set as three
       * parallel arrays. Null rows are dropped by SHARED index: filtering
       * each array independently (the previous behavior) could shift a null
       * out of one array but not the others, desynchronizing the positional
       * correspondence that callers rely on when zipping them back together.
       */
      async queryDocuments(query, nResults = 5, filters) {
        if (!this.collection) {
          throw new Error("Collection not initialized");
        }
        const queryParams = {
          queryTexts: [query],
          nResults
        };
        if (filters) {
          queryParams.where = filters;
        }
        const results = await this.collection.query(queryParams);
        const rawDocuments = results.documents[0] || [];
        const rawMetadatas = results.metadatas[0] || [];
        const rawDistances = results.distances?.[0] || [];
        const documents = [];
        const metadatas = [];
        const distances = [];
        for (let i = 0; i < rawDocuments.length; i++) {
          if (rawDocuments[i] === null || rawMetadatas[i] === null) {
            continue;
          }
          documents.push(rawDocuments[i]);
          metadatas.push(rawMetadatas[i]);
          distances.push(rawDistances[i] ?? 0);
        }
        return { documents, metadatas, distances };
      }
      /** Best-effort collection delete; failures are logged, not thrown. */
      async deleteCollection(collectionName = "context1000") {
        try {
          await this.client.deleteCollection({ name: collectionName });
          console.log(`Deleted collection: ${collectionName}`);
        } catch (error) {
          console.warn(`Could not delete collection ${collectionName}:`, error);
        }
      }
      /** Lists the names of all collections on the server. */
      async listCollections() {
        const collections = await this.client.listCollections();
        return collections.map((c) => c.name);
      }
      /** Returns name, item count and metadata of the active collection. */
      async getCollectionInfo() {
        if (!this.collection) {
          throw new Error("Collection not initialized");
        }
        const count = await this.collection.count();
        return {
          name: this.collection.name,
          count,
          metadata: this.collection.metadata
        };
      }
    };
  }
});
// src/query.ts
var query_exports = {};
__export(query_exports, {
  QueryInterface: () => QueryInterface
});
import "dotenv/config";
var QueryInterface;
var init_query = __esm({
  "src/query.ts"() {
    "use strict";
    init_chroma_client();
    /**
     * High-level search facade over ChromaClient: runs a semantic query,
     * de-serializes the stored metadata, and applies optional type/project
     * filters.
     */
    QueryInterface = class {
      chromaClient;
      constructor() {
        this.chromaClient = new ChromaClient();
      }
      async initialize(collectionName = "context1000", docsPath) {
        await this.chromaClient.initialize(collectionName, docsPath);
      }
      /**
       * Queries the collection and returns up to maxResults rows, each with a
       * parsed metadata object and a relevance score (1 - distance).
       *
       * Only the `type` filter can run server-side. `projects` is stored as a
       * JSON-stringified array (see ChromaClient.addDocuments), so a Chroma
       * `{ projects: { $in: [...] } }` match against raw project names can
       * never succeed -- previously a project-only filter returned nothing.
       * The project filter is therefore applied client-side after parsing.
       */
      async queryDocs(query, options = {}) {
        const { maxResults = 5, filterByType, filterByProject } = options;
        let whereClause = void 0;
        if (filterByType && filterByType.length > 0) {
          whereClause = { type: { $in: filterByType } };
        }
        const wantedProjects = filterByProject && filterByProject.length > 0 ? new Set(filterByProject) : null;
        // Over-fetch when post-filtering so we can still return maxResults rows.
        const fetchCount = wantedProjects ? maxResults * 4 : maxResults;
        const results = await this.chromaClient.queryDocuments(query, fetchCount, whereClause);
        const mapped = results.documents.map((doc, index) => ({
          document: doc,
          metadata: {
            title: results.metadatas[index].title,
            type: results.metadatas[index].type,
            filePath: results.metadatas[index].filePath,
            tags: JSON.parse(results.metadatas[index].tags || "[]"),
            projects: JSON.parse(results.metadatas[index].projects || "[]"),
            status: results.metadatas[index].status
          },
          relevanceScore: 1 - (results.distances[index] || 0)
        }));
        const filtered = wantedProjects
          ? mapped.filter((result) => result.metadata.projects.some((p) => wantedProjects.has(p)))
          : mapped;
        return filtered.slice(0, maxResults);
      }
    };
  }
});
// src/index.ts
// Run the lazy module initializers up front so DocumentProcessor and
// ChromaClient are bound before the CLI commands below reference them.
init_document_processor();
init_chroma_client();
import "dotenv/config";
import { Command } from "commander";
import path2 from "path";
// package.json
var package_default = {
name: "context1000",
version: "0.1.8",
main: "dist/index.js",
type: "module",
sideEffects: false,
bin: {
context1000: "dist/index.js"
},
keywords: [
"documentation",
"rag",
"retrieval-augmented-generation",
"vector-search",
"chromadb",
"mcp",
"model-context-protocol",
"claude",
"ai",
"documentation-format",
"adr",
"rfc",
"architecture-decision-records",
"cli",
"knowledge-base",
"document-processing",
"ai-tools",
"developer-tools",
"code-review",
"documentation-tools"
],
scripts: {
build: "tsup",
prepublishOnly: "npm run build"
},
module: "./dist/index.js",
types: "./dist/index.d.ts",
exports: {
".": {
import: "./dist/index.js",
types: "./dist/index.d.ts"
}
},
files: [
"dist"
],
dependencies: {
"@anthropic-ai/claude-code": "^1.0.80",
"@chroma-core/openai": "^0.1.7",
"@modelcontextprotocol/sdk": "^1.17.1",
"@xenova/transformers": "^2.17.2",
chromadb: "^3.0.10",
commander: "^14.0.0",
dotenv: "^17.2.1",
"fs-extra": "^11.3.0",
"gray-matter": "^4.0.3",
openai: "^5.10.2"
},
devDependencies: {
"@types/fs-extra": "^11.0.4",
"@types/node": "^20.14.10",
tsup: "^8.5.0",
tsx: "^4.16.2",
typescript: "^5.5.3"
}
};
// src/index.ts
import { createServer } from "http";
import { StreamableHTTPServerTransport } from "@modelcontextprotocol/sdk/server/streamableHttp.js";
import { SSEServerTransport } from "@modelcontextprotocol/sdk/server/sse.js";
// Root CLI command plus a registry of live SSE sessions keyed by session id.
var program = new Command();
var sseTransports = {};
program
  .name("context1000")
  .description("CLI for context1000 RAG system")
  .version(package_default.version);
/**
 * `index` command: parses all markdown under <docs-path> into chunks, then
 * rebuilds the "context1000" Chroma collection from scratch.
 */
program
  .command("index")
  .description("Index documents for RAG system")
  .argument("<docs-path>", "Path to documents directory")
  .action(async (docsPath) => {
    try {
      console.log("Starting document indexing...");
      const finalDocsPath = path2.resolve(docsPath);
      console.log(`Processing documents from: ${finalDocsPath}`);
      // Parse and chunk every markdown file before touching the database, so
      // a parse failure leaves the existing collection untouched.
      const processor = new DocumentProcessor();
      const chunks = await processor.processDocumentsToChunks(finalDocsPath);
      console.log(`Processed ${chunks.length} document chunks`);
      if (chunks.length === 0) {
        console.log("No document chunks to index");
        return;
      }
      // Drop any stale copy, then create the collection exactly once.
      // (Initializing before the delete -- the previous behavior -- just
      // created a collection that was immediately thrown away.)
      const chromaClient = new ChromaClient();
      await chromaClient.deleteCollection("context1000");
      await chromaClient.initialize("context1000");
      await chromaClient.addDocuments(chunks);
      const info = await chromaClient.getCollectionInfo();
      console.log("Collection info:", info);
      console.log("Document indexing completed successfully!");
      // Summarize the indexed chunks grouped per source file.
      console.log("\nIndexed document chunks:");
      const documentsMap = new Map();
      for (const chunk of chunks) {
        const docId = chunk.metadata.filePath;
        if (!documentsMap.has(docId)) {
          documentsMap.set(docId, []);
        }
        documentsMap.get(docId).push(chunk);
      }
      documentsMap.forEach((docChunks, filePath) => {
        const firstChunk = docChunks[0];
        console.log(
          `- ${firstChunk.metadata.title} (${firstChunk.metadata.type}) - ${docChunks.length} chunks - ${filePath}`
        );
      });
    } catch (error) {
      console.error("Error indexing documents:", error);
      process.exit(1);
    }
  });
/**
 * `mcp` command: exposes the RAG query tools over the Model Context Protocol.
 * Supports stdio (default), streamable HTTP, and legacy SSE transports. The
 * MCP SDK is imported lazily so other CLI commands start without it.
 */
program
  .command("mcp")
  .description("Start MCP server")
  .option("--transport <stdio|http|sse>", "transport type", "stdio")
  .option("--port <number>", "port for HTTP/SSE transport", "3000")
  .action(async (options) => {
    try {
      const { Server } = await import("@modelcontextprotocol/sdk/server/index.js");
      const { StdioServerTransport } = await import("@modelcontextprotocol/sdk/server/stdio.js");
      const { CallToolRequestSchema, ErrorCode, ListToolsRequestSchema, McpError } = await import("@modelcontextprotocol/sdk/types.js");
      const { QueryInterface: QueryInterface2 } = await Promise.resolve().then(() => (init_query(), query_exports));
      const transport = options.transport || "stdio";
      // Parse with an explicit radix; NaN (or 0) falls back to the 3000 default.
      const port = Number.parseInt(options.port, 10) || 3000;
      const allowedTransports = ["stdio", "http", "sse"];
      if (!allowedTransports.includes(transport)) {
        console.error(`Invalid --transport value: '${transport}'. Must be one of: stdio, http, sse.`);
        process.exit(1);
      }
      const server = new Server(
        {
          name: "context1000",
          version: package_default.version
        },
        {
          capabilities: {
            tools: {}
          }
        }
      );
      // Lazily-created singleton QueryInterface shared by all tool calls.
      let queryInterface = null;
      async function initializeRAG() {
        if (!queryInterface) {
          // Logged to stderr: stdout is reserved for the stdio transport.
          console.error("Initializing global RAG for context1000");
          queryInterface = new QueryInterface2();
          await queryInterface.initialize("context1000");
        }
        return queryInterface;
      }
      // Advertise the four RAG tools and their JSON-schema inputs.
      server.setRequestHandler(ListToolsRequestSchema, async () => {
        return {
          tools: [
            {
              name: "check_project_rules",
              description: "Check project rules and constraints. Should be called FIRST when working on any project task. Returns project-specific rules, coding standards, constraints, and requirements that must be followed.",
              inputSchema: {
                type: "object",
                properties: {
                  project: {
                    type: "string",
                    description: "Project name to check rules for. If not provided, searches for general rules."
                  },
                  max_results: {
                    type: "number",
                    description: "Maximum number of rule chunks to return (default: 15)",
                    minimum: 1,
                    maximum: 30
                  }
                },
                required: []
              }
            },
            {
              name: "search_guides",
              description: "Search implementation guides and best practices based on project rules and decision context. Called AFTER check_project_rules when specific implementation guidance is needed. Finds step-by-step guides, patterns, examples, and references to architectural decisions (ADRs/RFCs) that inform implementation approaches.",
              inputSchema: {
                type: "object",
                properties: {
                  query: {
                    type: "string",
                    description: "Specific guidance needed (e.g., 'authentication implementation', 'testing patterns', 'deployment process')"
                  },
                  project: {
                    type: "string",
                    description: "Project name for project-specific guides"
                  },
                  related_rules: {
                    type: "array",
                    items: { type: "string" },
                    description: "Rule references found from check_project_rules to find related guides"
                  },
                  max_results: {
                    type: "number",
                    description: "Maximum number of guide chunks to return (default: 10)",
                    minimum: 1,
                    maximum: 25
                  }
                },
                required: ["query"]
              }
            },
            {
              name: "search_decisions",
              description: "Search through architectural decisions (ADRs) and RFCs. Triggered automatically when rules or guides reference specific decisions. Returns decision context, rationale, and implications.",
              inputSchema: {
                type: "object",
                properties: {
                  query: {
                    type: "string",
                    description: "Decision or RFC topic to search for"
                  },
                  references: {
                    type: "array",
                    items: { type: "string" },
                    description: "Decision references found in rules or guides (e.g., 'ADR-001', 'RFC-123')"
                  },
                  project: {
                    type: "string",
                    description: "Project context for project-specific decisions"
                  },
                  max_results: {
                    type: "number",
                    description: "Maximum number of decision chunks to return (default: 8)",
                    minimum: 1,
                    maximum: 20
                  }
                },
                required: ["query"]
              }
            },
            {
              name: "search_documentation",
              // Fixed tool name in the description: the guides tool is
              // "search_guides", not "check_guides".
              description: "Fallback general documentation search. Use when specific tools (check_project_rules, search_guides, search_decisions) don't provide sufficient information. Searches across all document types.",
              inputSchema: {
                type: "object",
                properties: {
                  query: {
                    type: "string",
                    description: "Natural language search query for finding relevant documentation"
                  },
                  project: {
                    type: "string",
                    description: "Optional project name to search within specific project documentation"
                  },
                  type_filter: {
                    type: "array",
                    items: {
                      type: "string",
                      enum: ["adr", "rfc", "guide", "rule", "project"]
                    },
                    description: "Filter results by document types"
                  },
                  max_results: {
                    type: "number",
                    description: "Maximum number of document chunks to return (default: 10)",
                    minimum: 1,
                    maximum: 50
                  }
                },
                required: ["query"]
              }
            }
          ]
        };
      });
      // Dispatch tool calls to the shared QueryInterface.
      server.setRequestHandler(CallToolRequestSchema, async (request) => {
        const { name, arguments: args } = request.params;
        try {
          const rag = await initializeRAG();
          switch (name) {
            case "check_project_rules": {
              const { project, max_results = 15 } = args;
              const options2 = {
                maxResults: max_results,
                filterByType: ["rule"]
              };
              if (project) {
                options2.filterByProject = [project];
              }
              let query = "rules constraints requirements standards";
              if (project) {
                query += ` ${project}`;
              }
              const results = await rag.queryDocs(query, options2);
              const ruleReferences = results.map((result) => result.metadata.title || result.metadata.filePath).filter((ref) => Boolean(ref));
              return {
                content: [
                  {
                    type: "text",
                    text: JSON.stringify(
                      {
                        rules: results,
                        rule_references: ruleReferences,
                        summary: `Found ${results.length} project rules${project ? ` for ${project}` : ""}`
                      },
                      null,
                      2
                    )
                  }
                ]
              };
            }
            case "search_guides": {
              const { query, project, related_rules, max_results = 10 } = args;
              if (!query) {
                throw new McpError(ErrorCode.InvalidParams, "query is required");
              }
              const options2 = {
                maxResults: max_results,
                filterByType: ["guide"]
              };
              if (project) {
                options2.filterByProject = [project];
              }
              let enhancedQuery = query;
              if (related_rules && related_rules.length > 0) {
                enhancedQuery += ` ${related_rules.join(" ")}`;
              }
              const results = await rag.queryDocs(enhancedQuery, options2);
              // Surface ADR/RFC identifiers mentioned in the guide bodies so
              // the client can follow up with search_decisions.
              const decisionReferences = results.reduce((refs, result) => {
                const content = result.document.toLowerCase();
                const adrMatches = content.match(/adr[-_]?\d+/g) || [];
                const rfcMatches = content.match(/rfc[-_]?\d+/g) || [];
                return refs.concat(adrMatches, rfcMatches);
              }, []);
              return {
                content: [
                  {
                    type: "text",
                    text: JSON.stringify(
                      {
                        guides: results,
                        decision_references: [...new Set(decisionReferences)],
                        summary: `Found ${results.length} implementation guides for "${query}"${project ? ` in ${project}` : ""}`
                      },
                      null,
                      2
                    )
                  }
                ]
              };
            }
            case "search_decisions": {
              const { query, references, project, max_results = 8 } = args;
              if (!query) {
                throw new McpError(ErrorCode.InvalidParams, "query is required");
              }
              const options2 = {
                maxResults: max_results,
                filterByType: ["adr", "rfc"]
              };
              if (project) {
                options2.filterByProject = [project];
              }
              let enhancedQuery = query;
              if (references && references.length > 0) {
                enhancedQuery += ` ${references.join(" ")}`;
              }
              const results = await rag.queryDocs(enhancedQuery, options2);
              return {
                content: [
                  {
                    type: "text",
                    text: JSON.stringify(
                      {
                        decisions: results,
                        summary: `Found ${results.length} architectural decisions for "${query}"${references ? ` (refs: ${references.join(", ")})` : ""}`
                      },
                      null,
                      2
                    )
                  }
                ]
              };
            }
            case "search_documentation": {
              const { query, project, type_filter, max_results = 10 } = args;
              if (!query) {
                throw new McpError(ErrorCode.InvalidParams, "query is required");
              }
              const options2 = {
                maxResults: max_results,
                filterByType: type_filter
              };
              if (project) {
                options2.filterByProject = [project];
              }
              const results = await rag.queryDocs(query, options2);
              return {
                content: [
                  {
                    type: "text",
                    text: JSON.stringify(
                      {
                        documents: results,
                        summary: `Found ${results.length} documents for "${query}"${type_filter ? ` (types: ${type_filter.join(", ")})` : ""}`
                      },
                      null,
                      2
                    )
                  }
                ]
              };
            }
            default:
              throw new McpError(ErrorCode.MethodNotFound, `Unknown tool: ${name}`);
          }
        } catch (error) {
          // Preserve specific MCP errors (InvalidParams, MethodNotFound, ...)
          // instead of collapsing them all into InternalError, so clients see
          // the correct error code.
          if (error instanceof McpError) {
            throw error;
          }
          throw new McpError(ErrorCode.InternalError, `Tool execution failed: ${error}`);
        }
      });
      if (transport === "stdio") {
        const stdioTransport = new StdioServerTransport();
        await server.connect(stdioTransport);
        console.error("context1000 RAG MCP server running on stdio");
      } else if (transport === "http" || transport === "sse") {
        const initialPort = port;
        let actualPort = initialPort;
        const httpServer = createServer(async (req, res) => {
          const url = new URL(req.url || "", `http://${req.headers.host}`).pathname;
          // Permissive CORS so browser-based MCP clients can connect.
          res.setHeader("Access-Control-Allow-Origin", "*");
          res.setHeader("Access-Control-Allow-Methods", "GET,POST,OPTIONS,DELETE");
          res.setHeader(
            "Access-Control-Allow-Headers",
            "Content-Type, MCP-Session-Id, mcp-session-id, MCP-Protocol-Version"
          );
          res.setHeader("Access-Control-Expose-Headers", "MCP-Session-Id");
          if (req.method === "OPTIONS") {
            res.writeHead(200);
            res.end();
            return;
          }
          try {
            if (url === "/mcp") {
              // Stateless streamable-HTTP: one transport per request.
              const transport2 = new StreamableHTTPServerTransport({
                sessionIdGenerator: void 0
              });
              await server.connect(transport2);
              await transport2.handleRequest(req, res);
            } else if (url === "/sse" && req.method === "GET") {
              // Legacy SSE: keep the transport alive until the client closes.
              const sseTransport = new SSEServerTransport("/messages", res);
              sseTransports[sseTransport.sessionId] = sseTransport;
              res.on("close", () => {
                delete sseTransports[sseTransport.sessionId];
              });
              await server.connect(sseTransport);
            } else if (url === "/messages" && req.method === "POST") {
              // SSE back-channel: route the POST to the session's transport.
              const sessionId = new URL(req.url || "", `http://${req.headers.host}`).searchParams.get("sessionId") ?? "";
              if (!sessionId) {
                res.writeHead(400);
                res.end("Missing sessionId parameter");
                return;
              }
              const sseTransport = sseTransports[sessionId];
              if (!sseTransport) {
                res.writeHead(400);
                res.end(`No transport found for sessionId: ${sessionId}`);
                return;
              }
              await sseTransport.handlePostMessage(req, res);
            } else if (url === "/ping") {
              res.writeHead(200, { "Content-Type": "text/plain" });
              res.end("pong");
            } else {
              res.writeHead(404);
              res.end("Not found");
            }
          } catch (error) {
            console.error("Error handling request:", error);
            if (!res.headersSent) {
              res.writeHead(500);
              res.end("Internal Server Error");
            }
          }
        });
        // Bind the requested port, walking upward past ports already in use
        // (up to maxAttempts) before giving up.
        const startServer = (currentPort, maxAttempts = 10) => {
          httpServer.once("error", (err) => {
            if (err.code === "EADDRINUSE" && currentPort < initialPort + maxAttempts) {
              console.warn(`Port ${currentPort} is in use, trying port ${currentPort + 1}...`);
              startServer(currentPort + 1, maxAttempts);
            } else {
              console.error(`Failed to start server: ${err.message}`);
              process.exit(1);
            }
          });
          httpServer.listen(currentPort, () => {
            actualPort = currentPort;
            console.error(
              `context1000 RAG MCP Server running on ${transport.toUpperCase()} at http://localhost:${actualPort}/mcp and legacy SSE at /sse`
            );
          });
        };
        startServer(initialPort);
      } else {
        // Unreachable after the allow-list check above; kept as a safeguard.
        throw new Error(`Unsupported transport: ${transport}`);
      }
    } catch (error) {
      console.error("Failed to run MCP server:", error);
      process.exit(1);
    }
  });
// Dispatch the CLI.
program.parse();