llms-txt-generator
Version:
A powerful CLI tool and MCP server for generating standardized `llms.txt` and `llms-full.txt` documentation files, helping AI models better understand project structures.
41 lines (40 loc) • 1.64 kB
JavaScript
import { ChatOpenAI, AzureChatOpenAI } from "@langchain/openai";
import { MCPClient, MCPAgent } from 'mcp-use';
import servers from "../mcp/config/servers";
import {} from '../types';
/**
 * MCP-enabled chat agent that wires a LangChain chat model to the
 * project's configured MCP servers.
 *
 * The underlying LLM is chosen at construction time: an Azure OpenAI
 * deployment when Azure settings are present (explicitly via
 * `config.azureOpenAI`, or implicitly when the base URL points at
 * `openai.azure.com`), otherwise a standard OpenAI-compatible endpoint.
 */
export class Agent extends MCPAgent {
  /**
   * @param {object}  config
   * @param {string}  [config.apiKey]       - API key; Azure path falls back to
   *                                          the AZURE_OPENAI_API_KEY env var.
   * @param {string}  [config.baseURL]      - OpenAI-compatible endpoint; falls
   *                                          back to OPENAI_API_BASE, then the
   *                                          public OpenAI URL.
   * @param {string}  [config.model]        - Model name passed to the LLM.
   * @param {boolean} [config.streaming]    - Enable token streaming.
   * @param {object}  [config.azureOpenAI]  - Azure settings: { apiVersion,
   *                                          deployment, instanceName }; each
   *                                          falls back to its AZURE_OPENAI_*
   *                                          env var.
   * @param {object}  [config.options]      - Extra options spread into the LLM
   *                                          constructor (later keys win).
   */
  constructor(config) {
    const baseURL = config.baseURL || process.env.OPENAI_API_BASE || 'https://api.openai.com/v1';
    // Azure is selected either explicitly or inferred from the endpoint host.
    const isAzure = config.azureOpenAI !== undefined || baseURL.endsWith('openai.azure.com');
    const client = MCPClient.fromDict({ mcpServers: servers });
    let llm;
    if (isAzure) {
      llm = new AzureChatOpenAI({
        // BUG FIX: previously fell back to OPENAI_MODEL_NAME, which holds a
        // model name, not a credential. Use the standard Azure key env var.
        azureOpenAIApiKey: config.apiKey || process.env.AZURE_OPENAI_API_KEY,
        azureOpenAIApiVersion: config.azureOpenAI?.apiVersion || process.env.AZURE_OPENAI_API_VERSION,
        azureOpenAIApiDeploymentName: config.azureOpenAI?.deployment || process.env.AZURE_OPENAI_API_DEPLOYMENT_NAME,
        azureOpenAIApiInstanceName: config.azureOpenAI?.instanceName || process.env.AZURE_OPENAI_API_INSTANCE_NAME,
        modelName: config.model,
        streaming: !!config.streaming,
        ...config.options,
      });
    } else {
      llm = new ChatOpenAI({
        modelName: config.model,
        openAIApiKey: config.apiKey,
        configuration: {
          baseURL,
        },
        streaming: !!config.streaming,
        ...config.options,
      });
    }
    // super() must run after the conditional LLM setup; JS (unlike strict TS
    // lint rules) permits statements before super() as long as `this` is not
    // touched first.
    super({
      llm: llm,
      client,
      maxSteps: 40,
      verbose: true,
    });
  }
}