llms-txt-generator

A powerful CLI tool and MCP server for generating standardized llms.txt and llms-full.txt documentation files to help AI models better understand project structures.

core.js (223 lines, 9.76 kB)
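The file below appears to be the tsc-compiled CommonJS build of the package's LLM core (per the source map reference at the bottom). It exports a single LLM class that wraps either the standard OpenAI client or AzureOpenAI, selected by the presence of config.azureOpenAI, and routes any model tool calls through the package's MCPClient.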
"use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || (function () { var ownKeys = function(o) { ownKeys = Object.getOwnPropertyNames || function (o) { var ar = []; for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; return ar; }; return ownKeys(o); }; return function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); __setModuleDefault(result, mod); return result; }; })(); Object.defineProperty(exports, "__esModule", { value: true }); exports.LLM = void 0; const events_1 = require("events"); const client_1 = require("../mcp/client"); const openai_1 = __importStar(require("openai")); class LLM { constructor(config) { this.mcpClient = new client_1.MCPClient(); this.config = config; this.isAzure = !!config.azureOpenAI; if (this.isAzure) { // Azure OpenAI configuration this.openai = new openai_1.AzureOpenAI({ apiKey: config.apiKey || process.env.AZURE_OPENAI_API_KEY, endpoint: config.baseURL || process.env.AZURE_OPENAI_API_ENDPOINT, apiVersion: config.azureOpenAI?.apiVersion || process.env.AZURE_OPENAI_API_VERSION || '2024-02-15-preview', deployment: config.azureOpenAI?.deployment || process.env.AZURE_OPENAI_API_DEPLOYMENT, }); } else { // Standard OpenAI configuration this.openai = new openai_1.default({ apiKey: config.apiKey || process.env.OPENAI_API_KEY, baseURL: config.baseURL, }); } this.toolsType = config.toolsType || 'tool_call'; } async chat(messages) { const tools = await this.mcpClient.listTools(this.toolsType); const requestParams = { model: this.config.model, messages, tools: tools.length > 0 ? 
    async chat(messages) {
        const tools = await this.mcpClient.listTools(this.toolsType);
        const requestParams = {
            model: this.config.model,
            messages,
            tools: tools.length > 0 ? tools : undefined,
            stream: false,
        };
        // Only add optional parameters if they are defined in config
        if (this.config.maxTokens !== undefined) {
            requestParams.max_tokens = this.config.maxTokens;
        }
        if (this.config.temperature !== undefined) {
            requestParams.temperature = this.config.temperature;
        }
        if (this.config.topP !== undefined) {
            requestParams.top_p = this.config.topP;
        }
        if (this.config.frequencyPenalty !== undefined) {
            requestParams.frequency_penalty = this.config.frequencyPenalty;
        }
        if (this.config.presencePenalty !== undefined) {
            requestParams.presence_penalty = this.config.presencePenalty;
        }
        if (this.config.stop !== undefined) {
            requestParams.stop = this.config.stop;
        }
        if (this.config.seed !== undefined) {
            requestParams.seed = this.config.seed;
        }
        const completion = await this.openai.chat.completions.create(requestParams);
        const message = completion.choices[0]?.message;
        if (!message) {
            throw new Error('No response from OpenAI');
        }
        // Handle tool calls
        if (message.tool_calls && message.tool_calls.length > 0) {
            const toolResults = await Promise.all(message.tool_calls.map(async (toolCall) => {
                const result = await this.mcpClient.callTool(toolCall.function.name, JSON.parse(toolCall.function.arguments));
                return {
                    role: 'tool',
                    tool_call_id: toolCall.id,
                    content: result.content,
                };
            }));
            // Recursively call with tool results
            return await this.chat([
                ...messages,
                message,
                ...toolResults,
            ]);
        }
        return message.content || '';
    }
    async chatStream(messages, emitter = new events_1.EventEmitter()) {
        try {
            const tools = await this.mcpClient.listTools(this.toolsType);
            const requestParams = {
                model: this.config.model,
                messages,
                tools: tools.length > 0 ? tools : undefined,
                stream: true,
            };
            // Only add optional parameters if they are defined in config
            if (this.config.maxTokens !== undefined) {
                requestParams.max_tokens = this.config.maxTokens;
            }
            if (this.config.temperature !== undefined) {
                requestParams.temperature = this.config.temperature;
            }
            if (this.config.topP !== undefined) {
                requestParams.top_p = this.config.topP;
            }
            if (this.config.frequencyPenalty !== undefined) {
                requestParams.frequency_penalty = this.config.frequencyPenalty;
            }
            if (this.config.presencePenalty !== undefined) {
                requestParams.presence_penalty = this.config.presencePenalty;
            }
            if (this.config.stop !== undefined) {
                requestParams.stop = this.config.stop;
            }
            if (this.config.seed !== undefined) {
                requestParams.seed = this.config.seed;
            }
            const stream = await this.openai.chat.completions.create(requestParams);
            (async () => {
                let currentMessage = '';
                let toolCalls = [];
                let hasToolCall = false;
                for await (const chunk of stream) {
                    const delta = chunk.choices[0]?.delta;
                    if (delta?.content) {
                        currentMessage += delta.content;
                        emitter.emit('data', delta.content);
                    }
                    if (delta?.tool_calls) {
                        hasToolCall = true;
                        for (const toolCall of delta.tool_calls) {
                            if (toolCall.index !== undefined) {
                                if (!toolCalls[toolCall.index]) {
                                    toolCalls[toolCall.index] = { id: toolCall.id || '', type: 'function', function: { name: '', arguments: '' } };
                                }
                                if (toolCall.function?.name) {
                                    // join function name
                                    toolCalls[toolCall.index].function.name += toolCall.function.name;
                                }
                                if (toolCall.function?.arguments) {
                                    // join arguments
                                    toolCalls[toolCall.index].function.arguments += toolCall.function.arguments;
                                }
                            }
                        }
                    }
                }
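                // Stream complete. Tool-call fragments were accumulated by
                // delta index above; if any were received, execute them and
                // recurse into chatStream() so the model can use the results.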
                // Handle tool calls if any
                if (hasToolCall && toolCalls.length > 0) {
                    const systemRules = [];
                    const toolResults = await Promise.all(toolCalls.map(async (toolCall) => {
                        emitter.emit('data', `⚒️ (do task) -> ${toolCall.function.name} | ${toolCall.function.arguments.replace(/\n/g, ' ')}\n\n`);
                        const result = await this.mcpClient.callTool(toolCall.function.name, JSON.parse(toolCall.function.arguments));
                        return {
                            role: 'tool',
                            tool_call_id: toolCall.id,
                            content: result.content,
                        };
                    }));
                    // Continue with tool results
                    const assistantMessage = {
                        role: 'assistant',
                        content: currentMessage || null,
                        tool_calls: toolCalls,
                    };
                    return await this.chatStream([
                        ...systemRules,
                        ...messages,
                        assistantMessage,
                        ...toolResults
                    ], emitter);
                }
                emitter.emit('end');
            })();
            return emitter;
        }
        catch (error) {
            emitter.emit('error', error);
            return emitter;
        }
    }
}
exports.LLM = LLM;
//# sourceMappingURL=core.js.map
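
A minimal usage sketch, assuming this file is required directly; the require path, model name, and message contents below are placeholders, and only the config fields actually read by the constructor and chat methods above are used:

// Hypothetical driver code, not part of the package.
const { LLM } = require('./core'); // assumed relative path to this file

const llm = new LLM({
    model: 'gpt-4o-mini', // placeholder model name
    apiKey: process.env.OPENAI_API_KEY,
    temperature: 0.2, // optional; only sent when defined in config
});

// Non-streaming: resolves to the final assistant text after any
// MCP tool calls have been executed and fed back to the model.
llm.chat([{ role: 'user', content: 'Summarize this project.' }])
    .then((text) => console.log(text));

// Streaming: resolves to an EventEmitter that emits 'data' chunks,
// then 'end' (or 'error' on failure).
llm.chatStream([{ role: 'user', content: 'Generate llms.txt' }])
    .then((emitter) => {
        emitter.on('data', (chunk) => process.stdout.write(chunk));
        emitter.on('end', () => process.stdout.write('\n'));
        emitter.on('error', (err) => console.error(err));
    });

Because chatStream() kicks off an async IIFE that begins consuming the stream as soon as the returned promise resolves, listeners should be attached immediately; EventEmitter drops any event that fires before a listener is registered.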