vibe-coder-mcp
Production-ready MCP server with complete agent integration, multi-transport support, and comprehensive development automation tools for AI-assisted workflows.
llmHelper.d.ts (TypeScript)
import { OpenRouterConfig } from '../types/workflow.js';
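/** Makes a single direct LLM call for the named logical task and resolves with the raw response text. */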
export declare function performDirectLlmCall(prompt: string, systemPrompt: string, config: OpenRouterConfig, logicalTaskName: string, temperature?: number, expectedSchema?: object): Promise<string>;
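/** JSON-oriented LLM call; resolves with the raw response plus the list of optimizations that were applied. */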
export declare function performOptimizedJsonLlmCall(prompt: string, systemPrompt: string, config: OpenRouterConfig, logicalTaskName: string, expectedSchema?: object, temperature?: number): Promise<{
response: string;
optimizationApplied: string[];
}>;
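/** LLM call that accounts for the expected response format ('json' | 'markdown' | 'text' | 'yaml'). */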
export declare function performFormatAwareLlmCall(prompt: string, systemPrompt: string, config: OpenRouterConfig, logicalTaskName: string, expectedFormat?: 'json' | 'markdown' | 'text' | 'yaml', expectedSchema?: object, temperature?: number): Promise<string>;
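/** Leniently parses an LLM response as JSON; `context` labels the call site. */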
export declare function intelligentJsonParse(response: string, context: string): unknown;
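/** Attempts to recover a usable JSON fragment from a truncated or malformed string. */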
declare function extractPartialJson(jsonString: string, jobId?: string): string;
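/** Normalizes a raw LLM response into a parseable JSON string. */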
export declare function normalizeJsonResponse(rawResponse: string, jobId?: string): string;
export { extractPartialJson };
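/** Variants of the calls above that read the OpenRouter configuration from the centralized config rather than taking an OpenRouterConfig argument. */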
export declare function performDirectLlmCallWithCentralizedConfig(prompt: string, systemPrompt: string, logicalTaskName: string, temperature?: number, expectedSchema?: object): Promise<string>;
export declare function performFormatAwareLlmCallWithCentralizedConfig(prompt: string, systemPrompt: string, logicalTaskName: string, expectedFormat?: 'json' | 'markdown' | 'text' | 'yaml', expectedSchema?: object, temperature?: number): Promise<string>;
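/** Resolves the model configured for the given operation from the centralized config. */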
export declare function getLLMModelWithCentralizedConfig(operation: string): Promise<string>;
//# sourceMappingURL=llmHelper.d.ts.map
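A minimal usage sketch combining the format-aware call with the lenient JSON parser. The import paths mirror the declaration file above and may differ in a real consumer; the task name 'summarize_notes', the prompts, and the schema are illustrative placeholders, and the OpenRouterConfig value is assumed to come from the server's own configuration loading.

TypeScript
import { performFormatAwareLlmCall, intelligentJsonParse } from './llmHelper.js';
import type { OpenRouterConfig } from '../types/workflow.js';

// Illustrative schema; not defined by the package.
const summarySchema = {
    type: 'object',
    properties: { summary: { type: 'string' } },
    required: ['summary'],
};

async function summarizeNotes(notes: string, config: OpenRouterConfig): Promise<string> {
    // Request JSON output so the helper can apply format-aware handling.
    const raw = await performFormatAwareLlmCall(
        `Summarize the following notes:\n${notes}`, // prompt
        'You are a concise technical summarizer.',  // systemPrompt
        config,
        'summarize_notes',                          // logicalTaskName (placeholder)
        'json',                                     // expectedFormat
        summarySchema,                              // expectedSchema
        0.2                                         // temperature
    );

    // The lenient parse tolerates minor formatting noise in the model output.
    const parsed = intelligentJsonParse(raw, 'summarizeNotes') as { summary: string };
    return parsed.summary;
}

performDirectLlmCall would be used the same way minus the expectedFormat argument, and the WithCentralizedConfig variants drop the config parameter entirely.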