@chainlink/mcp-server

"use strict"; /** * @fileoverview Anthropic Claude service implementation for AI completions * * Provides integration with Anthropic's Claude models through their official API. * Handles the unique message format requirements of Claude, including proper * system message handling and response parsing. */ var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", { value: true }); exports.AnthropicService = void 0; const sdk_1 = __importDefault(require("@anthropic-ai/sdk")); const ai_service_1 = require("./ai-service"); /** * Anthropic Claude service implementation for chat completions * * Supports Claude models including Claude-3 variants (Haiku, Sonnet, Opus). * Handles Anthropic's specific message format where system messages are * passed as a separate parameter rather than in the messages array. * * @class AnthropicService * @extends AIService */ class AnthropicService extends ai_service_1.AIService { client; /** * Initialize the Anthropic service with API configuration * * @param config - Service configuration including API key and model selection */ constructor(config) { super(config); this.client = new sdk_1.default({ apiKey: config.apiKey, }); } /** * Generate a chat completion response using Anthropic's Claude API * * Handles Anthropic's unique message format requirements: * - System messages are extracted and passed as a top-level parameter * - Only user and assistant messages are included in the messages array * - Response format is converted to our standard AIResponse format * * @param messages - Conversation history to send to Claude * @returns Promise resolving to formatted AI response with usage statistics * @throws {Error} When API key is invalid, rate limits exceeded, or other API errors */ async generateResponse(messages) { // Extract system message if present const systemMessage = messages.find((msg) => msg.role === "system"); const userMessages = messages.filter((msg) => msg.role !== "system"); const anthropicMessages = userMessages.map((msg) => ({ role: msg.role, content: msg.content, })); const requestParams = { model: this.config.model, max_tokens: this.config.maxTokens || 2000, messages: anthropicMessages, }; // Add system message as top-level parameter if present if (systemMessage) { requestParams.system = systemMessage.content; } const response = await this.client.messages.create(requestParams); return { content: response.content[0]?.type === "text" ? response.content[0].text : "", usage: { input_tokens: response.usage.input_tokens, output_tokens: response.usage.output_tokens, total_tokens: response.usage.input_tokens + response.usage.output_tokens, }, id: response.id, requestId: response._request_id || undefined, }; } /** * Check if the service has a valid API key configuration * * @returns True if API key is provided, false otherwise */ isConfigured() { return !!this.config.apiKey; } /** * Get the service name for identification * * @returns "Anthropic" as the service identifier */ getServiceName() { return "Anthropic"; } } exports.AnthropicService = AnthropicService; //# sourceMappingURL=anthropic-service.js.map