@chainlink/mcp-server

gemini-service.js
"use strict"; /** * @fileoverview Google Gemini service implementation for AI completions * * Provides integration with Google's Gemini models through their official API. * Handles authentication, request formatting, and response parsing for * chat completions using the Google Generative AI SDK. */ Object.defineProperty(exports, "__esModule", { value: true }); exports.GeminiService = void 0; const generative_ai_1 = require("@google/generative-ai"); const ai_service_1 = require("./ai-service"); /** * Google Gemini service implementation for chat completions * * Supports Google's Gemini models including Gemini Pro and Ultra variants. * Converts conversation format to Gemini's expected prompt structure and * handles response parsing with usage statistics. * * @class GeminiService * @extends AIService */ class GeminiService extends ai_service_1.AIService { /** Google Generative AI client instance for API communication */ client; /** * Initialize the Gemini service with API configuration * * @param config - Service configuration including API key and model selection */ constructor(config) { super(config); this.client = new generative_ai_1.GoogleGenerativeAI(config.apiKey); } /** * Generate a chat completion response using Google's Gemini API * * Converts internal message format to Gemini's expected prompt format, * sends the request, and transforms the response back to our standard format. * Handles Gemini's unique conversation structure requirements. * * @param messages - Conversation history to send to Gemini * @returns Promise resolving to formatted AI response with usage statistics * @throws {Error} When API key is invalid, quota exceeded, or other API errors */ async generateResponse(messages) { const model = this.client.getGenerativeModel({ model: this.config.model }); // Convert messages to Gemini format // Gemini expects a single prompt or conversation history const prompt = messages .map((msg) => { const rolePrefix = msg.role === "user" ? "User: " : msg.role === "assistant" ? "Assistant: " : "System: "; return rolePrefix + msg.content; }) .join("\n\n"); const result = await model.generateContent(prompt); const response = await result.response; return { content: response.text(), usage: response.usageMetadata ? { input_tokens: response.usageMetadata.promptTokenCount, output_tokens: response.usageMetadata.candidatesTokenCount, total_tokens: response.usageMetadata.totalTokenCount, } : undefined, id: undefined, requestId: undefined, }; } /** * Check if the service has a valid API key configuration * * @returns True if API key is provided, false otherwise */ isConfigured() { return !!this.config.apiKey; } /** * Get the service name for identification * * @returns "Gemini" as the service identifier */ getServiceName() { return "Gemini"; } } exports.GeminiService = GeminiService; //# sourceMappingURL=gemini-service.js.map