@juspay/neurolink

Universal AI Development Platform with working MCP integration, multi-provider support, and a professional CLI. Built-in tools are operational, and 58+ external MCP servers are discoverable. Connect to filesystem, GitHub, database operations, and more.

import { createAnthropic } from "@ai-sdk/anthropic";
import { streamText } from "ai";
import { BaseProvider } from "../core/baseProvider.js";
import { logger } from "../utils/logger.js";
import {
  createTimeoutController,
  TimeoutError,
} from "../utils/timeout.js";
import { DEFAULT_MAX_TOKENS } from "../core/constants.js";
import {
  validateApiKey,
  createAnthropicBaseConfig,
} from "../utils/providerConfig.js";
/**
 * Anthropic provider implementation using BaseProvider pattern
 * Migrated from direct API calls to Vercel AI SDK (@ai-sdk/anthropic)
 * Follows exact Google AI interface patterns for compatibility
 */
export class AnthropicProviderV2 extends BaseProvider {
    constructor(modelName) {
        super(modelName, "anthropic");
        logger.debug("AnthropicProviderV2 initialized", {
            model: this.modelName,
            provider: this.providerName,
        });
    }
    // ===================
    // ABSTRACT METHOD IMPLEMENTATIONS
    // ===================
    getProviderName() {
        return "anthropic";
    }
    getDefaultModel() {
        return process.env.ANTHROPIC_MODEL || "claude-3-5-sonnet-20241022";
    }
    /**
     * Returns the Vercel AI SDK model instance for Anthropic
     */
    getAISDKModel() {
        const apiKey = this.getApiKey();
        const anthropic = createAnthropic({ apiKey });
        return anthropic(this.modelName);
    }
    handleProviderError(error) {
        if (error instanceof TimeoutError) {
            return new Error(`Anthropic request timed out: ${error.message}`);
        }
        const errorWithStatus = error;
        if (errorWithStatus?.status === 401) {
            return new Error("Invalid Anthropic API key. Please check your ANTHROPIC_API_KEY environment variable.");
        }
        if (errorWithStatus?.status === 429) {
            return new Error("Anthropic rate limit exceeded. Please try again later.");
        }
        if (errorWithStatus?.status === 400) {
            return new Error(`Anthropic bad request: ${errorWithStatus?.message || "Invalid request parameters"}`);
        }
        return new Error(`Anthropic error: ${errorWithStatus?.message || String(error) || "Unknown error"}`);
    }
    // Configuration helper - now using consolidated utility
    getApiKey() {
        return validateApiKey(createAnthropicBaseConfig());
    }
    // executeGenerate removed - BaseProvider handles all generation with tools
    async executeStream(options, analysisSchema) {
        // Note: StreamOptions validation handled differently than TextGenerationOptions
        const apiKey = this.getApiKey();
        const anthropicClient = createAnthropic({ apiKey });
        const model = anthropicClient(this.modelName);
        const timeout = this.getTimeout(options);
        const timeoutController = createTimeoutController(timeout, this.providerName, "stream");
        try {
            const result = await streamText({
                model,
                prompt: options.input.text,
                system: options.systemPrompt,
                temperature: options.temperature,
                maxTokens: options.maxTokens || DEFAULT_MAX_TOKENS,
                tools: options.tools,
                toolChoice: "auto",
                abortSignal: timeoutController?.controller.signal,
            });
            timeoutController?.cleanup();
            // Transform string stream to content object stream (match Google AI pattern)
            const transformedStream = async function* () {
                for await (const chunk of result.textStream) {
                    yield { content: chunk };
                }
            };
            return {
                stream: transformedStream(),
                provider: this.providerName,
                model: this.modelName,
            };
        }
        catch (error) {
            timeoutController?.cleanup();
            throw this.handleProviderError(error);
        }
    }
}
// Export for testing
export default AnthropicProviderV2;
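
For context, a minimal usage sketch follows. It is an assumption-laden illustration, not part of the package: it assumes ANTHROPIC_API_KEY is set in the environment, uses an illustrative relative import path rather than the package's documented entry point, and calls executeStream directly even though the published package normally reaches streaming through BaseProvider's public interface.

// Hypothetical usage sketch: construct the provider and consume its stream.
// Assumes ANTHROPIC_API_KEY is set; the import path below is illustrative only.
import AnthropicProviderV2 from "./anthropic.js";

async function main() {
  const provider = new AnthropicProviderV2("claude-3-5-sonnet-20241022");

  // executeStream resolves to { stream, provider, model } (see the class above).
  const { stream } = await provider.executeStream({
    input: { text: "Summarize the BaseProvider pattern in two sentences." },
    systemPrompt: "You are a concise technical assistant.",
    temperature: 0.2,
    maxTokens: 256,
  });

  // Each chunk is a { content } object, per the transform in executeStream.
  for await (const chunk of stream) {
    process.stdout.write(chunk.content);
  }
}

main().catch((err) => {
  console.error(err);
  process.exit(1);
});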