@juspay/neurolink

Universal AI Development Platform with working MCP integration, multi-provider support, and a professional CLI. Built-in tools are operational and 58+ external MCP servers are discoverable. Connect to filesystem, GitHub, database operations, and more. Build, test, and …

import { createAzure } from "@ai-sdk/azure";
import { streamText } from "ai";
import { BaseProvider } from "../core/baseProvider.js";
import {
  validateApiKey,
  createAzureAPIKeyConfig,
  createAzureEndpointConfig,
} from "../utils/providerConfig.js";
import { logger } from "../utils/logger.js";
import { buildMessagesArray } from "../utils/messageBuilder.js";
import { createProxyFetch } from "../proxy/proxyFetch.js";

export class AzureOpenAIProvider extends BaseProvider {
  apiKey;
  resourceName;
  deployment;
  apiVersion;
  azureProvider;

  constructor(modelName, sdk) {
    super(modelName, "azure", sdk);
    this.apiKey = process.env.AZURE_OPENAI_API_KEY || "";
    const endpoint = process.env.AZURE_OPENAI_ENDPOINT || "";
    this.resourceName = endpoint
      .replace("https://", "")
      .replace(/\/+$/, "") // Remove trailing slashes
      .replace(".openai.azure.com", "");
    this.deployment =
      modelName ||
      process.env.AZURE_OPENAI_DEPLOYMENT ||
      process.env.AZURE_OPENAI_DEPLOYMENT_ID ||
      "gpt-4o";
    this.apiVersion = process.env.AZURE_API_VERSION || "2024-10-01-preview";
    // Configuration validation - now using consolidated utility
    if (!this.apiKey) {
      validateApiKey(createAzureAPIKeyConfig());
    }
    if (!this.resourceName) {
      validateApiKey(createAzureEndpointConfig());
    }
    // Create the Azure provider instance with proxy support
    this.azureProvider = createAzure({
      resourceName: this.resourceName,
      apiKey: this.apiKey,
      apiVersion: this.apiVersion,
      fetch: createProxyFetch(),
    });
    logger.debug("Azure Vercel Provider initialized", {
      deployment: this.deployment,
      resourceName: this.resourceName,
      provider: "azure-vercel",
    });
  }

  getProviderName() {
    return "azure";
  }

  getDefaultModel() {
    return this.deployment;
  }

  /**
   * Returns the Vercel AI SDK model instance for Azure OpenAI
   */
  getAISDKModel() {
    return this.azureProvider(this.deployment);
  }

  handleProviderError(error) {
    const errorObj = error;
    if (
      errorObj?.message &&
      typeof errorObj.message === "string" &&
      errorObj.message.includes("401")
    ) {
      return new Error("Invalid Azure OpenAI API key or endpoint.");
    }
    const message =
      errorObj?.message && typeof errorObj.message === "string"
        ? errorObj.message
        : "Unknown error";
    return new Error(`Azure OpenAI error: ${message}`);
  }

  // executeGenerate removed - BaseProvider handles all generation with tools
  async executeStream(options, analysisSchema) {
    try {
      // Build message array from options
      const messages = buildMessagesArray(options);
      const stream = await streamText({
        model: this.azureProvider(this.deployment),
        messages: messages,
        maxTokens: options.maxTokens || 1000,
        temperature: options.temperature || 0.7,
      });
      return {
        stream: (async function* () {
          for await (const chunk of stream.textStream) {
            yield { content: chunk };
          }
        })(),
        provider: "azure",
        model: this.deployment,
        metadata: {
          streamId: `azure-${Date.now()}`,
          startTime: Date.now(),
        },
      };
    } catch (error) {
      throw this.handleProviderError(error);
    }
  }
}

export default AzureOpenAIProvider;
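
For context, here is a minimal usage sketch. It is not part of the package file above: the import path and the shape of the options object passed to executeStream are assumptions, since BaseProvider and buildMessagesArray are not shown here. It only assumes the environment variables that the constructor reads.

// Minimal usage sketch -- assumptions are noted inline; not part of the package file.
// The import path below is hypothetical; adjust it to wherever the provider is exported from.
import AzureOpenAIProvider from "./providers/azureOpenaiProvider.js";

// The constructor reads these variables (see above); values here are placeholders.
process.env.AZURE_OPENAI_API_KEY = "<api-key>";
process.env.AZURE_OPENAI_ENDPOINT = "https://<resource-name>.openai.azure.com";

// modelName doubles as the deployment name; it falls back to
// AZURE_OPENAI_DEPLOYMENT / AZURE_OPENAI_DEPLOYMENT_ID / "gpt-4o".
const provider = new AzureOpenAIProvider("gpt-4o");

// executeStream builds its message array via buildMessagesArray, whose accepted
// options shape is not visible in this file, so `input.text` is an assumption.
const result = await provider.executeStream({
  input: { text: "Say hello from Azure OpenAI." },
  maxTokens: 256,
  temperature: 0.5,
});

// The returned object exposes an async iterator of { content } chunks.
for await (const chunk of result.stream) {
  process.stdout.write(chunk.content);
}

In practice, callers likely go through the package's higher-level BaseProvider/NeuroLink entry points rather than calling executeStream directly; the sketch only exercises what is visible in this file.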