@juspay/neurolink
Universal AI Development Platform with working MCP integration, multi-provider support, and a professional CLI. Built-in tools are operational, and 58+ external MCP servers are discoverable. Connect to filesystem, GitHub, database operations, and more.
TypeScript
import type { AIProviderName } from "../core/types.js";
import type { LanguageModelV1, Schema } from "ai";
import type { StreamOptions, StreamResult } from "../types/streamTypes.js";
import type { ZodUnknownSchema } from "../types/typeAliases.js";
import { BaseProvider } from "../core/baseProvider.js";
/**
* Ollama Provider v2 - BaseProvider Implementation
*
* PHASE 3.7: BaseProvider wrapper around the existing custom Ollama implementation
*
* Features:
* - Extends BaseProvider for shared functionality
* - Preserves custom OllamaLanguageModel implementation
* - Local model management and health checking
* - Enhanced error handling with Ollama-specific guidance
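*
* @example
* // Illustrative sketch; the model names below are assumptions, not part of this file:
* const viaDefault = new OllamaProvider(); // falls back to getDefaultModel()
* const explicit = new OllamaProvider("llama3.1"); // any locally pulled model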
*/
export declare class OllamaProvider extends BaseProvider {
private ollamaModel;
private baseUrl;
private timeout;
constructor(modelName?: string);
protected getProviderName(): AIProviderName;
protected getDefaultModel(): string;
/**
* Returns the Vercel AI SDK model instance for Ollama
* The OllamaLanguageModel implements the LanguageModelV1 interface.
*/
protected getAISDKModel(): LanguageModelV1;
/**
* Ollama Tool Calling Support (Enhanced 2025)
*
* Uses a configurable model list from ModelConfiguration instead of hardcoded values.
* Tool-capable models can be configured via OLLAMA_TOOL_CAPABLE_MODELS environment variable.
*
* **Configuration Options:**
* - Environment variable: OLLAMA_TOOL_CAPABLE_MODELS (comma-separated list)
* - Configuration file: providers.ollama.modelBehavior.toolCapableModels
* - Fallback: Default list of known tool-capable models
*
* **Implementation Features:**
* - Direct Ollama API integration (/v1/chat/completions)
* - Automatic tool schema conversion to Ollama format
* - Streaming tool calls with incremental response parsing
* - Model compatibility validation and fallback handling
*
* @returns true if the active model supports tool calling, false otherwise
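*
* @example
* // Illustrative sketch; the env value and model name are assumptions:
* // OLLAMA_TOOL_CAPABLE_MODELS="llama3.1,qwen2.5" (set before startup)
* const provider = new OllamaProvider("llama3.1");
* provider.supportsTools(); // true when the active model is in the configured list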
*/
supportsTools(): boolean;
protected executeStream(options: StreamOptions, analysisSchema?: ZodUnknownSchema | Schema<unknown>): Promise<StreamResult>;
/**
* Execute streaming with Ollama's function calling support
* Uses the /v1/chat/completions endpoint with tools parameter
*/
private executeStreamWithTools;
/**
* Execute streaming without tools using the generate API
* Fallback for non-tool scenarios or when chat API is unavailable
*/
private executeStreamWithoutTools;
/**
* Convert AI SDK tools format to Ollama's function calling format
*/
private convertToolsToOllamaFormat;
/**
* Create stream generator for Ollama chat API with tool call support
*/
private createOllamaChatStream;
/**
* Format tool calls for display when tools aren't executed directly
*/
private formatToolCallForDisplay;
/**
* Create stream generator for Ollama generate API (non-tool mode)
*/
private createOllamaStream;
protected handleProviderError(error: unknown): Error;
/**
* Check if Ollama service is healthy and accessible
*/
private checkOllamaHealth;
/**
* Get available models from Ollama
*/
getAvailableModels(): Promise<string[]>;
/**
* Check if a specific model is available
*/
isModelAvailable(modelName: string): Promise<boolean>;
/**
* Get recommendations for tool-calling capable Ollama models
* Provides guidance for users who want to use function calling locally
*/
static getToolCallingRecommendations(): {
recommended: string[];
performance: Record<string, {
speed: number;
quality: number;
size: string;
}>;
notes: Record<string, string>;
installation: Record<string, string>;
};
}
export default OllamaProvider;
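For reference, a minimal usage sketch against the public surface declared above. The import path, the model name, and the default Ollama endpoint (http://localhost:11434) are assumptions, not confirmed by this file:

// Hypothetical import path; the file's real location inside the package is not shown here.
import OllamaProvider from "@juspay/neurolink/dist/providers/ollamaProvider.js";

async function main(): Promise<void> {
  const provider = new OllamaProvider("llama3.1"); // model name is an assumption

  // List models known to the local Ollama daemon (default: http://localhost:11434).
  const models = await provider.getAvailableModels();
  console.log("available models:", models);

  // Confirm a model is pulled before relying on it, then check tool support.
  if (await provider.isModelAvailable("llama3.1")) {
    console.log("supports tools:", provider.supportsTools());
  }

  // Static guidance for choosing a tool-calling-capable local model.
  const recs = OllamaProvider.getToolCallingRecommendations();
  console.log("recommended models:", recs.recommended);
}

main().catch(console.error);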