// @nullplatform/llm-gateway
// Version: (unspecified)
// LLM Gateway Core - Main proxy server
// 139 lines • 5.36 kB
// JavaScript (compiled output)
// packages/core/basic-apikey-auth/providers/ollama.ts
Object.defineProperty(exports, "__esModule", { value: true });
exports.OllamaProviderFactory = exports.OllamaProvider = void 0;
const llm_gateway_sdk_1 = require("@nullplatform/llm-gateway-sdk");
const logger_js_1 = require("../utils/logger.js");
const openai_js_1 = require("./openai.js");
/**
 * LLM provider for a local Ollama server.
 *
 * Ollama exposes an OpenAI-compatible API under the `/v1` path, so this class
 * reuses the OpenAIProvider for request execution and only customizes:
 *  - config translation (Ollama config -> OpenAI config shape),
 *  - request/response logging interceptors,
 *  - Ollama-specific error translation,
 *  - removal of sampling parameters Ollama does not honour.
 */
class OllamaProvider extends openai_js_1.OpenAIProvider {
    // Provider identifier the gateway routes on.
    // @ts-ignore
    name = 'ollama';
    // @ts-ignore
    config;
    // HTTP client (axios-style) obtained from the parent OpenAI provider.
    ollamaClient;
    /**
     * @param config  Ollama provider config (baseUrl, model, apiKey, retry settings).
     * @param logger  Gateway logger instance.
     */
    constructor(config, logger) {
        // Convert Ollama config to the OpenAI config shape the parent class expects.
        const openaiConfig = {
            bypassModel: config.bypassModel,
            baseUrl: config.baseUrl || 'http://localhost:11434/v1', // Default Ollama URL with v1 path
            model: config.model,
            apiKey: config.apiKey, // Ollama doesn't need real API key, but some clients expect it
            retryAttempts: config.retryAttempts,
            retryDelay: config.retryDelay
        };
        super(openaiConfig, logger, true);
        this.ollamaClient = super.getHttpClient();
        this.setupOllamaInterceptors();
    }
    /**
     * Installs debug-logging interceptors on the HTTP client and routes
     * response failures through transformOllamaError for friendlier messages.
     */
    setupOllamaInterceptors() {
        this.ollamaClient.interceptors.request.use((config) => {
            this.logger.debug('Ollama request', {
                method: config.method,
                url: config.url,
                headers: config.headers
            });
            return config;
        }, (error) => {
            this.logger.error('Ollama request error', { error });
            return Promise.reject(error);
        });
        this.ollamaClient.interceptors.response.use((response) => {
            this.logger.debug('Ollama response', {
                status: response.status,
                headers: response.headers
            });
            return response;
        }, (error) => {
            this.logger.error('Ollama response error', {
                status: error.response?.status,
                data: error.response?.data,
                message: error.message
            });
            return Promise.reject(this.transformOllamaError(error));
        });
    }
    /**
     * Builds the OpenAI-format request body, then strips parameters Ollama
     * doesn't support and merges in Ollama-specific options from metadata.
     */
    buildOpenAIRequest(request) {
        const openaiRequest = super.buildOpenAIRequest(request);
        // Remove parameters that Ollama doesn't support or handle differently
        delete openaiRequest.frequency_penalty;
        delete openaiRequest.presence_penalty;
        // Add Ollama-specific parameters if available in metadata
        if (request.metadata?.custom) {
            const custom = request.metadata.custom;
            if (custom.keep_alive) {
                openaiRequest.keep_alive = custom.keep_alive;
            }
            if (custom.raw !== undefined) {
                openaiRequest.raw = custom.raw;
            }
        }
        return openaiRequest;
    }
    /**
     * Executes a non-streaming completion via the parent's OpenAI-compatible
     * path; failures are logged (with truncated message content) and rethrown
     * as LLMModelError.
     */
    async execute(request) {
        try {
            // Use parent's execute method which handles the OpenAI-compatible endpoint
            return await super.execute(request);
        }
        catch (error) {
            this.logger.error('Ollama request failed', {
                error,
                request: this.sanitizeRequest(request)
            });
            throw new llm_gateway_sdk_1.LLMModelError(error);
        }
    }
    /**
     * Streaming variant of execute(); same logging and error wrapping.
     */
    async executeStreaming(request, chunkEmitter) {
        try {
            // Use parent's streaming method
            return await super.executeStreaming(request, chunkEmitter);
        }
        catch (error) {
            this.logger.error('Ollama streaming request failed', {
                error,
                request: this.sanitizeRequest(request)
            });
            throw new llm_gateway_sdk_1.LLMModelError(error);
        }
    }
    /**
     * Translates transport/HTTP failures into descriptive Errors.
     * Handles: missing model (404), Ollama error payloads, connection refused
     * (server not running) and timeouts (model possibly still loading).
     */
    transformOllamaError(error) {
        if (error.response) {
            const status = error.response.status;
            const data = error.response.data;
            // Ollama reports failures as { error: "..." }. Guard the type so a
            // non-string payload can't throw a TypeError while we're already
            // handling an error; stringify objects instead of "[object Object]".
            let detail;
            if (typeof data?.error === 'string') {
                detail = data.error;
            }
            else if (data?.error !== undefined && data?.error !== null) {
                detail = JSON.stringify(data.error);
            }
            // Handle common Ollama errors
            if (status === 404 && detail?.includes('model')) {
                return new Error(`Ollama model not found: ${detail}`);
            }
            if (detail !== undefined) {
                return new Error(`Ollama API Error (${status}): ${detail}`);
            }
            return new Error(`Ollama API Error (${status}): ${error.message}`);
        }
        if (error.code === 'ECONNREFUSED') {
            return new Error('Ollama server is not running. Please start Ollama service.');
        }
        if (error.code === 'ECONNABORTED') {
            return new Error('Ollama API request timeout - model might be loading');
        }
        return error;
    }
    /**
     * Returns a shallow copy of the request with long string message contents
     * truncated to 100 chars for safe logging. Non-string content (e.g.
     * multimodal part arrays) is passed through untouched rather than fed to
     * substring(), which would throw.
     */
    sanitizeRequest(request) {
        const sanitized = { ...request };
        if (sanitized.messages) {
            sanitized.messages = sanitized.messages.map((msg) => ({
                ...msg,
                content: typeof msg.content === 'string' && msg.content.length > 100
                    ? msg.content.substring(0, 100) + '...'
                    : msg.content
            }));
        }
        return sanitized;
    }
}
exports.OllamaProvider = OllamaProvider;
/**
 * Factory registered with the gateway for the "ollama" provider type.
 */
class OllamaProviderFactory {
    name = 'Ollama Provider Factory';
    type = 'ollama';
    /**
     * Builds an OllamaProvider, falling back to a fresh Logger when the
     * caller does not supply one.
     */
    create(config, logger) {
        const effectiveLogger = logger || new logger_js_1.Logger();
        return new OllamaProvider(config, effectiveLogger);
    }
}
exports.OllamaProviderFactory = OllamaProviderFactory;
//# sourceMappingURL=ollama.js.map
;