llmverify
AI Output Verification Toolkit — Local-first LLM safety, hallucination detection, PII redaction, prompt injection defense, and runtime monitoring. Zero telemetry. OWASP LLM Top 10 aligned.
/**
* Adapter Factory
*
* Creates unified LLM clients from provider-specific configurations.
* Uses a registry pattern for extensibility.
*
* @module adapters/factory
* @author Haiec
* @license MIT
*/
import { ProviderId, LlmClient, AdapterConfig, AdapterBuilder } from './types';
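/*
 * How the registry works (illustrative sketch only): the shipped implementation is
 * compiled JavaScript, so the code below is an assumption about its shape, not this
 * module's actual source. It shows the registry pattern the module header describes:
 * builders keyed by provider ID, looked up when a client is created.
 *
 *   const registry = new Map<string, AdapterBuilder>();
 *
 *   function registerAdapter(provider: string, builder: AdapterBuilder): void {
 *     registry.set(provider, builder);
 *   }
 *
 *   function createAdapter(config: AdapterConfig): LlmClient {
 *     const builder = registry.get(config.provider);
 *     if (!builder) {
 *       // UnsupportedProviderError is named in the @throws tag below; its
 *       // constructor signature here is assumed for illustration.
 *       throw new UnsupportedProviderError(config.provider);
 *     }
 *     return builder(config);
 *   }
 */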
/**
* Creates a unified LLM client from provider configuration.
*
* @param config - Adapter configuration
* @returns Unified LLM client
* @throws {UnsupportedProviderError} If the provider is not registered
*
* @example
* // With existing OpenAI client
* import OpenAI from 'openai';
* const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });
* const llm = createAdapter({ provider: 'openai', client: openai });
*
* @example
* // With API key only
* const llm = createAdapter({
*   provider: 'openai',
*   apiKey: process.env.OPENAI_API_KEY,
*   defaultModel: 'gpt-4o-mini'
* });
*
* @example
* // Local model
* const llm = createAdapter({
*   provider: 'local',
*   client: async (prompt) => await myLocalModel(prompt)
* });
*/
export declare function createAdapter(config: AdapterConfig): LlmClient;
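/*
 * Error-handling sketch (illustrative, not from this file): createAdapter throws
 * UnsupportedProviderError for unregistered providers, so a caller can fall back to a
 * local client. Whether and where UnsupportedProviderError is exported is an assumption
 * here; only the @throws tag above names it.
 *
 *   let llm: LlmClient;
 *   try {
 *     llm = createAdapter({ provider: 'openai', apiKey: process.env.OPENAI_API_KEY });
 *   } catch (err) {
 *     // Narrow this check to UnsupportedProviderError if the class is exported.
 *     llm = createAdapter({
 *       provider: 'local',
 *       client: async (prompt) => await myLocalModel(prompt)
 *     });
 *   }
 */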
/**
* Registers a custom adapter builder.
* Use this to add support for providers not included by default.
*
* @param provider - Provider identifier
* @param builder - Adapter builder function
*
* @example
* registerAdapter('my-provider', (config) => ({
*   provider: 'custom',
*   providerName: 'My Provider',
*   async generate(request) {
*     // Your implementation
*     return { text: '...', tokens: 0 };
*   }
* }));
*/
export declare function registerAdapter(provider: ProviderId | string, builder: AdapterBuilder): void;
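/*
 * End-to-end sketch (illustrative): registering a custom provider and then obtaining a
 * client for it through the factory. callMyProviderApi is a hypothetical helper, and
 * the request fields passed to generate() are assumptions for demonstration; only the
 * { text, tokens } response fields appear in the example above.
 *
 *   registerAdapter('my-provider', (config) => ({
 *     provider: 'custom',
 *     providerName: 'My Provider',
 *     async generate(request) {
 *       const text = await callMyProviderApi(request); // hypothetical helper
 *       return { text, tokens: 0 };
 *     }
 *   }));
 *
 *   // Additional AdapterConfig fields may be required depending on the provider.
 *   const llm = createAdapter({ provider: 'my-provider' });
 *   const result = await llm.generate({ prompt: 'Hello' }); // request fields assumed
 */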
/**
 * Gets the list of registered provider IDs.
*/
export declare function getRegisteredProviders(): ProviderId[];
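/*
 * Usage sketch (illustrative): checking what the registry currently knows before
 * creating a client, e.g. to surface a clearer error than the factory's throw.
 *
 *   const providers = getRegisteredProviders();
 *   if (!providers.includes('openai')) {
 *     console.warn('openai adapter not registered; available:', providers);
 *   }
 */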