promptforge
Adaptive Prompt Intelligence & Orchestration SDK - Manage, optimize, and serve prompts for LLMs with versioning, feedback loops, and multi-provider support
llm-adapters.d.ts (TypeScript)
import { LLMProvider, LLMConfig } from '../types.js';
export interface LLMResponse {
    content: string;
    inputTokens: number;
    outputTokens: number;
    model: string;
    provider: LLMProvider;
    finishReason?: string;
    metadata?: Record<string, unknown>;
}
export interface LLMAdapter {
    execute(prompt: string, config: LLMConfig): Promise<LLMResponse>;
    isAvailable(): Promise<boolean>;
}
export declare class OpenAIAdapter implements LLMAdapter {
    execute(prompt: string, config: LLMConfig): Promise<LLMResponse>;
    isAvailable(): Promise<boolean>;
}
export declare class AnthropicAdapter implements LLMAdapter {
    execute(prompt: string, config: LLMConfig): Promise<LLMResponse>;
    isAvailable(): Promise<boolean>;
}
export declare class GoogleAdapter implements LLMAdapter {
    execute(prompt: string, config: LLMConfig): Promise<LLMResponse>;
    isAvailable(): Promise<boolean>;
}
export declare class MistralAdapter implements LLMAdapter {
    execute(prompt: string, config: LLMConfig): Promise<LLMResponse>;
    isAvailable(): Promise<boolean>;
}
export declare class OllamaAdapter implements LLMAdapter {
    execute(prompt: string, config: LLMConfig): Promise<LLMResponse>;
    isAvailable(): Promise<boolean>;
}
export declare class LLMRouter {
    private adapters;
    constructor();
    /**
     * Execute a prompt with the specified provider, or fall back through the given provider chain.
     */
    execute(prompt: string, config: LLMConfig, fallbackProviders?: LLMProvider[]): Promise<LLMResponse>;
    /**
     * Get the list of currently available providers.
     */
    getAvailableProviders(): Promise<LLMProvider[]>;
    /**
     * Check whether a specific provider is available.
     */
    isProviderAvailable(provider: LLMProvider): Promise<boolean>;
}
//# sourceMappingURL=llm-adapters.d.ts.map
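
A minimal usage sketch (not part of the declaration file above). It assumes the package root re-exports the symbols declared here under the name 'promptforge', that LLMProvider values are plain provider names such as 'openai', 'anthropic', and 'ollama', and that LLMConfig accepts provider, model, and temperature fields; the import path, provider strings, model name, and config shape are illustrative assumptions, not confirmed by these declarations.

// Assumed root re-exports; adjust the import path and names to the real package exports.
import { LLMRouter, OpenAIAdapter } from 'promptforge';
import type { LLMConfig, LLMProvider, LLMResponse } from 'promptforge';

async function run(): Promise<void> {
  const router = new LLMRouter();

  // Ask the router which providers can currently serve requests.
  const providers: LLMProvider[] = await router.getAvailableProviders();
  console.log('available providers:', providers);

  // Illustrative config; the real LLMConfig shape lives in the package's types module.
  const config = {
    provider: 'openai',
    model: 'gpt-4o-mini',
    temperature: 0.2,
  } as unknown as LLMConfig;

  // Execute with a fallback chain: if the primary provider fails or is
  // unavailable, the router tries 'anthropic', then a local 'ollama' instance.
  const response: LLMResponse = await router.execute(
    'Summarize the latest release notes in two sentences.',
    config,
    ['anthropic', 'ollama'] as unknown as LLMProvider[],
  );

  console.log(`${response.provider}/${response.model}:`, response.content);
  console.log(`tokens: ${response.inputTokens} in, ${response.outputTokens} out`);

  // A single adapter can also be called directly, bypassing the router.
  const openai = new OpenAIAdapter();
  if (await openai.isAvailable()) {
    const direct = await openai.execute('Say hello.', config);
    console.log(direct.content);
  }
}

run().catch(console.error);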