@restnfeel/agentc-starter-kit
Version:
한국어 기업용 CMS 모듈 - Task Master AI와 함께 빠르게 웹사이트를 구현할 수 있는 재사용 가능한 컴포넌트 시스템
113 lines • 3.89 kB
TypeScript
import { LLMProvider, LLMConfig, Document } from "../contexts/ChatbotContext";
/**
 * Result of a single LLM completion call, as returned by
 * {@link BaseLLM.generateResponse}.
 */
export interface LLMResponse {
  /** The generated text. */
  content: string;
  /** Optional provider-reported details about the generation. */
  metadata?: {
    /** Identifier of the model that produced the response. */
    model?: string;
    /** Token usage breakdown, when the provider reports one. */
    tokens?: {
      /** Tokens consumed by the input prompt. */
      prompt: number;
      /** Tokens produced in the completion. */
      completion: number;
      /** prompt + completion. */
      total: number;
    };
    /**
     * Why generation stopped.
     * NOTE(review): exact values are provider-specific (e.g. "stop",
     * "length") — confirm against each adapter's implementation.
     */
    finishReason?: string;
    /**
     * Confidence score for the response.
     * NOTE(review): range/semantics not visible in this declaration —
     * confirm in the implementation.
     */
    confidence?: number;
  };
}
/**
 * Result of a retrieval-augmented generation call, as returned by
 * {@link RAGChainService.generateRAGResponse}.
 */
export interface RAGResponse {
  /** The generated answer text. */
  response: string;
  /** Documents retrieved from the vector store and used as context. */
  sources: Document[];
  /** Optional details about retrieval and generation. */
  metadata?: {
    /** Number of documents retrieved for this query. */
    retrievedDocCount: number;
    /**
     * Per-document similarity scores.
     * NOTE(review): presumably parallel to `sources` and produced by the
     * vector store's similarity metric — confirm in the implementation.
     */
    similarity: number[];
    /** Identifier of the model that produced the response. */
    model?: string;
    /** Token usage breakdown, when the provider reports one. */
    tokens?: {
      /** Tokens consumed by the input prompt (including context). */
      prompt: number;
      /** Tokens produced in the completion. */
      completion: number;
      /** prompt + completion. */
      total: number;
    };
  };
}
/**
 * Abstract base class for LLM provider adapters.
 *
 * Concrete subclasses ({@link OpenAILLM}, {@link AnthropicLLM},
 * {@link GoogleLLM}, {@link MistralLLM}) implement the connection
 * lifecycle, generation, and health-check methods; this base class
 * provides shared state plus {@link getStatus} and {@link updateConfig}.
 */
export declare abstract class BaseLLM {
  /** Provider configuration (API key, model, etc. — see LLMConfig). */
  protected config: LLMConfig;
  /** Whether {@link connect} has completed successfully. */
  protected isConnected: boolean;
  /** Model identifiers available from the provider. */
  protected availableModels: string[];
  constructor(config: LLMConfig);
  /** Establishes the provider connection/client. */
  abstract connect(): Promise<void>;
  /** Tears down the provider connection/client. */
  abstract disconnect(): Promise<void>;
  /**
   * Generates a completion for `prompt`, optionally grounded in the
   * given context documents.
   */
  abstract generateResponse(prompt: string, context?: Document[]): Promise<LLMResponse>;
  /** Lists model identifiers available from the provider. */
  abstract listModels(): Promise<string[]>;
  /** Resolves true when the configured API key is accepted by the provider. */
  abstract validateApiKey(): Promise<boolean>;
  /** Resolves true when the provider is reachable and responsive. */
  abstract healthCheck(): Promise<boolean>;
  /** Snapshot of connection state, configuration, and known models. */
  getStatus(): {
    isConnected: boolean;
    config: LLMConfig;
    availableModels: string[];
  };
  /**
   * Merges `updates` into the current configuration.
   * NOTE(review): whether this triggers a reconnect is not visible in
   * this declaration — confirm in the implementation.
   */
  updateConfig(updates: Partial<LLMConfig>): void;
}
/**
 * {@link BaseLLM} adapter for the OpenAI API.
 * Holds a private provider client created during {@link connect}.
 */
export declare class OpenAILLM extends BaseLLM {
  /** Underlying OpenAI SDK client (type not exposed in this declaration). */
  private client;
  constructor(config: LLMConfig);
  connect(): Promise<void>;
  disconnect(): Promise<void>;
  generateResponse(prompt: string, context?: Document[]): Promise<LLMResponse>;
  listModels(): Promise<string[]>;
  validateApiKey(): Promise<boolean>;
  healthCheck(): Promise<boolean>;
}
/**
 * {@link BaseLLM} adapter for the Anthropic API.
 * Holds a private provider client created during {@link connect}.
 */
export declare class AnthropicLLM extends BaseLLM {
  /** Underlying Anthropic SDK client (type not exposed in this declaration). */
  private client;
  constructor(config: LLMConfig);
  connect(): Promise<void>;
  disconnect(): Promise<void>;
  generateResponse(prompt: string, context?: Document[]): Promise<LLMResponse>;
  listModels(): Promise<string[]>;
  validateApiKey(): Promise<boolean>;
  healthCheck(): Promise<boolean>;
}
/**
 * {@link BaseLLM} adapter for Google's LLM API.
 * Unlike the OpenAI/Anthropic adapters, no private client field or
 * constructor override is declared here.
 */
export declare class GoogleLLM extends BaseLLM {
  connect(): Promise<void>;
  disconnect(): Promise<void>;
  generateResponse(prompt: string, context?: Document[]): Promise<LLMResponse>;
  listModels(): Promise<string[]>;
  validateApiKey(): Promise<boolean>;
  healthCheck(): Promise<boolean>;
}
/**
 * {@link BaseLLM} adapter for the Mistral API.
 * Unlike the OpenAI/Anthropic adapters, no private client field or
 * constructor override is declared here.
 */
export declare class MistralLLM extends BaseLLM {
  connect(): Promise<void>;
  disconnect(): Promise<void>;
  generateResponse(prompt: string, context?: Document[]): Promise<LLMResponse>;
  listModels(): Promise<string[]>;
  validateApiKey(): Promise<boolean>;
  healthCheck(): Promise<boolean>;
}
/**
 * Factory returning a provider-specific {@link BaseLLM} instance for the
 * given configuration.
 * NOTE(review): presumably dispatches on `config.provider` to one of the
 * adapter classes above — confirm in the implementation.
 */
export declare function createLLM(config: LLMConfig): BaseLLM;
/**
 * Orchestrates retrieval-augmented generation: retrieves documents from a
 * vector store and feeds them as context to an LLM adapter.
 *
 * NOTE(review): `vectorStore` is typed `any` in this declaration, so its
 * contract is invisible to consumers — consider introducing a VectorStore
 * interface in the implementation and regenerating these typings.
 */
export declare class RAGChainService {
  /** LLM adapter used for generation; replaceable via {@link updateLLM}. */
  private llm;
  /** Document retrieval backend; replaceable via {@link updateVectorStore}. */
  private vectorStore;
  constructor(llm: BaseLLM, vectorStore: any);
  /**
   * Retrieves documents relevant to `query` and generates an answer
   * grounded in them.
   *
   * @param query - The user's question.
   * @param options - Retrieval tuning: cap on retrieved documents and a
   *   minimum similarity cutoff (defaults not visible in this declaration).
   */
  generateRAGResponse(query: string, options?: {
    maxRetrievedDocs?: number;
    similarityThreshold?: number;
  }): Promise<RAGResponse>;
  /** Swaps the LLM adapter used for subsequent calls. */
  updateLLM(llm: BaseLLM): void;
  /** Swaps the vector store used for subsequent calls. */
  updateVectorStore(vectorStore: any): void;
}
/**
 * Static helper utilities for LLM configuration and prompt/context
 * management. Stateless — all members are static.
 */
export declare class LLMUtils {
  /** Returns true when `config` passes validation (criteria defined in the implementation). */
  static validateConfig(config: LLMConfig): boolean;
  /** Returns partial default configuration values for the given provider. */
  static getDefaultConfig(provider: LLMProvider): Partial<LLMConfig>;
  /**
   * Builds a single prompt string combining the query, the retrieved
   * documents, and an optional system prompt.
   */
  static buildContextPrompt(query: string, documents: Document[], systemPrompt?: string): string;
  /**
   * Estimates the token count of `text`.
   * NOTE(review): presumably a heuristic (e.g. character-based), not a
   * real tokenizer — confirm before relying on it for hard limits.
   */
  static estimateTokens(text: string): number;
  /**
   * Returns a subset of `documents` whose estimated total tokens fit
   * within `maxTokens`.
   */
  static truncateContext(documents: Document[], maxTokens: number): Document[];
}
/**
 * Aggregate default export bundling every public class and helper of this
 * module, mirroring the named exports above.
 */
declare const _default: {
  BaseLLM: typeof BaseLLM;
  OpenAILLM: typeof OpenAILLM;
  AnthropicLLM: typeof AnthropicLLM;
  GoogleLLM: typeof GoogleLLM;
  MistralLLM: typeof MistralLLM;
  createLLM: typeof createLLM;
  RAGChainService: typeof RAGChainService;
  LLMUtils: typeof LLMUtils;
};
export default _default;
//# sourceMappingURL=llmService.d.ts.map