gitdb-database

A production-ready CLI tool for managing a NoSQL database using GitHub repositories as storage

local-llm.d.ts (105 lines, 2.38 kB)
export interface LLMResponse {
    command: string;
    confidence: number;
    explanation: string;
    parameters?: Record<string, any>;
    suggestedQuery?: string;
}
export interface LLMConfig {
    modelPath: string;
    modelType: 'llama' | 'gpt4all' | 'mistral';
    maxTokens: number;
    temperature: number;
    contextWindow: number;
}
export declare class LocalLLM {
    private config;
    private historyPath;
    private isModelLoaded;
    constructor(config: LLMConfig);
    /**
     * Initialize the local LLM
     */
    initialize(): Promise<void>;
    /**
     * Process natural language query using local LLM
     */
    processQuery(query: string, context?: any): Promise<LLMResponse>;
    /**
     * Process query using local LLM
     */
    private processWithLLM;
    /**
     * Process query using pattern matching (fallback)
     */
    private processWithPatterns;
    /**
     * Extract collection name from query
     */
    private extractCollectionFromQuery;
    /**
     * Extract query parameters for find operations
     */
    private extractQueryParams;
    /**
     * Extract document data for insert operations
     */
    private extractDocumentData;
    /**
     * Extract update parameters
     */
    private extractUpdateParams;
    /**
     * Extract ID for delete operations
     */
    private extractIdFromQuery;
    /**
     * Extract count parameters
     */
    private extractCountParams;
    /**
     * Extract collection name for create operations
     */
    private extractCollectionName;
    /**
     * Extract rollback parameters
     */
    private extractRollbackParams;
    /**
     * Generate suggested query from command and parameters
     */
    private generateSuggestedQuery;
    /**
     * Build prompt for LLM
     */
    private buildPrompt;
    /**
     * Run LLaMA model
     */
    private runLlama;
    /**
     * Run GPT4All model
     */
    private runGPT4All;
    /**
     * Run Mistral model
     */
    private runMistral;
    /**
     * Parse LLM response
     */
    private parseLLMResponse;
    /**
     * Save query to history for learning
     */
    saveToHistory(query: string, response: LLMResponse): Promise<void>;
    /**
     * Load query history
     */
    private loadHistory;
    /**
     * Save query history
     */
    private saveHistory;
}
//# sourceMappingURL=local-llm.d.ts.map
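
The declaration file only describes the shape of the API. A minimal usage sketch under stated assumptions follows: it assumes LocalLLM, LLMConfig, and LLMResponse are re-exported from the package root (the actual import path is not shown on this page), and every configuration value below is hypothetical; only the field names, types, and method signatures come from the declarations above.

// Usage sketch; import path and all config values are assumptions, not package documentation.
import { LocalLLM, LLMConfig, LLMResponse } from 'gitdb-database';

async function main(): Promise<void> {
    // Hypothetical model settings; LLMConfig requires these five fields.
    const config: LLMConfig = {
        modelPath: './models/example-model.gguf',
        modelType: 'llama',
        maxTokens: 256,
        temperature: 0.2,
        contextWindow: 2048,
    };

    const llm = new LocalLLM(config);

    // Load the local model before issuing queries.
    await llm.initialize();

    // Translate a natural-language request into a structured database command.
    const question = 'find all users older than 30';
    const response: LLMResponse = await llm.processQuery(question);
    console.log(response.command, response.confidence, response.suggestedQuery);

    // Persist the query/response pair; the class exposes this for learning from history.
    await llm.saveToHistory(question, response);
}

main().catch(console.error);

Note that processQuery resolves to an LLMResponse whose confidence and suggestedQuery fields let a caller decide whether to run the generated command or fall back to the pattern-matching path the class declares internally.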