@codai/memorai-core
Simplified advanced memory engine - no tiers, just powerful semantic search with persistence
/**
* Local AI Embedding Service
* Provides semantic embeddings without external dependencies
*/
import type { EmbeddingResult } from './EmbeddingService.js';
export interface LocalEmbeddingConfig {
  model: string;
  maxLength: number;
  cachePath?: string;
  pythonPath?: string;
}
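/**
 * Example of a partial config as accepted by the constructor below; field values
 * here are hypothetical illustrations, not documented defaults:
 *
 *   const config: Partial<LocalEmbeddingConfig> = {
 *     model: 'all-MiniLM-L6-v2', // assumed sentence-transformers model name
 *     maxLength: 512,            // assumed maximum input length in tokens
 *   };
 */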
export declare class LocalEmbeddingService {
  private config;
  private cache;
  private pythonScriptPath;
  constructor(config?: Partial<LocalEmbeddingConfig>);
  /**
   * Generate embeddings for text using local sentence-transformers
   */
  embed(text: string): Promise<EmbeddingResult>;
  /**
   * Generate embedding using Python sentence-transformers
   */
  private generateEmbedding;
  /**
   * Ensure the Python script exists
   */
  private ensurePythonScript;
  /**
   * Create the Python embedding script
   */
  private createPythonScript;
  /**
   * Load embedding cache from disk
   */
  private loadCache;
  /**
   * Save embedding cache to disk
   */
  private saveCache;
  /**
   * Estimate token count (rough approximation)
   */
  private estimateTokens;
  /**
   * Check if local AI is available
   */
  static isAvailable(pythonPath?: string): Promise<boolean>;
  /**
   * Install sentence-transformers if not available
   */
  static install(pythonPath?: string): Promise<boolean>;
}
//# sourceMappingURL=LocalEmbeddingService.d.ts.map
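For orientation, below is a minimal consumer sketch built only from the surface declared above (isAvailable, install, the constructor, and embed). The import specifier, Python path, and config values are assumptions for illustration; EmbeddingResult's shape is defined in ./EmbeddingService.js and is simply passed through.

// Hypothetical usage sketch; assumes the class is re-exported from the package root.
import { LocalEmbeddingService } from '@codai/memorai-core';

async function embedText(text: string) {
  // Check whether a local Python + sentence-transformers setup is usable.
  if (!(await LocalEmbeddingService.isAvailable('python3'))) {
    // Try to install sentence-transformers; give up if that fails too.
    const installed = await LocalEmbeddingService.install('python3');
    if (!installed) {
      throw new Error('Local embedding backend is not available');
    }
  }

  // All config fields are optional (Partial<LocalEmbeddingConfig>);
  // the values below are illustrative, not documented defaults.
  const service = new LocalEmbeddingService({
    model: 'all-MiniLM-L6-v2',
    maxLength: 512,
  });

  // Resolves to an EmbeddingResult (declared in ./EmbeddingService.js).
  return service.embed(text);
}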