@ooples/token-optimizer-mcp

Intelligent context window optimization for Claude Code - store content externally via caching and compression, freeing up your context window for what matters

FoundationModelEmbeddingGenerator.d.ts (39 lines, 1.4 kB)
import { IEmbeddingGenerator } from '../interfaces/IEmbeddingGenerator.js';
/**
 * A simple embedding generator using hashing and statistical features.
 * This is an MVP implementation that doesn't require external API calls.
 * Can be extended later to use OpenAI, HuggingFace, or other embedding APIs.
 */
export declare class FoundationModelEmbeddingGenerator implements IEmbeddingGenerator {
    private readonly dimensions;
    /**
     * @param dimensions - The dimensionality of the embedding vectors (default: 128)
     */
    constructor(dimensions?: number);
    /**
     * Generate an embedding vector for the given text.
     * Uses a hybrid approach combining:
     * 1. Hashing-based features for content similarity
     * 2. Statistical features (length, character distribution)
     * 3. N-gram features for semantic similarity
     */
    generateEmbedding(text: string): Promise<number[]>;
    getDimensions(): number;
    /**
     * Normalize text for consistent embedding generation
     */
    private normalizeText;
    /**
     * Compute statistical features from text
     */
    private computeStatistics;
    /**
     * Compute n-gram features for semantic similarity
     */
    private computeNgramFeatures;
    /**
     * Normalize a vector to unit length (L2 normalization)
     */
    private normalizeVector;
}
//# sourceMappingURL=FoundationModelEmbeddingGenerator.d.ts.map
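Only the declaration file is shown above, so the actual method bodies are not visible here. The following is a minimal TypeScript sketch of the hybrid approach the doc comments describe (hashed n-gram features, simple statistical features, L2 normalization); the class name, hash function, and feature choices are illustrative assumptions, not the package's implementation.

// Minimal sketch of a hashing + statistics + n-gram embedding generator,
// assuming an interface shaped like the package's IEmbeddingGenerator.
// All internals are illustrative; the real FoundationModelEmbeddingGenerator
// may differ.

interface IEmbeddingGenerator {
  generateEmbedding(text: string): Promise<number[]>;
  getDimensions(): number;
}

class HashedEmbeddingSketch implements IEmbeddingGenerator {
  constructor(private readonly dimensions: number = 128) {}

  getDimensions(): number {
    return this.dimensions;
  }

  async generateEmbedding(text: string): Promise<number[]> {
    // Normalize text for consistent embedding generation.
    const normalized = text.toLowerCase().trim().replace(/\s+/g, ' ');
    const vector = new Array<number>(this.dimensions).fill(0);

    // 1. Hash character trigrams into fixed-size buckets (hashing trick).
    for (let i = 0; i + 3 <= normalized.length; i++) {
      const gram = normalized.slice(i, i + 3);
      vector[this.hash(gram) % this.dimensions] += 1;
    }

    // 2. Fold in simple statistical features (length, letter/digit ratios).
    const letters = (normalized.match(/[a-z]/g) ?? []).length;
    const digits = (normalized.match(/[0-9]/g) ?? []).length;
    vector[0] += Math.log1p(normalized.length);
    vector[1] += normalized.length ? letters / normalized.length : 0;
    vector[2] += normalized.length ? digits / normalized.length : 0;

    // 3. L2-normalize so cosine similarity reduces to a dot product.
    const norm = Math.sqrt(vector.reduce((s, v) => s + v * v, 0)) || 1;
    return vector.map((v) => v / norm);
  }

  // FNV-1a style 32-bit string hash; any stable hash works here.
  private hash(s: string): number {
    let h = 0x811c9dc5;
    for (let i = 0; i < s.length; i++) {
      h ^= s.charCodeAt(i);
      h = Math.imul(h, 0x01000193) >>> 0;
    }
    return h;
  }
}

// Usage: because vectors are unit-length, cosine similarity is a dot product.
async function demo(): Promise<void> {
  const gen = new HashedEmbeddingSketch(128);
  const [a, b] = await Promise.all([
    gen.generateEmbedding('optimize the context window'),
    gen.generateEmbedding('context window optimization'),
  ]);
  const similarity = a.reduce((sum, v, i) => sum + v * b[i], 0);
  console.log(similarity.toFixed(3));
}
demo();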