@stackmemoryai/stackmemory
Version:
Lossless, project-scoped memory for AI coding tools. Durable context across sessions with 56 MCP tools, FTS5 search, conductor orchestrator, loop/watch monitoring, snapshot capture, pre-flight overlap checks, Claude/Codex/OpenCode wrappers, Linear sync, and more.
55 lines (54 loc) • 1.47 kB
JavaScript
// CommonJS-style __filename/__dirname shim for an ES module context.
import { fileURLToPath as __fileURLToPath } from 'url';
import { dirname as __pathDirname } from 'path';
// NOTE(review): neither __filename nor __dirname is referenced in the
// visible code — this looks like an unconditionally bundler-injected
// preamble; confirm before removing.
const __filename = __fileURLToPath(import.meta.url);
const __dirname = __pathDirname(__filename);
/**
 * Embedding provider backed by `@xenova/transformers`.
 *
 * The heavyweight pipeline is loaded lazily on first use; concurrent
 * callers share a single in-flight load, and a failed load can be
 * retried by a later call.
 */
class TransformersEmbeddingProvider {
  // Output vector length reported to callers (not enforced here).
  dimension;
  // Lazily created feature-extraction pipeline; null until loaded.
  pipeline = null;
  // Model identifier handed to the pipeline factory.
  modelName;
  // Shared in-flight initialization promise; null when idle.
  initPromise = null;
  /**
   * @param {string} [modelName] - transformers model id to load.
   * @param {number} [dimension] - advertised embedding dimension.
   */
  constructor(modelName = "Xenova/all-MiniLM-L6-v2", dimension = 384) {
    this.modelName = modelName;
    this.dimension = dimension;
  }
  /**
   * Load the pipeline exactly once. Rejects if the dynamic import or
   * pipeline construction fails; the failure resets state so a
   * subsequent call starts a fresh load attempt.
   */
  async ensureLoaded() {
    if (this.pipeline !== null) return;
    if (this.initPromise !== null) {
      // Another caller already started loading — piggyback on it.
      await this.initPromise;
      return;
    }
    this.initPromise = (async () => {
      const mod = await import("@xenova/transformers");
      this.pipeline = await mod.pipeline("feature-extraction", this.modelName);
    })();
    try {
      await this.initPromise;
    } catch (err) {
      // Clear the failed attempt so later calls can retry.
      this.initPromise = null;
      throw err;
    }
  }
  /**
   * Embed a single string with mean pooling and L2 normalization.
   * @param {string} text
   * @returns {Promise<number[]>} plain numeric array copied from the
   *   pipeline's output tensor data.
   */
  async embed(text) {
    await this.ensureLoaded();
    const options = {
      pooling: "mean",
      normalize: true
    };
    const output = await this.pipeline(text, options);
    return [...output.data];
  }
  /**
   * Embed several strings; all embeddings run concurrently and the
   * results come back in input order.
   * @param {string[]} texts
   * @returns {Promise<number[][]>}
   */
  async embedBatch(texts) {
    const pending = texts.map((text) => this.embed(text));
    return Promise.all(pending);
  }
}
/**
 * Factory that probes whether `@xenova/transformers` can be imported.
 * @param {string} [modelName] - forwarded to the provider constructor.
 * @param {number} [dimension] - forwarded to the provider constructor.
 * @returns {Promise<TransformersEmbeddingProvider|null>} a provider, or
 *   null when the optional dependency is unavailable.
 */
async function createTransformersProvider(modelName, dimension) {
  let provider = null;
  try {
    await import("@xenova/transformers");
    provider = new TransformersEmbeddingProvider(modelName, dimension);
  } catch {
    // Optional dependency missing — signal by returning null.
  }
  return provider;
}
// Public surface: the class for direct construction, plus the factory
// that degrades to null when @xenova/transformers is not installed.
export {
  TransformersEmbeddingProvider,
  createTransformersProvider
};