UNPKG

@hpbyte/h-codex-core

Version:

Core indexing and search functionality for h-codex

60 lines 2.27 kB
"use strict";
// Compiled CommonJS output. Interop helper so a CJS `require` of an ES module
// still exposes its default export under `.default`.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.embedder = exports.Embedder = void 0;
const openai_1 = __importDefault(require("openai"));
const embedding_1 = require("../../config/embedding");
/**
 * Generates vector embeddings for code chunks through the OpenAI embeddings
 * API, batching inputs to bound the size of each request.
 */
class Embedder {
    constructor() {
        // Client configuration comes from the shared embeddings config module.
        this.openai = new openai_1.default({
            apiKey: embedding_1.embeddingsConfig.apiKey,
            baseURL: embedding_1.embeddingsConfig.baseURL,
        });
    }
    /**
     * Requests embeddings for one batch of texts in a single API call.
     *
     * @param {string[]} texts - Texts to embed together.
     * @returns {Promise<number[][]>} One embedding vector per input text,
     *   in input order.
     * @throws {Error} On API failure; the original error is preserved as
     *   `cause` so callers can inspect the underlying failure.
     */
    async generateEmbeddings(texts) {
        try {
            const response = await this.openai.embeddings.create({
                model: embedding_1.embeddingsConfig.model,
                input: texts,
                encoding_format: 'float',
            });
            return response.data.map(item => item.embedding);
        }
        catch (error) {
            console.error('Error generating embeddings batch:', error);
            // FIX: attach the original error as `cause` instead of only
            // flattening it into the message, which lost its stack trace.
            // The message string itself is unchanged for compatibility.
            throw new Error(`Failed to generate embeddings batch: ${error}`, { cause: error });
        }
    }
    /**
     * Embeds a list of code chunks.
     *
     * @param {Array<{filePath: string, nodeType: string, content: string}>} codeChunks
     * @returns {Promise<number[][]>} One embedding per chunk, in input order.
     * @throws {Error} If any batch fails, or if the API returns a different
     *   number of embeddings than chunks requested.
     */
    async generate(codeChunks) {
        if (codeChunks.length === 0) {
            return [];
        }
        const textsToEmbed = this.getTextsToEmbed(codeChunks);
        // NOTE(review): all batches are dispatched concurrently; with many
        // chunks this may hit provider rate limits — confirm acceptable.
        const batchEmbeddings = await Promise.all(textsToEmbed.map(texts => this.generateEmbeddings(texts)));
        const embeddings = batchEmbeddings.flat();
        // Sanity check: the API must return exactly one vector per chunk.
        if (embeddings.length !== codeChunks.length) {
            throw new Error('Failed to generate embeddings for all chunks');
        }
        return embeddings;
    }
    /**
     * Prepares chunk texts and partitions them into batches.
     *
     * @param {Array<object>} codeChunks - Chunks to prepare.
     * @param {number} [chunkSize=10] - Maximum texts per batch.
     * @returns {string[][]} Batches of embedding-ready texts.
     */
    getTextsToEmbed(codeChunks, chunkSize = 10) {
        const result = [];
        const texts = codeChunks.map(chunk => this.prepareTextForEmbedding(chunk));
        for (let i = 0; i < texts.length; i += chunkSize) {
            result.push(texts.slice(i, i + chunkSize));
        }
        return result;
    }
    /**
     * Builds the text submitted for embedding: file path and node type as
     * context lines, followed by the chunk's source code.
     *
     * @param {{filePath: string, nodeType: string, content: string}} chunk
     * @returns {string} The formatted embedding input.
     */
    prepareTextForEmbedding(chunk) {
        let context = '';
        context += `File: ${chunk.filePath}\n`;
        context += `Node Type: ${chunk.nodeType}\n`;
        context += `Code:\n${chunk.content}`;
        return context;
    }
}
exports.Embedder = Embedder;
exports.embedder = new Embedder();
//# sourceMappingURL=index.js.map