openlit
OpenTelemetry-native auto-instrumentation library for monitoring LLM applications, facilitating the integration of observability into your GenAI-driven projects.
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.BaseEval = void 0;
const utils_1 = require("./utils");
const providers_1 = require("../llm/providers");
const metrics_1 = require("./metrics");
class BaseEval {
    constructor(options = {}) {
        this.provider = options.provider || 'openai';
        this.apiKey = options.apiKey;
        this.model = options.model;
        this.baseUrl = options.baseUrl;
        this.thresholdScore = options.thresholdScore ?? 0.5;
        this.collectMetrics = options.collectMetrics ?? false;
        this.customCategories = options.customCategories;
    }
    // Runs the evaluation: builds the prompt, queries the LLM, parses the
    // result, and optionally records metrics.
    async measure(input) {
        // getSystemPrompt() is not defined in this class; subclasses are
        // expected to provide it.
        const systemPrompt = this.getSystemPrompt();
        const prompt = (0, utils_1.formatPrompt)(systemPrompt, input);
        const response = await this.llmResponse(prompt);
        const result = (0, utils_1.parseLlmResponse)(response);
        if (this.collectMetrics) {
            this.recordMetrics(result);
        }
        return result;
    }
    // Dispatches the prompt to the configured LLM provider.
    async llmResponse(prompt) {
        const providerFn = providers_1.llmProviders[this.provider];
        if (!providerFn) {
            throw new Error(`Unsupported provider: ${this.provider}`);
        }
        // Use a union type for options
        const options = {
            prompt,
            model: this.model,
            apiKey: this.apiKey,
        };
        if (this.provider === 'openai') {
            // A custom base URL is only applied for the OpenAI provider.
            options.baseUrl = this.baseUrl;
        }
        else {
            delete options.baseUrl;
        }
        return providerFn(options);
    }
    // Emits evaluation metrics via the metrics helper.
    recordMetrics(result) {
        (0, metrics_1.recordEvalMetrics)(result, this.provider || 'unknown');
    }
}
exports.BaseEval = BaseEval;
//# sourceMappingURL=base.js.map
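For orientation, here is a minimal usage sketch. It assumes BaseEval is subclassed with a getSystemPrompt() implementation (the method is called by measure() but not defined in this file) and is required straight from the compiled module for illustration. The ToneEval name, the system prompt text, the model name, and the input object shape are illustrative assumptions, not part of this file.

// Hypothetical subclass: BaseEval leaves getSystemPrompt() to subclasses.
const { BaseEval } = require('./base');

class ToneEval extends BaseEval {
    // Illustrative system prompt; real evals would ship their own.
    getSystemPrompt() {
        return 'Score the tone of the provided text between 0 and 1 and reply as JSON: {"score": <number>}.';
    }
}

async function main() {
    const evaluator = new ToneEval({
        provider: 'openai',                   // defaults to 'openai' if omitted
        apiKey: process.env.OPENAI_API_KEY,
        model: 'gpt-4o-mini',                 // assumed model name, passed through to the provider
        thresholdScore: 0.5,
        collectMetrics: false,
    });
    // measure() formats the prompt, calls the provider, and parses the response.
    // The input shape below is an assumption; formatPrompt's contract is not shown here.
    const result = await evaluator.measure({ text: 'The weather is lovely today.' });
    console.log(result);
}

main().catch(console.error);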