@ts-dspy/openai
Version:
OpenAI ChatGPT integration for TS-DSPy - enables type-safe LLM interactions with GPT-3.5, GPT-4, and other OpenAI models for TypeScript
155 lines (147 loc) • 6.8 kB
JavaScript
/******************************************************************************
Copyright (c) Microsoft Corporation.
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
PERFORMANCE OF THIS SOFTWARE.
***************************************************************************** */
/* global Reflect, Promise, SuppressedError, Symbol, Iterator */
/**
 * TypeScript emit helper: drives a compiler-generated generator as an async
 * operation, settling the returned promise with the generator's final value.
 * @param {*} thisArg - `this` binding for the generator invocation.
 * @param {*} _arguments - arguments forwarded to the generator (may be null).
 * @param {PromiseConstructor} P - promise implementation; falls back to the
 *   global Promise when falsy.
 * @param {GeneratorFunction} generator - lowered async function body.
 * @returns {Promise<*>} resolves with the generator's return value, rejects
 *   with any error thrown inside it or by an awaited value.
 */
function __awaiter(thisArg, _arguments, P, generator) {
    const Ctor = P || (P = Promise);
    // Wrap plain (non-promise) yielded values so every step can be awaited uniformly.
    const adopt = (value) => (value instanceof P ? value : new P((resolve) => { resolve(value); }));
    return new Ctor((resolve, reject) => {
        const iterator = generator.apply(thisArg, _arguments || []);
        const onFulfilled = (value) => { try { advance(iterator.next(value)); } catch (e) { reject(e); } };
        // A rejected awaited value is thrown back into the generator so user
        // try/catch blocks around `yield` can observe it.
        const onRejected = (value) => { try { advance(iterator["throw"](value)); } catch (e) { reject(e); } };
        const advance = (result) => {
            if (result.done) {
                resolve(result.value);
            } else {
                adopt(result.value).then(onFulfilled, onRejected);
            }
        };
        advance(iterator.next());
    });
}
// TypeScript emit helper for `using`/`await using` disposal: selects the
// native SuppressedError when the runtime provides one, otherwise builds a
// polyfill that mimics its shape (name/error/suppressed fields).
// NOTE(review): the resulting function is never assigned or used anywhere in
// this bundle — the expression's value is discarded, so this statement is
// effectively dead code left over from the helper-inlining step.
typeof SuppressedError === "function" ? SuppressedError : function (error, suppressed, message) {
    var e = new Error(message);
    return e.name = "SuppressedError", e.error = error, e.suppressed = suppressed, e;
};
/**
 * Language-model client for the OpenAI Chat Completions API.
 *
 * Tracks cumulative token usage and a rough dollar-cost estimate across all
 * calls made through this instance. Requires a global `fetch` (Node 18+ or
 * browser).
 */
class OpenAILM {
    /**
     * @param {{apiKey: string, model?: string, organization?: string, baseURL?: string}} config
     *   API credentials and overrides. Defaults: model `gpt-4`, no
     *   organization header, official OpenAI endpoint.
     */
    constructor(config) {
        // Cumulative counters across every request; cleared by resetUsage().
        this.usage = {
            promptTokens: 0,
            completionTokens: 0,
            totalTokens: 0,
            totalCost: 0
        };
        // Caller-supplied fields win over the defaults.
        this.config = Object.assign({ model: 'gpt-4', organization: '', baseURL: 'https://api.openai.com/v1' }, config);
    }
    /**
     * Send a single-turn user prompt and return the completion text.
     * @param {string} prompt - user message content.
     * @param {object} [options] - generation options (see chat()).
     * @returns {Promise<string>} the assistant's reply text.
     */
    async generate(prompt, options) {
        const messages = [{ role: 'user', content: prompt }];
        return this.chat(messages, options);
    }
    /**
     * Ask the model for JSON matching `schema` and parse the reply.
     * Tolerates replies wrapped in a Markdown code fence (```json ... ```),
     * which chat models frequently emit around structured output.
     * @param {string} prompt - base prompt; the schema is appended to it.
     * @param {object} schema - JSON schema included verbatim in the prompt.
     * @param {object} [options] - generation options (see chat()).
     * @returns {Promise<*>} the parsed JSON value.
     * @throws {Error} when the reply is not valid JSON; the original parse
     *   error is attached as `cause`.
     */
    async generateStructured(prompt, schema, options) {
        const structuredPrompt = `${prompt}\n\nRespond with valid JSON matching this schema:\n${JSON.stringify(schema, null, 2)}`;
        const response = await this.generate(structuredPrompt, options);
        // Strip one surrounding Markdown fence if present; harmless for bare
        // JSON since JSON.parse already ignores surrounding whitespace.
        const cleaned = response
            .trim()
            .replace(/^```(?:json)?\s*\n?/, '')
            .replace(/\n?```$/, '');
        try {
            return JSON.parse(cleaned);
        }
        catch (error) {
            throw new Error(`Failed to parse structured output: ${error}`, { cause: error });
        }
    }
    /**
     * Core Chat Completions request.
     * @param {Array<{role: string, content: string}>} messages - conversation turns.
     * @param {{temperature?: number, maxTokens?: number, topP?: number,
     *          frequencyPenalty?: number, presencePenalty?: number,
     *          stopSequences?: string[]}} [options] - sampling options;
     *   unset fields are omitted so the API applies its own defaults
     *   (temperature defaults to 0.7 here).
     * @returns {Promise<string>} first choice's message content, or '' when
     *   the response carries no choices.
     * @throws {Error} "OpenAI API Error: ..." on any non-2xx response.
     */
    async chat(messages, options) {
        const headers = {
            'Content-Type': 'application/json',
            'Authorization': `Bearer ${this.config.apiKey}`
        };
        if (this.config.organization) {
            headers['OpenAI-Organization'] = this.config.organization;
        }
        const body = {
            model: this.config.model,
            messages,
            temperature: options?.temperature ?? 0.7,
            max_tokens: options?.maxTokens,
            top_p: options?.topP,
            frequency_penalty: options?.frequencyPenalty,
            presence_penalty: options?.presencePenalty,
            stop: options?.stopSequences
        };
        // Drop unset options entirely rather than sending `undefined`.
        for (const key of Object.keys(body)) {
            if (body[key] === undefined) {
                delete body[key];
            }
        }
        const response = await fetch(`${this.config.baseURL}/chat/completions`, {
            method: 'POST',
            headers,
            body: JSON.stringify(body)
        });
        if (!response.ok) {
            let errorMessage = `OpenAI API Error: ${response.status} ${response.statusText}`;
            try {
                const error = await response.json();
                errorMessage = `OpenAI API Error: ${error.error?.message || response.statusText}`;
            }
            catch {
                // Error body was not JSON; keep the status-line message.
            }
            throw new Error(errorMessage);
        }
        const data = await response.json();
        // Accumulate usage statistics reported by the API.
        if (data.usage) {
            this.usage.promptTokens += data.usage.prompt_tokens || 0;
            this.usage.completionTokens += data.usage.completion_tokens || 0;
            this.usage.totalTokens += data.usage.total_tokens || 0;
            // NOTE(review): these rates are GPT-3.5-turbo pricing even though
            // the default model is gpt-4 — treat totalCost as a rough estimate
            // only; consider a per-model price table.
            const inputCost = (data.usage.prompt_tokens || 0) * 0.0015 / 1000;
            const outputCost = (data.usage.completion_tokens || 0) * 0.002 / 1000;
            this.usage.totalCost = (this.usage.totalCost || 0) + inputCost + outputCost;
        }
        return data.choices?.[0]?.message?.content || '';
    }
    /** @returns {object} a shallow copy of the cumulative usage counters. */
    getUsage() {
        return Object.assign({}, this.usage);
    }
    /** Reset all cumulative usage counters to zero. */
    resetUsage() {
        this.usage = {
            promptTokens: 0,
            completionTokens: 0,
            totalTokens: 0,
            totalCost: 0
        };
    }
    // Additional utility methods
    /** @param {string} model - model name used for subsequent requests. */
    setModel(model) {
        this.config.model = model;
    }
    /** @returns {string} the currently configured model name. */
    getModel() {
        return this.config.model;
    }
    /** @param {string} baseURL - override the API endpoint (e.g. a proxy). */
    setBaseURL(baseURL) {
        this.config.baseURL = baseURL;
    }
    /** @returns {string} the currently configured model name (alias of getModel). */
    getModelName() {
        return this.config.model;
    }
    /**
     * Static capability flags for this adapter.
     * NOTE(review): maxContextLength 4096 understates gpt-4-class models —
     * presumably a conservative placeholder; confirm against the model used.
     * @returns {object} capability descriptor.
     */
    getCapabilities() {
        return {
            supportsStreaming: false,
            supportsStructuredOutput: true,
            supportsFunctionCalling: false,
            supportsVision: false,
            maxContextLength: 4096,
            supportedFormats: ['json_object'],
        };
    }
}
export { OpenAILM };
//# sourceMappingURL=index.esm.js.map