generator-begcode: Spring Boot + Angular/React/Vue in one handy generator
import OpenAIApi from 'openai';
import { cleanOpenAIError } from '../utils/openai.js';
export class OpenAILlmApi {
  _apiKey;
  _defaultModel;
  _defaultMaxTokens;
  _defaultMaxResponseTokens;
  _logger;
  _maxRateLimitRetries;
  _api;

  constructor(_apiKey, _defaultModel, _defaultMaxTokens, _defaultMaxResponseTokens, _logger, baseURL, _maxRateLimitRetries = 5) {
    this._apiKey = _apiKey;
    this._defaultModel = _defaultModel;
    this._defaultMaxTokens = _defaultMaxTokens;
    this._defaultMaxResponseTokens = _defaultMaxResponseTokens;
    this._logger = _logger;
    this._maxRateLimitRetries = _maxRateLimitRetries;
    // The client is created once and reused for every completion request.
    // `dangerouslyAllowBrowser` permits use in a browser context, and
    // `baseURL` lets callers point at any OpenAI-compatible endpoint.
    this._api = new OpenAIApi({
      apiKey: this._apiKey,
      dangerouslyAllowBrowser: true,
      baseURL,
    });
  }
  // Token-budget accessors used by callers to size prompts and responses.
  getMaxContextTokens() {
    return this._defaultMaxTokens;
  }

  getMaxResponseTokens() {
    return this._defaultMaxResponseTokens;
  }

  getModel() {
    return this._defaultModel;
  }
  async getResponse(chatLog, functionDefinitions, options, tries) {
    try {
      const completion = await this._createChatCompletion({
        messages: chatLog.messages,
        functions: functionDefinitions,
        temperature: options?.temperature ?? 0,
        max_tokens: options?.max_tokens ?? this._defaultMaxResponseTokens,
        model: options?.model ?? this._defaultModel,
      });
      if (completion.choices.length < 1) {
        throw new Error('Chat completion returned no choices.');
      }
      const choice = completion.choices[0];
      if (!choice.message) {
        throw new Error(`Chat completion message was undefined: ${JSON.stringify(choice, null, 2)}`);
      }
      return choice.message;
    } catch (err) {
      const error = cleanOpenAIError(err);
      if (typeof error === 'object') {
        const maybeOpenAiError = error;
        // HTTP 429: rate limited. Back off for 15 seconds, then retry
        // up to `_maxRateLimitRetries` times before giving up.
        if (maybeOpenAiError.status === 429) {
          await this._logger.warning('Warning: OpenAI rate limit exceeded, sleeping for 15 seconds.');
          await new Promise(resolve => setTimeout(resolve, 15000));
          const attempt = tries ?? 0;
          if (attempt < this._maxRateLimitRetries) {
            return this.getResponse(chatLog, functionDefinitions, options, attempt + 1);
          }
        }
      }
      throw new Error(JSON.stringify(error, null, 2));
    }
  }
  // Thin wrapper around the SDK call. `function_call: 'auto'` is only sent
  // when function definitions are supplied; otherwise the field is omitted.
  _createChatCompletion(options) {
    return this._api.chat.completions.create({
      messages: options.messages,
      model: options.model ?? this._defaultModel,
      functions: options.functions,
      function_call: options.functions ? 'auto' : undefined,
      temperature: options.temperature ?? 0,
      max_tokens: options.max_tokens,
    });
  }
}
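
// A minimal usage sketch, not part of the generator source. The chat-log
// shape ({ messages: [...] }), the console-backed logger, and the model name
// and token budgets below are assumptions for illustration only; substitute
// whatever your deployment actually uses.
const api = new OpenAILlmApi(
  process.env.OPENAI_API_KEY, // API key
  'gpt-4o',                   // default model (assumed)
  128000,                     // max context tokens (assumed)
  4096,                       // max response tokens (assumed)
  { warning: async msg => console.warn(msg) }, // logger exposing an awaitable warning()
  undefined,                  // baseURL: undefined means the default OpenAI endpoint
);

const message = await api.getResponse(
  { messages: [{ role: 'user', content: 'Say hello.' }] },
  undefined,            // no function definitions, so function_call is omitted
  { temperature: 0.2 }, // per-call override; model and max_tokens fall back to defaults
);
console.log(message.content);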