// tprompter — ask anything from your shell, e.g. `$ ask anything`.
// Compiled JavaScript module (LLMService).
// TypeScript-emitted helper: applies a list of decorators to a class (c < 3),
// a method/accessor (c > 3, with a property descriptor), or a property.
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
    var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
    // Prefer the reflect-metadata polyfill's Reflect.decorate when it is loaded.
    if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
    // Otherwise apply decorators right-to-left; each decorator may replace the
    // running result `r`, or return a falsy value to leave it unchanged.
    else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
    // For method/accessor decoration, install the (possibly replaced) descriptor.
    return c > 3 && r && Object.defineProperty(target, key, r), r;
};
// TypeScript-emitted helper: produces a metadata decorator via the
// reflect-metadata polyfill; a no-op (undefined) when the polyfill is absent.
var __metadata = (this && this.__metadata) || function (metadataKey, metadataValue) {
    var hasReflectMetadata = typeof Reflect === "object" && typeof Reflect.metadata === "function";
    if (hasReflectMetadata) {
        return Reflect.metadata(metadataKey, metadataValue);
    }
};
// TypeScript-emitted helper: adapts a parameter decorator so it can be invoked
// like a member decorator — the wrapper carries the parameter index along.
var __param = (this && this.__param) || function (paramIndex, decorator) {
    return function (target, propertyKey) {
        decorator(target, propertyKey, paramIndex);
    };
};
// TypeScript-emitted helper: downlevel implementation of async/await.
// Drives a generator function, treating each yielded value as an awaited Promise.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    // Wrap a plain yielded value in the target Promise type unless it already is one.
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        // Resume the generator with the settled value; synchronous throws reject the outer promise.
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        // Pump loop: resolve when the generator is done, otherwise await the yielded value.
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
var LLMService_1;
import { AppConfig } from '../config/AppConfig.js';
import { OpenAIService } from './OpenAIService.js';
import { countTokens } from 'gpt-tokenizer';
import { PromptTooLongError } from './errors/PromptTooLongError.js';
import { Service } from 'typedi';
import { EnvironmentInfo } from '../config/EnvironmentInfo.js';
import { InjectLogger } from '../logger/logger.decorator.js';
let LLMService = LLMService_1 = class LLMService {
    /**
     * @param {OpenAIService} openAI - backend completion provider.
     * @param {AppConfig} config - application configuration accessor.
     * @param {EnvironmentInfo} env - host platform/shell description.
     * @param {object} logger - injected logger (see InjectLogger decorator).
     */
    constructor(openAI, config, env, logger) {
        this.openAI = openAI;
        this.config = config;
        this.env = env;
        this.logger = logger;
    }
    /**
     * Sends a prompt to the LLM and returns the completion.
     * Validates the prompt length first, and logs a notice if the request
     * takes longer than WARN_DELAY milliseconds.
     * @param {string|string[]} prompt - user message(s).
     * @param {{model?: string, developerMessages?: string[]}} [opts]
     * @throws {PromptTooLongError} when the token count exceeds config.agentMaxTokens.
     */
    getCompletion(prompt, opts) {
        return __awaiter(this, void 0, void 0, function* () {
            var _a, _b;
            this.checkPromptLength(prompt, opts);
            // One-shot "still working" notice for slow requests; cleared in finally.
            const timeoutId = setTimeout(() => {
                this.logger.info('Request to LLM takes a little bit too long...');
            }, LLMService_1.WARN_DELAY);
            try {
                // Model precedence: explicit opts.model → configured default → hard-coded fallback.
                return yield this.openAI.getCompletion(prompt, {
                    model: (_b = (_a = opts === null || opts === void 0 ? void 0 : opts.model) !== null && _a !== void 0 ? _a : this.config.getConfig().agentDefaultModel) !== null && _b !== void 0 ? _b : 'gpt-4o-mini',
                    developerMessages: opts === null || opts === void 0 ? void 0 : opts.developerMessages,
                });
            }
            finally {
                clearTimeout(timeoutId);
            }
        });
    }
    /**
     * Runs an agent-style completion using the given model, or the configured
     * agent default when none is supplied.
     */
    agent(prompt, mbModel) {
        return __awaiter(this, void 0, void 0, function* () {
            const model = mbModel !== null && mbModel !== void 0 ? mbModel : this.config.getConfig().agentDefaultModel;
            return this.getCompletion(prompt, { model });
        });
    }
    /**
     * Answers a one-off question, optionally with piped stdin attached as an
     * <input> section, using CLI-oriented developer messages.
     * @param {string} prompt - the user's question.
     * @param {string} [input] - optional text piped in from the shell.
     * @param {string} [model] - optional model override.
     */
    question(prompt, input, model) {
        return __awaiter(this, void 0, void 0, function* () {
            const config = this.config.getConfig();
            const userMessages = [];
            if (input) {
                userMessages.push(`<input from="pipe">${input}</input>`);
            }
            userMessages.push(`<question>${prompt}</question>`);
            return this.getCompletion(userMessages, {
                developerMessages: [
                    'You are helpful CLI tool, that works in a user shell. Be brief, clear and provide a highly structured responses. Your users are skilled developers, who is looking for a quick solution to a problem.',
                    `Your environment: Platform: ${this.env.platform} ${this.env.machine}; Shell: ${this.env.shell}`,
                    `To access the text from the <input> section provided below you can suggest to a user to run \`${config.appName} archive last\` in the shell. Example, \`${config.appName} archive last input | grep "some text"\``,
                ],
                model,
            });
        });
    }
    /**
     * Throws PromptTooLongError when the combined user + developer messages
     * exceed the configured token budget (config.agentMaxTokens); no-op when
     * no budget is configured.
     */
    checkPromptLength(userMessages, opts) {
        var _a, _b;
        let fullMessage = Array.isArray(userMessages) ? userMessages.join(' ') : userMessages;
        // BUG FIX: previously this appended `.join(' ').length` — the numeric
        // LENGTH of the developer messages — so their text was never included
        // in the token count. Append the joined text itself instead.
        fullMessage += (_b = (_a = opts === null || opts === void 0 ? void 0 : opts.developerMessages) === null || _a === void 0 ? void 0 : _a.join(' ')) !== null && _b !== void 0 ? _b : '';
        const config = this.config.getConfig();
        if (config.agentMaxTokens) {
            const tokenLength = countTokens(fullMessage);
            if (tokenLength > config.agentMaxTokens) {
                throw new PromptTooLongError(tokenLength, config.agentMaxTokens);
            }
        }
    }
    /** Lists the model identifiers supported by the backend provider. */
    listModels() {
        return __awaiter(this, void 0, void 0, function* () {
            return this.openAI.getSupportedModels();
        });
    }
};
// Milliseconds before logging that an in-flight LLM request is slow.
LLMService.WARN_DELAY = 5000;
// Register LLMService with the typedi container. Constructor parameters 0-2
// resolve by type from the design:paramtypes metadata; parameter 3 (the
// logger, typed Object) is supplied explicitly via the InjectLogger decorator.
LLMService = LLMService_1 = __decorate([
    Service(),
    __param(3, InjectLogger()),
    __metadata("design:paramtypes", [OpenAIService,
        AppConfig,
        EnvironmentInfo, Object])
], LLMService);
export { LLMService };