// cli-ai
// A command line interface client for interacting with OpenAI's GPT-3
// 92 lines • 4.14 kB — JavaScript (compiled output)
;
// TypeScript interop helper: wrap a plain CommonJS export in a
// `{ default: … }` namespace unless it is already an ES-module namespace.
var __importDefault = (this && this.__importDefault) || function (mod) {
    if (mod && mod.__esModule) {
        return mod;
    }
    return { "default": mod };
};
// Mark the compiled output as an ES module and pre-declare the named
// export so circular requires observe the binding before it is assigned.
Object.defineProperty(exports, "__esModule", { value: true });
exports.render_cli = void 0;
const child_process_1 = require("child_process");
const config_1 = require("../config");
const chalk_1 = __importDefault(require("chalk"));
const figlet_1 = __importDefault(require("figlet"));
const console_1 = require("console");
const commander_1 = require("commander");
const openai_1 = require("../api/openai");
const process_1 = require("process");
const node_https_1 = __importDefault(require("node:https"));
// Clear the terminal and print the ASCII-art banner: the package name
// rendered by figlet plus a highlighted version badge, then a divider.
const banner_setup = () => {
    (0, console_1.clear)();
    const title = figlet_1.default.textSync(config_1.config.package_name.toUpperCase(), {
        whitespaceBreak: true,
        horizontalLayout: "fitted",
    });
    const version_badge = chalk_1.default.bold.bgGreenBright.white(` v${config_1.config.package_version} `);
    console.log(chalk_1.default.bold.green(title), version_badge);
    divider();
};
// Send `prompt` to the OpenAI API and render the question/answer pair.
// On success the screen is redrawn (banner + update check) and, on macOS
// with the voice-assistant flag active, the answer is read aloud.
// Failures are logged rather than thrown.
const ask = (prompt) => {
    const on_answer = (answer) => {
        (0, console_1.clear)();
        banner_setup();
        check_updates();
        say_voice_assistant(answer);
        const question_label = chalk_1.default.bold.green("Question");
        const answer_label = chalk_1.default.bold.hex("#FFD700")("Answer");
        console.log(`${question_label}: ${commander_1.program.opts().question}`);
        console.log(`${answer_label}: ${answer.trimStart()} \n`);
    };
    const on_error = (err) => {
        console.log(err);
    };
    (0, openai_1.get_result)(prompt).then(on_answer).catch(on_error);
};
// Query the npm registry for the latest published version of this
// package and print an upgrade hint when the running version is stale.
// Network failures and malformed registry payloads are reported (with a
// link to the issue tracker) but never crash the CLI — the original
// JSON.parse ran unguarded inside the 'end' handler, so a bad response
// raised an uncaught exception.
const check_updates = () => {
    const report_failure = (e) => {
        divider();
        console.log(`💥 checking updates failed: ${e}`);
        console.log(`Please report this issue on GitHub: ${chalk_1.default.red.bold(config_1.config.repo_url + "issues")} \n`);
        divider();
    };
    const request = node_https_1.default.get(`https://registry.npmjs.org/${config_1.config.package_name}`, (response) => {
        let data = "";
        response.on("data", (chunk) => (data += chunk));
        response.on("end", () => {
            let latest_version;
            try {
                // The registry may answer with an error document that has no
                // "dist-tags"; optional chaining keeps that from throwing.
                latest_version = JSON.parse(data)["dist-tags"]?.latest;
            }
            catch (e) {
                report_failure(e);
                return;
            }
            if (latest_version && config_1.config.package_version !== latest_version) {
                divider();
                console.log(`✨ Update available: ${chalk_1.default.bold.red(config_1.config.package_version)} → ${chalk_1.default.bold.green(latest_version)}`);
                console.log(`↥ run: ${chalk_1.default.green("yarn global upgrade cli-ai")} or ${chalk_1.default.green("npm update -g cli-ai")}\n`);
                divider();
            }
        });
    });
    // https.get() calls request.end() internally — the explicit call in
    // the original was redundant.
    request.on("error", report_failure);
};
// Print a dim horizontal rule used to separate CLI output sections.
const divider = () => {
    const rule = "————————————————————————————————————————————————";
    console.log(`${chalk_1.default.gray(rule)} \n`);
};
// Abort with a pointer to the OpenAI dashboard when no API token is
// configured. `!token` already covers undefined, null and "" — the
// original's chained `=== null` / `=== ""` comparisons were redundant.
const handle_missing_api_token = () => {
    if (!config_1.config.openai_api_token) {
        console.log("Please set your OpenAI API Token in order to use this CLI", chalk_1.default.red("https://beta.openai.com/account/api-keys \n"));
        (0, process_1.exit)();
    }
};
// Validate CLI options and kick off the request. Without a --question
// the commander help screen is shown; otherwise the prompt is prefixed
// with the chat-mode stop token when --chat is active.
const render_ui = () => {
    const opts = commander_1.program.opts();
    if (!opts.question) {
        console.log(commander_1.program.help());
    }
    console.log("✨ Processing request");
    const chat_mode = opts.chat === "active" ? config_1.openai_api_options.stop[0] : " ";
    console.log(`✦ ${chalk_1.default.green(chat_mode)} ${JSON.stringify(opts.question)}`);
    ask(`${chat_mode} ${opts.question}`);
};
// Read `answer` aloud via macOS's `say` command, then exit. No-op on
// other platforms or when --voice-assistant is not active.
const say_voice_assistant = (answer) => {
    if (process.platform === "darwin" && commander_1.program.opts().voiceAssistant === "active") {
        // execFile passes the answer as an argv entry instead of
        // interpolating it into a shell string: the original
        // exec(`say "${answer}"`) let quotes/backticks/$(…) in the API
        // response execute as shell code.
        (0, child_process_1.execFile)("say", [answer], () => (0, process_1.exit)());
    }
};
// CLI entry point: draw the banner, make sure an API token is
// configured, then render the question/answer UI.
const render_cli = () => {
    banner_setup();          // clear screen + figlet banner
    handle_missing_api_token(); // exits early if no token is set
    render_ui();             // parse options and fire the request
};
// Public API: the single entry point invoked by the CLI bootstrap.
exports.render_cli = render_cli;
//# sourceMappingURL=cli.js.map