cookie-ai-cli
A command-line interface tool designed to bridge the gap between natural language processing and command-line operations.
Object.defineProperty(exports, "__esModule", { value: true });
exports.sendChat = void 0;
const get_settings_1 = require("./settings/get-settings");
const handle_action_1 = require("./handle-action");
const settings_constants_1 = require("./settings/settings-constants");
const get_headers_1 = require("./settings/get-headers");
const main_1 = require("./main");
const debug_log_1 = require("./utils/debug-log");
const colors_1 = require("./utils/colors");
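// Base chat-completion request body; messages accumulate here across calls so conversation history is preserved between prompts and retries.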
let payload = {
model: "gpt-4",
messages: [{ role: "system", content: settings_constants_1.systemInstructions }],
temperature: 0.7,
};
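// Sends a user prompt (or an error report when isError is true) to the configured AI service, parses the JSON action in the reply, and hands it to handleAction.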
async function sendChat({ message, rl, isError, }) {
const settings = await (0, get_settings_1.getSettings)({ rl });
if (settings.service === "custom" && settings.custom?.payload) {
// Allow passing in custom payload for service "custom"
Object.assign(payload, settings.custom?.payload);
}
if (settings.model) {
payload.model = settings.model;
}
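    // Error reports go in with the "system" role so the model treats them as corrections rather than user input.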
payload.messages.push({
role: !isError ? "user" : "system",
content: !isError ? message : `error with last command: ${message}`,
});
console.log(`${colors_1.colors.yellow}🤔 AI thinking...${colors_1.colors.reset}`);
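    // The "openai" service uses the fixed chat-completions URL; any other service must supply its own endpoint in settings.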
const endpoint = settings.service === "openai"
? "https://api.openai.com/v1/chat/completions"
: settings.endpoint;
if (!endpoint) {
        debug_log_1.debug.error(`Failed to resolve endpoint from settings: ${JSON.stringify(settings)}`);
process.exit(1);
}
const response = await fetch(endpoint, {
method: "POST",
headers: (0, get_headers_1.getHeaders)(settings),
body: JSON.stringify(payload),
});
if (!response.ok) {
debug_log_1.debug.error(`Failed to send message to endpoint: ${endpoint}`);
process.exit(1);
}
console.log(`${colors_1.colors.green}✅ AI responded!${colors_1.colors.reset}`);
let responseJson;
try {
responseJson = await response.json();
if (main_1.isDebug) {
console.log("responseJson", responseJson);
}
}
catch (err) {
debug_log_1.debug.error("Failed to parse endpoint response as JSON");
console.error(err);
process.exit(1);
}
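    // OpenAI-style responses carry the assistant message at choices[0].message.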
const aiResponseChatMessage = responseJson?.choices?.[0]?.message;
if (!aiResponseChatMessage) {
await handleEmptyResponse({ rl });
}
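    // handleEmptyResponse exits the process, so aiResponseChatMessage is defined from here on.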
// add the AI response to the chat history
payload.messages.push(aiResponseChatMessage);
if (main_1.isDebug) {
console.log("payload", payload);
}
const aiResponseContent = responseJson?.choices?.[0]?.message?.content;
let pamperedResponseData;
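    // Pull out the first {...} block in case the model wrapped its JSON in extra prose or code fences.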
const json = aiResponseContent?.slice(aiResponseContent.indexOf("{"), aiResponseContent.lastIndexOf("}") + 1);
try {
pamperedResponseData = JSON.parse(json);
await (0, handle_action_1.handleAction)({ result: pamperedResponseData, rl });
}
catch (error) {
debug_log_1.debug.error("Failed to parse AI response as JSON");
debug_log_1.debug.log("Asking AI to retry...");
if (main_1.isDebug) {
console.log("AI Response: ");
console.log(aiResponseContent);
}
        // Retry and propagate the retried result instead of falling through and returning undefined.
        return sendChat({
            isError: true,
            message: "Your last response was not valid JSON. Please try again.",
            rl,
        });
}
return pamperedResponseData;
}
exports.sendChat = sendChat;
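// Logs OpenAI-specific troubleshooting hints when the response contains no message, then exits.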
async function handleEmptyResponse({ rl }) {
const settings = await (0, get_settings_1.getSettings)({ rl });
if (settings.service === "openai") {
debug_log_1.debug.error(`No message in response from the AI.
Check that your OpenAI account does not have restricted usage limits at https://platform.openai.com/account/limits
and that you have enough credits to use the API.
Make sure you're also using the correct API key and model.
`);
}
process.exit(1);
}
//# sourceMappingURL=send-chat.js.map