UNPKG

json-gpt

Json-GPT makes it easy to interact with the GPT-3.5-turbo model in JSON format

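The bundle below exports three helpers: solve, solveChat and solveJson. As a quick orientation, here is a minimal, hypothetical usage sketch of solve inferred from the bundled source that follows; the prompt text, option values and file name are illustrative, and an OPENAI_API_KEY is assumed to be available in a .env file (json-gpt loads it via dotenv).

// demo.js — hypothetical usage sketch, not from the package docs.
const { solve } = require("json-gpt");

async function main() {
  // A plain string becomes a single system message; an array of chat messages is also accepted.
  const result = await solve("Reply with a short JSON object describing the planet Mars.", {
    verbose: true,      // log the outgoing messages and the raw model response
    max_retries: 3,     // retry HTTP 429 responses with exponential backoff
    initial_delay: 2000 // first backoff delay in milliseconds
  });

  if (result.status === 200) {
    console.log(result.data);                  // raw model output (a string)
  } else {
    console.error(result.status, result.data); // stringified { "error", "text" } payload
  }
}

main();
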
"use strict"; var __create = Object.create; var __defProp = Object.defineProperty; var __getOwnPropDesc = Object.getOwnPropertyDescriptor; var __getOwnPropNames = Object.getOwnPropertyNames; var __getOwnPropSymbols = Object.getOwnPropertySymbols; var __getProtoOf = Object.getPrototypeOf; var __hasOwnProp = Object.prototype.hasOwnProperty; var __propIsEnum = Object.prototype.propertyIsEnumerable; var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; var __spreadValues = (a, b) => { for (var prop in b || (b = {})) if (__hasOwnProp.call(b, prop)) __defNormalProp(a, prop, b[prop]); if (__getOwnPropSymbols) for (var prop of __getOwnPropSymbols(b)) { if (__propIsEnum.call(b, prop)) __defNormalProp(a, prop, b[prop]); } return a; }; var __objRest = (source, exclude) => { var target = {}; for (var prop in source) if (__hasOwnProp.call(source, prop) && exclude.indexOf(prop) < 0) target[prop] = source[prop]; if (source != null && __getOwnPropSymbols) for (var prop of __getOwnPropSymbols(source)) { if (exclude.indexOf(prop) < 0 && __propIsEnum.call(source, prop)) target[prop] = source[prop]; } return target; }; var __export = (target, all) => { for (var name in all) __defProp(target, name, { get: all[name], enumerable: true }); }; var __copyProps = (to, from, except, desc) => { if (from && typeof from === "object" || typeof from === "function") { for (let key of __getOwnPropNames(from)) if (!__hasOwnProp.call(to, key) && key !== except) __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); } return to; }; var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( // If the importer is in node compatibility mode or this is not an ESM // file that has been converted to a CommonJS file using a Babel- // compatible transform (i.e. "__esModule" has not been set), then set // "default" to the CommonJS "module.exports" for node compatibility. isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target, mod )); var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); var __async = (__this, __arguments, generator) => { return new Promise((resolve, reject) => { var fulfilled = (value) => { try { step(generator.next(value)); } catch (e) { reject(e); } }; var rejected = (value) => { try { step(generator.throw(value)); } catch (e) { reject(e); } }; var step = (x) => x.done ? 
resolve(x.value) : Promise.resolve(x.value).then(fulfilled, rejected); step((generator = generator.apply(__this, __arguments)).next()); }); }; // index.ts var Json_GPT_exports = {}; __export(Json_GPT_exports, { solve: () => solve, solveChat: () => solveChat, solveJson: () => solveJson }); module.exports = __toCommonJS(Json_GPT_exports); // solve/solve.ts var import_dotenv = __toESM(require("dotenv")); var import_openai = require("openai"); import_dotenv.default.config(); var openai = new import_openai.OpenAIApi(new import_openai.Configuration({ apiKey: process.env.OPENAI_API_KEY })); function formatRequest(request) { const messages = []; if (Array.isArray(request)) { messages.push(...request); } else if (typeof request === "string") { messages.push({ role: "system", content: request }); } else { throw { status: 0, data: '{"error": "Invalid request format", "text": "void" }' }; } return messages; } function formatError(status, error, text) { return { status, data: `{"error": "${error}", "text": "${text}" }` }; } function callGPT(request) { return __async(this, null, function* () { return (yield openai.createChatCompletion(__spreadValues({ model: "gpt-3.5-turbo", temperature: 0, top_p: 1, frequency_penalty: 0, presence_penalty: 0, max_tokens: 2e3, n: 1 }, request))).data.choices[0].message.content || ""; }); } var MAX_RETRIES = 4; var INITIAL_DELAY = 4e3; var DELAY_EXPONENTIAL = 2; function solve(request, options) { return __async(this, null, function* () { const _a = options || {}, { max_retries = MAX_RETRIES, initial_delay = INITIAL_DELAY, delay_exponential = DELAY_EXPONENTIAL, verbose = false } = _a, apiOptions = __objRest(_a, [ "max_retries", "initial_delay", "delay_exponential", "verbose" ]); let delay = initial_delay; let messages; try { messages = formatRequest(request); } catch (error) { return error; } if (verbose) console.log("Sending to GPT-3.5-turbo", messages, apiOptions); let retries = 0; while (retries <= max_retries) { try { const response = yield callGPT(__spreadValues({ messages }, apiOptions)); if (verbose) console.log("GPT-3.5-turbo response: ", response); return { status: 200, data: response }; } catch (error) { if (error.response && error.response.status === 429) { yield new Promise((resolve) => setTimeout(resolve, delay)); delay *= delay_exponential; retries++; if (verbose) console.log("Retrying...", retries); } else { const err2 = formatError( error.response.status, "OpenAI API Error", `${error.response.statusText}: ${error.response.data.error.message}` ); if (verbose) console.log(err2); return err2; } } } const err = formatError( 429, "MAX RETRIES REACHED", `Exponential fail with inital delay: ${initial_delay}, max reties: ${max_retries} and delay exponential: ${delay_exponential}` ); if (verbose) console.log(err); return err; }); } // solve/utils.ts var import_tiktoken = require("@dqbd/tiktoken"); var import_zod_to_json_schema = require("zod-to-json-schema"); var tiktoken = (0, import_tiktoken.encoding_for_model)("gpt-3.5-turbo"); function getJsonSchema(schema, name) { const _a = (0, import_zod_to_json_schema.zodToJsonSchema)(schema, name), { $schema } = _a, jsonSchema = __objRest(_a, ["$schema"]); return jsonSchema; } // solve/solve-json.ts function solveParseError(text, error, textKey = "text") { return __async(this, null, function* () { return solve( ` ###INSTRUCTIONS: Catched error while trying to parse JSON from text. The text may contain a json string, read the error and correct it without modifing the content. 
If the text do not contain a json string please return it like this "{"${textKey}":"text..."}" Make sure to scape all line breaks and bad characters!. ###ERROR: ${error} ###TEXT: ${text} ` ); }); } function solveJson(json, options) { return __async(this, null, function* () { const verbose = (options == null ? void 0 : options.verbose) || false; const jsonSchema = getJsonSchema(json.zodSchema, "Output"); const solved = yield solve([ { role: "system", content: JSON.stringify({ instructions: `Read the data, the ${json.target.key} and use your knowledge to respond to it following this instructions and the outputSchema. ${json.instructions} Your output must be a json following the exact outputSchema. IMPORTANT: Your output will de parsed to JSON so do not output plain text!`, outputSchema: jsonSchema, [json.target.key]: json.target.value, data: json.data }) } ], options); if (solved.status === 200) return fullParse(solved.data, json.zodSchema, verbose, json.safeKey); const data = JSON.parse(solved.data); if (verbose) console.log("SOLVE ERROR", data); return handleError(solved.status, data.error, data.text, verbose); }); } function handleParseError(text, error, z, verbose, safeKey) { return __async(this, null, function* () { if (verbose) console.log("PARSE ERROR", text, error); const solved = yield solveParseError(text, error, safeKey); if (verbose) console.log("SOLVED?", solved); if (solved.status === 200) try { return { status: solved.status, data: JSON.parse(solved.data) }; } catch (error2) { return handleError(1, "ERROR PARSING HANDLED PARSE ERROR", error2.message, verbose); } return handleError(solved.status, "ERROR SOLVING HANDLED PARSE ERROR", solved.data, verbose); }); } function handleError(status, error, message, verbose) { if (verbose) console.error(`STATUS: ${status}`, `ERROR: ${error}: ${message}`); return { status, data: { error, text: message } }; } function fullParse(text, z, verbose, safeKey) { try { const open = text.indexOf("{"); const close = text.lastIndexOf("}"); const json = text.slice(open, close + 1); const obj = JSON.parse(json); return zodParse(obj, z, verbose); } catch (error) { return handleParseError(text, error.message, z, verbose, safeKey); } } function zodParse(obj, z, verbose) { const zParse = z.safeParse(obj); if (zParse.success) return { status: 200, data: obj }; return handleError(2, "ZOD PARSE ERROR", zParse.error.message, verbose); } // solve/solve-chat.ts function solveChat(json, options) { return __async(this, null, function* () { const verbose = (options == null ? void 0 : options.verbose) || false; const jsonSchema = getJsonSchema(json.zodSchema, "Output"); const messages = formatMessages(json.messages); const solved = yield solve([ { role: "system", content: JSON.stringify({ instructions: `Read the data, the conversation and use your knowledge to respond to it following this instructions and the outputSchema. ${json.instructions} Your output must be a json based on outputSchema. Your output will de parsed to JSON so do not output plain text!`, outputSchema: jsonSchema, data: __spreadValues({}, json.custom) }) }, ...messages // CORTAR CON CONTADOR DE TOKENS Y CON EMMBEDDINGS BASADOS EN LOS DOS ULTIMOS MENSAJES ?? 
], options); if (solved.status === 200) return fullParse(solved.data, json.zodSchema, verbose, json.safeKey); const data = JSON.parse(solved.data); if (verbose) console.log("SOLVE ERROR", data); return handleError(solved.status, data.error, data.text, verbose); }); } function formatMessages(messages) { const important = { important: "Dont forget to output following the outputSchema!" }; const lastUserMessageIndex = messages.reverse().findIndex((m) => m.role === "user"); return messages.map((message, index) => { const _a = message, { role } = _a, content = __objRest(_a, ["role"]); let processedContent = {}; if (index === lastUserMessageIndex) { processedContent = __spreadValues(__spreadValues({}, content), important); } else processedContent = __spreadValues({}, content); return { role, content: JSON.stringify(processedContent) }; }).reverse(); } // Annotate the CommonJS export names for ESM import in node: 0 && (module.exports = { solve, solveChat, solveJson });
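
For the schema-validated path, here is a hedged sketch of how solveJson might be called, inferred from the bundled source above; the schema, prompt values, file name and safeKey are illustrative, and "zod" is assumed to be installed in the consuming project.

// demo-json.js — hypothetical solveJson sketch (values are illustrative).
const { z } = require("zod");
const { solveJson } = require("json-gpt");

const answerSchema = z.object({
  answer: z.string(),
  confidence: z.number().min(0).max(1)
});

async function main() {
  const result = await solveJson(
    {
      zodSchema: answerSchema,                  // zod schema the reply is validated against
      instructions: "Answer briefly and estimate your confidence.",
      target: { key: "question", value: "What is the capital of France?" },
      data: { language: "en" },                 // extra context forwarded to the model
      safeKey: "answer"                         // key used if a plain-text reply has to be wrapped
    },
    { verbose: false }
  );

  if (result.status === 200) {
    // result.data is the parsed object, validated against answerSchema.
    console.log(result.data.answer, result.data.confidence);
  } else {
    console.error(result.data.error, result.data.text);
  }
}

main();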