auto-gpt-ts
My take on Auto-GPT in TypeScript
"use strict";
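// Helper emitted by the TypeScript compiler to down-level async/await for CommonJS output.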
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.summarizeText = exports.splitText = void 0;
const sentence_splitter_1 = require("sentence-splitter");
const logging_1 = require("../logging");
const token_counter_1 = require("../llm/token-counter");
const llm_utils_1 = require("../llm/llm-utils");
const memory_1 = require("../memory");
const config_1 = require("../config/config");
const logger = (0, logging_1.getLogger)();
const CFG = new config_1.Config();
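// Generator that yields sentence-aligned chunks of `text`, each sized so the chunk-plus-question prompt stays within `max_length` tokens for `model`.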
function* splitText(text, max_length = CFG.browseChunkMaxLength, model = CFG.fastLlmModel, question = "") {
const sentences = (0, sentence_splitter_1.split)(text).filter(s => s.type === 'Sentence').map(s => s.raw.trim());
let current_chunk = [];
for (const sentence of sentences) {
const message_with_additional_sentence = [
createMessage(current_chunk.join(" ") + " " + sentence, question)
];
const expected_token_usage = (0, token_counter_1.countMessageTokens)(message_with_additional_sentence, model) + 1;
if (expected_token_usage <= max_length) {
current_chunk.push(sentence);
}
else {
yield current_chunk.join(" ");
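// Start the next chunk with the current sentence, truncated to its first 100 characters.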
current_chunk = [sentence.slice(0, 100)];
const message_this_sentence_only = [
createMessage(current_chunk.join(" "), question)
];
const expected_token_usage = (0, token_counter_1.countMessageTokens)(message_this_sentence_only, model) + 1;
if (expected_token_usage > max_length) {
throw new Error(`Sentence is too long in webpage: ${expected_token_usage} tokens.`);
}
}
}
if (current_chunk.length) {
yield current_chunk.join(" ");
}
}
exports.splitText = splitText;
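// Summarize `text` chunk by chunk with the fast LLM: each raw chunk and its summary are stored in memory, then the per-chunk summaries are combined and summarized once more. If a `driver` is supplied, the page is scrolled to mirror progress.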
function summarizeText(url, text, question, driver) {
return __awaiter(this, void 0, void 0, function* () {
if (!text) {
return "Error: No text to summarize";
}
const model = CFG.fastLlmModel;
const textLength = text.length;
logger.info(`Text length: ${textLength} characters`);
const summaries = [];
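// Split the text and cap processing at the first 10 chunks.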
const chunks = Array.from(splitText(text, CFG.browseChunkMaxLength, model, question)).slice(0, 10);
const scrollRatio = 1 / chunks.length;
for (let i = 0; i < chunks.length; i++) {
const chunk = chunks[i];
if (driver) {
scroll_to_percentage(driver, scrollRatio * i);
}
logger.info(`Adding chunk ${i + 1} / ${chunks.length} to memory`);
const memoryToAdd = `Source: ${url}\nRaw content part#${i + 1}: ${chunk}`;
const memory = (0, memory_1.getMemory)();
memory.add(memoryToAdd);
const messages = [createMessage(chunk, question)];
const tokensForChunk = (0, token_counter_1.countMessageTokens)(messages, model);
logger.info(`Summarizing chunk ${i + 1} / ${chunks.length} of length ${chunk.length} characters, or ${tokensForChunk} tokens`);
const summary = yield (0, llm_utils_1.createChatCompletion)(messages, model);
summaries.push(summary);
logger.info(`Added chunk ${i + 1} summary to memory, of length ${summary.length} characters`);
const memoryToAdd2 = `Source: ${url}\nContent summary part#${i + 1}: ${summary}`;
memory.add(memoryToAdd2);
}
logger.info(`Summarized ${chunks.length} chunks.`);
const combinedSummary = summaries.join("\n");
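// Final pass: answer the question from (or summarize) the concatenated chunk summaries.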
const messages = [createMessage(combinedSummary, question)];
return (0, llm_utils_1.createChatCompletion)(messages, model);
});
}
exports.summarizeText = summarizeText;
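// Scroll the browser window to the given fraction of the page height (0 = top, 1 = bottom).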
function scroll_to_percentage(driver, ratio) {
if (ratio < 0 || ratio > 1) {
throw new Error("Percentage should be between 0 and 1");
}
driver.evaluate(`window.scrollTo(0, document.body.scrollHeight * ${ratio});`);
}
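// Build the user prompt that asks the model to answer `question` from `chunk`, or to summarize the chunk if the question cannot be answered.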
function createMessage(chunk, question) {
return {
role: "user",
content: `"""${chunk}""" Using the above text, answer the following question: "${question}" -- if the question cannot be answered using the text, summarize the text.`,
};
}
//# sourceMappingURL=text.js.map
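For reference, a minimal usage sketch of the two exports in TypeScript. The import path, the sample inputs, and calling summarizeText without a browser driver are assumptions for illustration, not part of this file; a configured API key is required for createChatCompletion to succeed.

// Hypothetical usage sketch: the module path and sample inputs are assumptions.
import { splitText, summarizeText } from "./processing/text";

async function demo(): Promise<void> {
    const pageText =
        "Auto-GPT is an experimental autonomous agent. It chains LLM calls to pursue a goal.";

    // splitText is a generator; iterate it to get token-bounded chunks
    // (chunk size and model default to the values in Config).
    for (const chunk of splitText(pageText)) {
        console.log(`chunk of ${chunk.length} characters`);
    }

    // Passing undefined for the driver skips the scroll step (assumes the
    // driver parameter is optional in the TypeScript source).
    const summary = await summarizeText(
        "https://example.com/article",
        pageText,
        "What is Auto-GPT?",
        undefined
    );
    console.log(summary);
}

demo().catch(console.error);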