generator-begcode
Spring Boot + Angular/React/Vue in one handy generator
import readline from 'readline';
import path from 'path';
import cl100k_base from 'gpt-tokenizer';
import fs from 'fs';
import { AgentContext, Evo, Chat, DebugLlmApi, DebugLog, OpenAILlmApi, OpenAIEmbeddingAPI } from './index.js';
import { Env, ConsoleLogger, Logger, SubWorkspace, FileLogger } from './agent-utils/index.js';
import { FileSystemWorkspace } from './agent-utils-fs/index.js';
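// Bootstraps an Evo agent session: file workspaces, a markdown/console logger
// pair, the OpenAI LLM and embedding clients, and the Evo agent itself.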
const rl = readline.createInterface({
  input: process.stdin,
  output: process.stdout,
});
const prompt = (fileLogger) => (query) => new Promise(resolve => {
  const callback = async (answer) => {
    await fileLogger.info(`# User\n**${query}:** ${answer}`);
    resolve(answer);
  };
  rl.question(`${query}: `, callback);
});
export async function createApp(config) {
  const rootDir = config?.rootDir || '';

  // Default session name is a timestamp, e.g. 2024-3-14_9-5-42
  const date = new Date();
  const defaultSessionName = `${date.getFullYear()}-${date.getMonth() + 1}-${date.getDate()}_${date.getHours()}-${date.getMinutes()}-${date.getSeconds()}`;
  const sessionName = config?.sessionName ?? defaultSessionName;

  const env = new Env({
    OPENAI_API_KEY: config?.openAiKey || '',
    GPT_MODEL: 'gpt-4-turbo',
    CONTEXT_WINDOW_TOKENS: '128000',
    MAX_RESPONSE_TOKENS: '4096',
  });
  // The user workspace points at the project root; session state lives under
  // sessions/<sessionName>, with Evo internals kept in a hidden .evo subfolder.
  const sessionPath = path.join(rootDir, 'sessions', sessionName);
  const userWorkspace = new FileSystemWorkspace(rootDir, config?.fs || fs);
  const sessionWorkspace = new FileSystemWorkspace(sessionPath, fs);
  const internals = new SubWorkspace('.evo', sessionWorkspace);

  // Log both to a markdown chat transcript and to the console; user prompts
  // are echoed into the transcript by the prompt helper above.
  const fileLogger = await FileLogger.create('chat.md', internals);
  const consoleLogger = new ConsoleLogger();
  const logger = new Logger([fileLogger, consoleLogger], {
    promptUser: prompt(fileLogger),
  });
  let llm = new OpenAILlmApi(
    env.OPENAI_API_KEY || '',
    env.GPT_MODEL,
    env.CONTEXT_WINDOW_TOKENS || 512,
    env.MAX_RESPONSE_TOKENS || 512,
    logger,
    env.OPENAI_API_BASE_URL,
  );
  const chat = new Chat(cl100k_base);

  // In debug mode, wrap the LLM API so every request/response is recorded
  // to a debug log inside the session's .evo workspace.
  let debugLog;
  if (config?.debug) {
    debugLog = new DebugLog(internals);
    llm = new DebugLlmApi(debugLog, llm);
  }

  const embedding = new OpenAIEmbeddingAPI(env.OPENAI_API_KEY || '', logger, cl100k_base, env.OPENAI_API_BASE_URL);
  const evo = new Evo(new AgentContext(llm, embedding, chat, logger, userWorkspace, internals, env), config?.timeout);
  return {
    evo,
    logger,
    fileLogger,
    consoleLogger,
    debugLog,
    chat,
  };
}
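A minimal usage sketch, assuming this module is saved as createApp.js (a hypothetical filename) and an OpenAI API key is available in the environment; how the returned evo agent is driven afterwards depends on the Evo class API and is not shown:

import { createApp } from './createApp.js'; // hypothetical path to this module

const { evo, logger, fileLogger, debugLog } = await createApp({
  rootDir: process.cwd(),                 // root of the user's workspace
  sessionName: 'demo-session',            // overrides the timestamp-based default
  openAiKey: process.env.OPENAI_API_KEY,  // forwarded into Env as OPENAI_API_KEY
  debug: true,                            // wraps the LLM with DebugLlmApi logging
});

console.log('Chat transcript will be written to sessions/demo-session/.evo/chat.md');
// Running the agent (e.g. via evo) is omitted here, as it depends on the Evo API.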