nicechat
Version:
An extensible AI chat framework for OpenAI's models
105 lines (104 loc) • 4.12 kB
JavaScript
;
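// Helper functions emitted by the TypeScript compiler (tslib equivalents) for
// downleveled async/await, for-await-of iteration, and default-import interop.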
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __asyncValues = (this && this.__asyncValues) || function (o) {
if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
var m = o[Symbol.asyncIterator], i;
return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.chat = void 0;
const chalk_1 = __importDefault(require("chalk"));
const nicechat_1 = require("./nicechat");
const replicate_1 = __importDefault(require("replicate"));
const MAX_TOKENS = 1024;
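// Interactive chat session: reads user messages via readLine() from ./nicechat,
// streams each model reply through the Replicate client, and keeps the full
// message history so every turn carries prior context.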
function chat(apiKey, model, system) {
var _a, e_1, _b, _c;
return __awaiter(this, void 0, void 0, function* () {
console.log("apiKey", apiKey);
const client = new replicate_1.default({
auth: apiKey,
});
console.log("[" + chalk_1.default.blueBright(system) + "]");
const messages = [];
// initial user input
const input = yield (0, nicechat_1.readLine)();
messages.push(user(input));
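// Each iteration rebuilds the prompt from the full history, streams one reply,
// then waits for the next user message.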
while (true) {
let reply = "";
const i = buildInput(buildPrompt(messages), system);
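// Downleveled `for await...of`: iterate the events yielded by client.stream(),
// writing each chunk to stdout in green while accumulating the full reply.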
try {
for (var _d = true, _e = (e_1 = void 0, __asyncValues(client.stream(model, { input: i }))), _f; _f = yield _e.next(), _a = _f.done, !_a; _d = true) {
_c = _f.value;
_d = false;
const event = _c;
const m = event.toString();
process.stdout.write(chalk_1.default.greenBright(m));
reply += m;
}
}
catch (e_1_1) { e_1 = { error: e_1_1 }; }
finally {
try {
if (!_d && !_a && (_b = _e.return)) yield _b.call(_e);
}
finally { if (e_1) throw e_1.error; }
}
messages.push(assistant(reply));
// ask user for next input
console.log("\n");
const input = yield (0, nicechat_1.readLine)();
messages.push(user(input));
}
});
}
exports.chat = chat;
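// Message constructors for the two roles tracked in the history.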
function user(content) {
return {
role: "user",
content,
};
}
function assistant(content) {
return {
role: "assistant",
content,
};
}
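// Flatten the history into a single prompt string, wrapping user turns in
// [INST] ... [/INST] instruction markers (the Llama 2 chat convention) and
// leaving assistant turns bare.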
const buildPrompt = (messages) => {
return messages
.map((m) => {
if (m.role === "user") {
return `[INST] ${m.content} [/INST]`;
}
else {
return `${m.content}`;
}
})
.join("\n");
};
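// Assemble the request payload for the Replicate model: the flattened prompt,
// the system prompt, and fixed sampling parameters.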
function buildInput(prompt, system) {
return {
top_k: 10,
top_p: 0.95,
prompt,
max_tokens: MAX_TOKENS,
temperature: 0.8,
system_prompt: system,
repeat_penalty: 1.1,
presence_penalty: 0,
frequency_penalty: 0,
};
}
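For reference, a minimal usage sketch. It assumes the package entry point re-exports chat, that REPLICATE_API_TOKEN holds a valid Replicate key, and that the model identifier below is merely illustrative; any Replicate-hosted chat model that accepts this prompt format should work.

const { chat } = require("nicechat");

chat(
  process.env.REPLICATE_API_TOKEN,          // Replicate API key
  "meta/llama-2-70b-chat",                  // illustrative model id
  "You are a concise, helpful assistant."   // system prompt, echoed in blue at startup
).catch((err) => {
  console.error(err);
  process.exit(1);
});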