// catai — Chat UI and Local API for the Llama models
import AppDb from '../../storage/app-db.js';
import NodeLlamaCppV2 from './binds/node-llama-cpp/node-llama-cpp-v2/node-llama-cpp-v2.js';
import { withLock } from 'lifecycle-utils';
import { ModelNotInstalledError } from './errors/ModelNotInstalledError.js';
import { NoActiveModelError } from './errors/NoActiveModelError.js';
import { NoModelBindError } from './errors/NoModelBindError.js';
import { BindNotFoundError } from './errors/BindNotFoundError.js';
// Registry of every model-bind implementation this build knows about;
// createChat matches a model's configured bind name against `shortName` here.
export const ALL_BINDS = [NodeLlamaCppV2];
// Instantiated bind objects, keyed by bind shortName, so each bind is
// constructed and initialized at most once per process.
const cachedBinds = {};
/**
 * Look up an installed model's details in the app database.
 * When no name is given, falls back to the currently active model.
 * @param {string} [modelName] - model to look up; defaults to the active model
 * @returns the stored model details record
 * @throws {ModelNotInstalledError} when the named model is not installed
 * @throws {NoActiveModelError} when no name was given and no model is active
 * @throws {NoModelBindError} when the model has no bind configured
 */
export function findLocalModel(modelName) {
    const lookupKey = modelName || AppDb.db.activeModel;
    const details = AppDb.db.models[lookupKey];

    if (!details) {
        // Distinguish "asked for a missing model" from "nothing selected at all".
        if (modelName)
            throw new ModelNotInstalledError(`Model ${modelName} not installed`);
        throw new NoActiveModelError('No active model');
    }

    if (!details.settings.bind)
        throw new NoModelBindError('No bind class');

    return details;
}
/**
 * Fetch the already-instantiated bind object for a model, if one exists.
 * @param [modelDetails] - model details record; defaults to the active model's
 * @returns the cached bind instance, or null when none has been created yet
 */
export function getCacheBindClass(modelDetails = findLocalModel()) {
    const cached = cachedBinds[modelDetails.settings.bind];
    return cached ? cached : null;
}
// Single shared context object so every createChat call serializes on one lock.
const lockContext = {};

/**
 * Create a chat session for the requested (or active) model, lazily
 * constructing, caching, and initializing its bind class on first use.
 * Calls are serialized through withLock so concurrent callers cannot
 * double-construct or double-initialize the same bind.
 * @param {object} [options] - chat options; `options.model` selects the model
 * @returns {Promise<*>} the chat object produced by the bind
 * @throws {BindNotFoundError} when the model's configured bind has no
 *         matching implementation in ALL_BINDS
 */
export default async function createChat(options) {
    return await withLock(lockContext, "createChat", async () => {
        const modelDetails = findLocalModel(options?.model);

        // Fast path: the bind was already constructed and initialized.
        const existingBind = getCacheBindClass(modelDetails);
        if (existingBind)
            return await existingBind.createChat(options);

        const bindName = modelDetails.settings.bind;
        const implementation = ALL_BINDS.find((candidate) => candidate.shortName === bindName);
        if (!implementation)
            throw new BindNotFoundError(`Bind class "${bindName}" not found. Try to update the model/CatAI`);

        const bindInstance = cachedBinds[bindName] ??= new implementation(modelDetails);
        await bindInstance.initialize();
        return await bindInstance.createChat(options);
    });
}
/**
 * Resolve the filesystem path of a model's downloaded model file.
 * @param {string} [name] - model name; defaults to the active model
 * @returns {string|undefined} the downloaded model file path, if any
 */
export function getModelPath(name) {
    const { downloadedFiles } = findLocalModel(name);
    return downloadedFiles?.model;
}
//# sourceMappingURL=bind-class.js.map