@cherrystudio/embedjs-llama-cpp
Version:
Enables usage of node-llama-cpp with embedjs
12 lines (11 loc) • 464 B
TypeScript
import { AIMessage, HumanMessage, SystemMessage } from '@langchain/core/messages';
import { BaseModel, ModelResponse } from '@cherrystudio/embedjs-interfaces';
/**
 * Ambient declaration of an embedjs model adapter backed by llama.cpp
 * (via node-llama-cpp, per the package description — implementation not
 * visible here).
 *
 * Extends `BaseModel` from `@cherrystudio/embedjs-interfaces`; callers
 * interact with it through the embedjs model interface.
 */
export declare class LlamaCpp extends BaseModel {
// Debug flag, fixed at construction. NOTE(review): type is erased in this
// declaration — presumably boolean; confirm against the implementation.
private readonly debug;
// Underlying llama.cpp model handle. NOTE(review): exact type not visible
// from this declaration — likely lazily initialized; verify in the source.
private model;
/**
 * @param options.temperature - Optional sampling temperature. The default
 *   used when omitted is not visible in this declaration.
 * @param options.modelPath - Path to the model file to load (required).
 */
constructor({ temperature, modelPath }: {
temperature?: number;
modelPath: string;
});
/**
 * Runs the given conversation (LangChain AI/System/Human messages) through
 * the model and resolves with the model's response.
 *
 * @param messages - Ordered message history to send to the model.
 * @returns A promise resolving to the model's `ModelResponse`.
 */
runQuery(messages: (AIMessage | SystemMessage | HumanMessage)[]): Promise<ModelResponse>;
}