@jackhua/mini-langchain
A lightweight TypeScript implementation of LangChain with cost optimization features
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.BaseChatLLM = exports.BaseLLM = void 0;
/**
 * Abstract base class for all LLM implementations
 */
class BaseLLM {
    constructor(config) {
        this.callbacks = [];
        this.verbose = false;
        if (config?.callbacks) {
            this.callbacks = config.callbacks;
        }
        if (config?.verbose !== undefined) {
            this.verbose = config.verbose;
        }
    }
    /**
     * Call the LLM with a simple string prompt
     */
    async call(prompt, options) {
        const messages = [{ type: 'human', content: prompt }];
        const result = await this.generate(messages, options);
        return result.text;
    }
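    /*
     * Usage sketch (illustrative; `MyLLM` is a hypothetical subclass that
     * implements the abstract `generate` method):
     *
     *   const llm = new MyLLM();
     *   const text = await llm.call('Summarize LangChain in one line.');
     *   // `call` wraps the prompt in a single human message and returns
     *   // only the generated text from the `generate` result.
     */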
    /**
     * Predict the next message in a conversation
     */
    async predict(messages, options) {
        const result = await this.generate(messages, options);
        return {
            type: 'ai',
            content: result.text,
            additionalKwargs: result.llmOutput
        };
    }
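    /*
     * Usage sketch (illustrative): `predict` takes a whole conversation and
     * returns the next AI message, passing any provider metadata from
     * `llmOutput` through as `additionalKwargs`.
     *
     *   const reply = await llm.predict([
     *       { type: 'human', content: 'What is 2 + 2?' }
     *   ]);
     *   // reply.type === 'ai'; reply.content holds the generated text.
     */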
    /**
     * Add a callback handler
     */
    addCallback(callback) {
        this.callbacks.push(callback);
    }
    /**
     * Remove a callback handler
     */
    removeCallback(callback) {
        const index = this.callbacks.indexOf(callback);
        if (index > -1) {
            this.callbacks.splice(index, 1);
        }
    }
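    /*
     * Usage sketch (illustrative): a handler object only needs the hooks it
     * cares about, since each dispatcher below checks that the method exists
     * before invoking it.
     *
     *   const logger = {
     *       handleLLMEnd: async (output) => console.log(output.llmOutput)
     *   };
     *   llm.addCallback(logger);
     *   // ...later, to stop receiving events:
     *   llm.removeCallback(logger);
     */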
    /**
     * Handle LLM start callbacks
     */
    async handleLLMStart(prompts) {
        for (const callback of this.callbacks) {
            if (callback.handleLLMStart) {
                await callback.handleLLMStart(this.constructor.name, prompts);
            }
        }
    }
    /**
     * Handle LLM end callbacks
     */
    async handleLLMEnd(output) {
        for (const callback of this.callbacks) {
            if (callback.handleLLMEnd) {
                await callback.handleLLMEnd(output);
            }
        }
    }
    /**
     * Handle LLM error callbacks
     */
    async handleLLMError(error) {
        for (const callback of this.callbacks) {
            if (callback.handleLLMError) {
                await callback.handleLLMError(error);
            }
        }
    }
}
exports.BaseLLM = BaseLLM;
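/*
 * Subclass sketch (illustrative; `EchoLLM` and its result shape are
 * assumptions, not part of this module): a concrete LLM implements
 * `generate`, and the handle* dispatchers above appear intended to be
 * fired around the underlying provider call.
 *
 *   class EchoLLM extends BaseLLM {
 *       async generate(messages, options) {
 *           await this.handleLLMStart(messages.map((m) => m.content));
 *           try {
 *               // Echo the last message back instead of calling a provider.
 *               const result = { text: messages[messages.length - 1].content, llmOutput: {} };
 *               await this.handleLLMEnd(result);
 *               return result;
 *           }
 *           catch (error) {
 *               await this.handleLLMError(error);
 *               throw error;
 *           }
 *       }
 *   }
 */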
/**
 * Base class for chat-based LLMs
 */
class BaseChatLLM extends BaseLLM {
    constructor() {
        super(...arguments);
        /**
         * Default temperature for the model
         */
        this.defaultTemperature = 0.7;
        /**
         * Default max tokens for the model
         */
        this.defaultMaxTokens = 1000;
    }
    /**
     * Get default options
     */
    getDefaultOptions() {
        return {
            temperature: this.defaultTemperature,
            maxTokens: this.defaultMaxTokens
        };
    }
    /**
     * Merge options with defaults
     */
    mergeOptions(options) {
        return {
            ...this.getDefaultOptions(),
            ...options
        };
    }
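    /*
     * Illustrative: caller-supplied options win over the defaults because
     * they are spread last, e.g. mergeOptions({ temperature: 0 }) yields
     * { temperature: 0, maxTokens: 1000 }.
     */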
}
exports.BaseChatLLM = BaseChatLLM;
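/*
 * Usage sketch (illustrative; `FakeChatLLM` is a hypothetical subclass):
 *
 *   class FakeChatLLM extends BaseChatLLM {
 *       async generate(messages, options) {
 *           const opts = this.mergeOptions(options);
 *           return { text: `ok at temperature ${opts.temperature}`, llmOutput: {} };
 *       }
 *   }
 *   // Inside an async context:
 *   const chat = new FakeChatLLM({ verbose: true });
 *   const answer = await chat.call('Hello!'); // 'ok at temperature 0.7'
 */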
//# sourceMappingURL=base.js.map