/**
 * @jackhua/mini-langchain
 * A lightweight TypeScript implementation of LangChain with cost optimization features.
 *
 * Type declarations (llm.d.ts). Package metadata from the registry listing:
 * 86 lines, 2.54 kB, TypeScript. (Version not recorded in this extract.)
 */
import { BaseChain } from './base';
import { BaseLLM } from '../llms/base';
import { PromptTemplate, ChatPromptTemplate } from '../prompts/prompt';
import { ChainValues, BaseCallbackHandler, Message } from '../core/types';
import { BaseMemory } from '../memory/memory';
/**
* Chain to run a prompt through an LLM
*/
export declare class LLMChain extends BaseChain {
/** Language model the formatted prompt is sent to. */
private llm;
/** Template (plain or chat) used to build the prompt from the chain inputs. */
private prompt;
/** Key under which the model's response appears in the values returned by `call`. */
private outputKey;
/**
 * @param config.llm - Model to run the prompt through.
 * @param config.prompt - Plain or chat template; its variables are filled from the chain inputs.
 * @param config.outputKey - Name for the output entry in the result values.
 *   (Default not visible in this declaration file — check the implementation.)
 * @param config.memory - Optional conversation memory attached to the chain.
 * @param config.callbacks - Handlers notified of chain/LLM lifecycle events.
 * @param config.verbose - Enables extra logging when true — presumably via the callbacks; verify in `BaseChain`.
 */
constructor(config: {
llm: BaseLLM;
prompt: PromptTemplate | ChatPromptTemplate;
outputKey?: string;
memory?: BaseMemory;
callbacks?: BaseCallbackHandler[];
verbose?: boolean;
});
/** Input variable names the chain expects — presumably derived from the prompt template. */
get inputKeys(): string[];
/** Output keys produced by `call` (includes `outputKey`). */
get outputKeys(): string[];
/**
 * Run the chain: format the prompt from `inputs`, invoke the LLM, and return
 * the result keyed by `outputKey`.
 */
call(inputs: ChainValues, callbacks?: BaseCallbackHandler[]): Promise<ChainValues>;
/**
 * Format the prompt with the inputs.
 *
 * Returns a plain string for a `PromptTemplate` or a `Message[]` for a
 * `ChatPromptTemplate` — callers must handle both shapes.
 */
prepPrompt(inputs: ChainValues): Promise<string | Message[]>;
/**
 * Stream the response.
 *
 * Yields incremental text chunks from the LLM as they arrive instead of
 * resolving a single final value like `call` does.
 */
streamResponse(inputs: ChainValues): AsyncGenerator<string>;
/**
 * Create an LLMChain from a prompt template string.
 *
 * Convenience factory: builds the template from `prompt` — presumably via
 * `PromptTemplate` parsing; implementation not visible here.
 */
static fromLLM(llm: BaseLLM, prompt: string, config?: {
outputKey?: string;
memory?: BaseMemory;
}): LLMChain;
}
/**
* Conversation Chain - a chain specifically for conversations
*/
export declare class ConversationChain extends LLMChain {
/**
 * Unlike `LLMChain`, `prompt` is optional here — a default conversational
 * template is presumably supplied by the implementation when omitted; verify
 * in the source.
 *
 * @param config.llm - Model that generates the conversation replies.
 * @param config.prompt - Optional template; falls back to an implementation-provided default.
 * @param config.outputKey - Name for the response entry in the result values.
 * @param config.memory - Conversation memory carrying prior turns between calls.
 * @param config.callbacks - Handlers notified of chain/LLM lifecycle events.
 * @param config.verbose - Enables extra logging when true.
 */
constructor(config: {
llm: BaseLLM;
prompt?: PromptTemplate | ChatPromptTemplate;
outputKey?: string;
memory?: BaseMemory;
callbacks?: BaseCallbackHandler[];
verbose?: boolean;
});
/**
 * Overrides `LLMChain.inputKeys` — presumably to exclude the variables that
 * memory injects (e.g. history), so callers only supply the new user input.
 * TODO confirm against the implementation.
 */
get inputKeys(): string[];
}
/**
* Question-Answering Chain
*/
export declare class QAChain extends BaseChain {
/** Language model used to answer the question. */
private llm;
/** Template combining the question and document context into a single prompt. */
private prompt;
/** Key in the call inputs holding the user's question. */
private inputKey;
/** Prompt variable name the document text is substituted into. */
private documentVariableName;
/** Key under which the answer appears in the values returned by `call`. */
private outputKey;
/**
 * All keys/names are optional — defaults are presumably standard ones
 * (e.g. "question"/"context"/"answer"); not visible in this declaration file.
 *
 * @param config.llm - Model used to answer questions.
 * @param config.prompt - Optional QA prompt; an implementation default is used when omitted.
 * @param config.inputKey - Input key carrying the question.
 * @param config.documentVariableName - Prompt variable receiving the document context.
 * @param config.outputKey - Result key carrying the answer.
 * @param config.memory - Optional memory attached to the chain.
 */
constructor(config: {
llm: BaseLLM;
prompt?: PromptTemplate;
inputKey?: string;
documentVariableName?: string;
outputKey?: string;
memory?: BaseMemory;
});
/** Input keys expected by `call` — at minimum `inputKey`; see implementation. */
get inputKeys(): string[];
/** Output keys produced by `call` (includes `outputKey`). */
get outputKeys(): string[];
/**
 * Run question answering: format the prompt from the question (and document
 * context), invoke the LLM, and return the answer keyed by `outputKey`.
 */
call(inputs: ChainValues, callbacks?: BaseCallbackHandler[]): Promise<ChainValues>;
/**
 * Create a QA chain from documents.
 *
 * The documents' `pageContent` is presumably concatenated into the prompt's
 * document variable; `metadata` handling is not visible here.
 */
static fromDocuments(llm: BaseLLM, documents: Array<{
pageContent: string;
metadata?: any;
}>, config?: {
prompt?: PromptTemplate;
inputKey?: string;
outputKey?: string;
}): QAChain;
}
//# sourceMappingURL=llm.d.ts.map