@kaibanjs/tools
A set of tools to work with LLMs and KaibanJS
TypeScript declarations:
import { ChatOpenAI, OpenAIEmbeddings } from '@langchain/openai';
import { MemoryVectorStore } from 'langchain/vectorstores/memory';
import { BaseDocumentLoader } from '@langchain/core/document_loaders/base';
import { Document } from 'langchain/document';

// Optional overrides for the embeddings, vector store, LLM, prompt template,
// chunking behavior, and OpenAI credentials used by the toolkit.
interface RAGToolkitOptions {
    embeddings?: OpenAIEmbeddings;
    vectorStore?: MemoryVectorStore;
    llmInstance?: ChatOpenAI;
    promptQuestionTemplate?: string;
    chunkOptions?: {
        chunkSize: number;
        chunkOverlap: number;
    };
    env?: {
        OPENAI_API_KEY: string;
    };
}

// A source to ingest plus the loader type that should handle it.
interface DocumentSource {
    source: string | File;
    type: string;
}

// Factory that returns a LangChain document loader for a given source.
type LoaderFunction = (source: string | File) => BaseDocumentLoader;

export declare class RAGToolkit {
    private embeddings;
    private vectorStore;
    private llmInstance;
    private promptQuestionTemplate;
    private chunkOptions;
    private loaders;
    constructor(options?: RAGToolkitOptions);
    registerLoader(type: string, loaderFunction: LoaderFunction): void;
    addDocuments(sources: DocumentSource[]): Promise<void>;
    loadDocuments(sources: DocumentSource[]): Promise<Document[]>;
    chunkDocuments(documents: Document[]): Promise<Document[]>;
    search(query: string): Promise<Document[]>;
    askQuestion(query: string): Promise<string>;
}
export {};
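
A minimal usage sketch based on the declarations above, assuming RAGToolkit is importable from the package root, that an OPENAI_API_KEY is set in the environment, and that a 'string' source type is handled out of the box (if not, register one as shown in the next sketch); the source text and the question are illustrative:

import { RAGToolkit } from '@kaibanjs/tools';

async function main() {
  // Constructor options mirror RAGToolkitOptions; every field is optional.
  const toolkit = new RAGToolkit({
    chunkOptions: { chunkSize: 500, chunkOverlap: 100 },
    env: { OPENAI_API_KEY: process.env.OPENAI_API_KEY ?? '' },
  });

  // Ingest sources: each entry names the loader type that should handle it.
  // 'string' is an assumed type here; see registerLoader below if it is not built in.
  await toolkit.addDocuments([
    { source: 'KaibanJS is a JavaScript framework for multi-agent systems.', type: 'string' },
  ]);

  // Retrieve the most relevant chunks for a query...
  const hits = await toolkit.search('What is KaibanJS?');
  console.log(hits.map((doc) => doc.pageContent));

  // ...or let the toolkit run retrieval plus generation in one call.
  const answer = await toolkit.askQuestion('What is KaibanJS?');
  console.log(answer);
}

main().catch(console.error);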
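
Because registerLoader accepts any LoaderFunction, additional source types can be wired in. Below is a sketch of a hypothetical in-memory loader built on the same BaseDocumentLoader and Document imports the declarations use; the 'memo' type name and the sample text are made up:

import { RAGToolkit } from '@kaibanjs/tools';
import { BaseDocumentLoader } from '@langchain/core/document_loaders/base';
import { Document } from 'langchain/document';

// Hypothetical loader that wraps an in-memory string as a single Document.
class InMemoryLoader extends BaseDocumentLoader {
  constructor(private readonly text: string) {
    super();
  }

  async load(): Promise<Document[]> {
    return [new Document({ pageContent: this.text, metadata: { source: 'in-memory' } })];
  }
}

async function ingestMemo() {
  const toolkit = new RAGToolkit();

  // Map the (made-up) 'memo' type to the custom loader.
  toolkit.registerLoader('memo', (source) => new InMemoryLoader(String(source)));

  // addDocuments is expected to load each source via its registered loader,
  // then chunk and index it before querying.
  await toolkit.addDocuments([{ source: 'The team offsite is on May 12.', type: 'memo' }]);

  console.log(await toolkit.askQuestion('When is the team offsite?'));
}

ingestMemo().catch(console.error);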