generator-begcode
Spring Boot + Angular/React/Vue in one handy generator
import { ChatCompletionRole } from 'openai/resources/index.js';
import { Tokenizer, ChatLogs, ChatMessage, ChatLogType } from './index.js';
import { FunctionDefinition } from '../index.js';
export type ChatRole = ChatCompletionRole;
export declare class Chat {
    protected _tokenizer: Tokenizer;
    protected options?: {
        onMessagesAdded?: (type: ChatLogType, msgs: ChatMessage[]) => Promise<void>;
    } | undefined;
    protected _chatLogs: ChatLogs;
    constructor(_tokenizer: Tokenizer, options?: {
        onMessagesAdded?: (type: ChatLogType, msgs: ChatMessage[]) => Promise<void>;
    } | undefined);
    // Read-only accessors
    get chatLogs(): ChatLogs;
    get tokens(): number;
    get tokenizer(): Tokenizer;
    get messages(): ChatMessage[];
    // Append message(s) to the log of the given type; addWithoutEvents does so without firing the onMessagesAdded hook
    add(type: ChatLogType, msg: ChatMessage | ChatMessage[]): Promise<void>;
    addWithoutEvents(type: ChatLogType, msg: ChatMessage | ChatMessage[]): void;
    // Overloads: a (role, content) pair, a single message, or an array of messages
    persistent(role: ChatRole, content: string): Promise<void>;
    persistent(msg: ChatMessage): Promise<void>;
    persistent(msgs: ChatMessage[]): Promise<void>;
    temporary(role: ChatRole, content?: string): Promise<void>;
    temporary(msg: ChatMessage): Promise<void>;
    temporary(msgs: ChatMessage[]): Promise<void>;
    addFunction(fn: FunctionDefinition): void;
    getLastMessage(type: ChatLogType): ChatMessage | undefined;
    cloneChatLogs(): ChatLogs;
    cloneEmpty(): Chat;
    toString(): string;
    toJSON(): ChatLogs;
}
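
Below is a minimal usage sketch of the Chat API declared above. The import path, the tokenizer placeholder, and the example prompts are illustrative assumptions, not part of this declaration file; any concrete Tokenizer implementation from the same package would take the placeholder's place.

// Usage sketch (illustrative; import path and tokenizer value are assumptions)
import { Chat } from './Chat.js';
import { Tokenizer, ChatLogType } from './index.js';

declare const tokenizer: Tokenizer; // placeholder for any concrete Tokenizer implementation

async function example(): Promise<void> {
  const chat = new Chat(tokenizer, {
    // Optional hook for reacting to newly added messages
    onMessagesAdded: async (type: ChatLogType, msgs) => {
      console.log(`added ${msgs.length} message(s) to the "${type}" log`);
    },
  });

  // persistent()/temporary() overloads accept (role, content), one message, or an array
  await chat.persistent('system', 'You are a helpful assistant.');
  await chat.temporary('user', 'Summarize the latest build log.');

  console.log(chat.tokens);     // token count as measured by the injected tokenizer
  console.log(chat.toString()); // string rendering of the current chat logs
}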