@restnfeel/agentc-starter-kit
Version:
한국어 기업용 CMS 모듈 - Task Master AI와 함께 빠르게 웹사이트를 구현할 수 있는 재사용 가능한 컴포넌트 시스템
298 lines • 8.15 kB
TypeScript
import React, { ReactNode } from "react";
/**
 * Lifecycle status used both for the chatbot as a whole (`globalStatus`)
 * and for each subsystem state (`VectorStoreState`, `LLMState`, `StorageState`).
 */
export declare enum ChatbotStatus {
    /** Not yet started / nothing in progress. */
    IDLE = "idle",
    /** Initialization or connection in progress. */
    LOADING = "loading",
    /** Initialized and usable. */
    READY = "ready",
    /** A failure occurred; see the associated `ChatbotError`. */
    ERROR = "error",
    /** Previously connected but the connection was lost or closed. */
    DISCONNECTED = "disconnected"
}
/** Supported vector-database backends selectable via `VectorStoreConfig.type`. */
export declare enum VectorStoreType {
    QDRANT = "qdrant",
    PINECONE = "pinecone",
    SUPABASE = "supabase"
}
/** Supported LLM vendors selectable via `LLMConfig.provider`. */
export declare enum LLMProvider {
    OPENAI = "openai",
    ANTHROPIC = "anthropic",
    GOOGLE = "google",
    MISTRAL = "mistral"
}
/**
 * Normalized error record carried in subsystem states (`error` fields),
 * the context's `lastError`, and error-action payloads.
 */
export interface ChatbotError {
    /** Machine-readable error code. Format is implementation-defined — TODO confirm with producers. */
    code: string;
    /** Human-readable description of the failure. */
    message: string;
    /** Extra diagnostic context. NOTE(review): typed `any` — narrowing to `unknown` would be safer but changes the public surface. */
    details?: any;
    /** When the error was recorded. */
    timestamp: Date;
}
/**
 * Connection/behavior settings for the vector store,
 * consumed by `initializeVectorStore`.
 */
export interface VectorStoreConfig {
    /** Which backend to use (Qdrant, Pinecone, or Supabase). */
    type: VectorStoreType;
    /** Endpoint URL; optionality suggests some backends derive it elsewhere — TODO confirm. */
    url?: string;
    /** Credential for the backend, if it requires one. */
    apiKey?: string;
    /** Target collection/index name. */
    collection?: string;
    /** Embedding vector dimensionality. */
    dimensions?: number;
    /** Distance metric used for similarity search. */
    similarity?: "cosine" | "euclidean" | "dot";
    /** Logical partition within the store (e.g. Pinecone namespaces) — presumably; verify per backend. */
    namespace?: string;
}
/**
 * Model and sampling settings for the LLM, consumed by `initializeLLM`.
 * Optional sampling fields presumably fall back to provider defaults — TODO confirm.
 */
export interface LLMConfig {
    /** Which vendor to call. */
    provider: LLMProvider;
    /** Provider-specific model identifier (e.g. a model name string). */
    model: string;
    /** Credential for the provider, if not supplied elsewhere. */
    apiKey?: string;
    /** Sampling temperature. */
    temperature?: number;
    /** Maximum tokens to generate per response. */
    maxTokens?: number;
    /** Nucleus-sampling cutoff. */
    topP?: number;
    /** Penalty on token frequency (OpenAI-style parameter). */
    frequencyPenalty?: number;
    /** Penalty on token presence (OpenAI-style parameter). */
    presencePenalty?: number;
    /** Per-connection system prompt. NOTE(review): `ChatbotConfig.systemPrompt` also exists — precedence between the two is not visible here. */
    systemPrompt?: string;
}
/**
 * File-storage settings for document upload/download,
 * consumed by `initializeStorage`.
 */
export interface StorageConfig {
    /** Storage bucket name (required). */
    bucket: string;
    /** Storage service endpoint, if not implied by the environment. */
    url?: string;
    /** Credential for the storage service. */
    apiKey?: string;
    /** Upload size limit. Unit not visible here — presumably bytes; confirm against the uploader. */
    maxFileSize?: number;
    /** Accepted file types. Presumably MIME types or extensions — TODO confirm. */
    allowedTypes?: string[];
}
/**
 * Top-level chatbot behavior and UI configuration.
 * A default instance is exported as `defaultChatbotConfig`; it can be
 * patched at runtime via `updateConfig` and replaced via `importConfig`.
 */
export interface ChatbotConfig {
    /** Base system prompt for the assistant. */
    systemPrompt: string;
    /** Message shown when a conversation starts. */
    welcomeMessage: string;
    /** Canned questions offered to the user. */
    suggestedQuestions: string[];
    /** Cap on how many messages of history are kept/sent — exact semantics not visible here. */
    maxConversationHistory: number;
    /** Toggles retrieval-augmented generation (`generateRAGResponse`). */
    enableRAG: boolean;
    /** Retrieval tuning used when RAG is enabled. */
    ragSettings: {
        /** Minimum similarity score for a retrieved chunk to be used. */
        similarityThreshold: number;
        /** Maximum number of documents retrieved per query. */
        maxRetrievedDocs: number;
        /** Document chunk size for indexing. Unit (chars vs. tokens) not visible — TODO confirm. */
        chunkSize: number;
        /** Overlap between consecutive chunks, same unit as `chunkSize`. */
        chunkOverlap: number;
    };
    /** Presentation options for the chat UI. */
    uiSettings: {
        theme: string;
        language: string;
        showTimestamps: boolean;
        enableTypingIndicator: boolean;
    };
}
/**
 * A knowledge-base document managed by the chatbot (uploaded, embedded,
 * and retrieved during RAG).
 * NOTE(review): the name shadows the DOM `Document` type in browser code;
 * importers should alias it if they also touch the DOM.
 */
export interface Document {
    /** Unique document identifier. */
    id: string;
    title: string;
    /** Full text content of the document. */
    content: string;
    metadata: {
        /** Where the document came from (e.g. original filename/URL) — presumably; not pinned down here. */
        source?: string;
        uploadedAt: Date;
        /** Size of the document. Presumably bytes — TODO confirm. */
        size: number;
        /** File/content type. Presumably a MIME type — TODO confirm. */
        type: string;
        description?: string;
        tags?: string[];
    };
    /** Embedding vector, present once the document has been indexed. */
    embedding?: number[];
    /** Indexing pipeline state for this document. */
    status: "processing" | "ready" | "error";
}
/** A single chat message within a `Conversation`. */
export interface ConversationMessage {
    /** Unique message identifier. */
    id: string;
    /** Message text. */
    content: string;
    /** Chat role, mirroring the usual LLM role taxonomy. */
    role: "user" | "assistant" | "system";
    timestamp: Date;
    /** Optional generation diagnostics, populated for assistant messages — presumably; verify in the reducer. */
    metadata?: {
        /** Documents retrieved to ground this response (RAG sources). */
        retrievedDocs?: Document[];
        /** Model/retrieval confidence score. Scale not visible here. */
        confidence?: number;
        /** Token count for this message. */
        tokens?: number;
    };
}
/** A chat session: an ordered message list plus bookkeeping timestamps. */
export interface Conversation {
    /** Unique conversation identifier (used by `CONVERSATION_SELECT` and the `conversationId` method params). */
    id: string;
    /** Optional display title (settable via `startConversation(title)`). */
    title?: string;
    /** Messages in chronological order — presumably; ordering is not enforced by the type. */
    messages: ConversationMessage[];
    createdAt: Date;
    updatedAt: Date;
    /** Optional ownership/grouping info. */
    metadata?: {
        userId?: string;
        sessionId?: string;
        tags?: string[];
    };
}
/** Runtime state of the vector-store subsystem within `ChatbotContextState`. */
export interface VectorStoreState {
    status: ChatbotStatus;
    /** Connection flag; how it relates to `status` is decided by the reducer, not this type. */
    isConnected: boolean;
    /** Active configuration, or `null` before `initializeVectorStore` runs. */
    config: VectorStoreConfig | null;
    /** Last subsystem error, or `null` when healthy. */
    error: ChatbotError | null;
    /** Collection names reported by the backend (see `VECTOR_STORE_CONNECTED` payload). */
    collections: string[];
    /** Number of indexed documents reported by the backend. */
    documentCount: number;
}
/** Runtime state of the LLM subsystem within `ChatbotContextState`. */
export interface LLMState {
    status: ChatbotStatus;
    isConnected: boolean;
    /** Active configuration, or `null` before `initializeLLM` runs. */
    config: LLMConfig | null;
    /** Last subsystem error, or `null` when healthy. */
    error: ChatbotError | null;
    /** Models offered by the provider (see `LLM_CONNECTED` payload). */
    availableModels: string[];
    /** Currently selected model, or `null` before connection. */
    currentModel: string | null;
}
/** Runtime state of the file-storage subsystem within `ChatbotContextState`. */
export interface StorageState {
    status: ChatbotStatus;
    isConnected: boolean;
    /** Active configuration, or `null` before `initializeStorage` runs. */
    config: StorageConfig | null;
    /** Last subsystem error, or `null` when healthy. */
    error: ChatbotError | null;
    /** In-flight upload progress keyed by file id (see `STORAGE_UPLOAD_PROGRESS`). Value scale (0-1 vs 0-100) not visible here. */
    uploadProgress: {
        [fileId: string]: number;
    };
}
/**
 * Full reducer-managed state for the chatbot context
 * (first argument of `chatbotReducer`).
 */
export interface ChatbotContextState {
    /** True once `initialize` has completed. */
    isInitialized: boolean;
    /** Aggregate status across all subsystems. */
    globalStatus: ChatbotStatus;
    /** Initialization progress (see `SET_INITIALIZATION_PROGRESS`). Scale not pinned by the type. */
    initializationProgress: number;
    /** Per-subsystem states. */
    vectorStore: VectorStoreState;
    llm: LLMState;
    storage: StorageState;
    /** Current effective configuration. */
    config: ChatbotConfig;
    /** Known knowledge-base documents. */
    documents: Document[];
    /** All conversations. */
    conversations: Conversation[];
    /** The active conversation, or `null` when none is selected. */
    currentConversation: Conversation | null;
    /** True while a request (e.g. message generation) is in flight. */
    isProcessing: boolean;
    /** Most recent error, cleared via `CLEAR_ERROR`. */
    lastError: ChatbotError | null;
}
/**
 * Discriminated union of every action accepted by `chatbotReducer`,
 * keyed on the `type` tag. Grouped by concern:
 * initialization, vector store, LLM, storage, config, documents,
 * conversations, and cross-cutting flags/errors.
 */
export type ChatbotAction = {
    type: "INITIALIZE_START";
} | {
    type: "INITIALIZE_SUCCESS";
} | {
    type: "INITIALIZE_ERROR";
    payload: ChatbotError;
} | {
    /* Progress value for `initializationProgress`. */
    type: "SET_INITIALIZATION_PROGRESS";
    payload: number;
} | {
    /* --- vector store subsystem --- */
    type: "VECTOR_STORE_CONNECTING";
} | {
    type: "VECTOR_STORE_CONNECTED";
    payload: {
        collections: string[];
        documentCount: number;
    };
} | {
    type: "VECTOR_STORE_ERROR";
    payload: ChatbotError;
} | {
    type: "VECTOR_STORE_CONFIG_UPDATE";
    payload: Partial<VectorStoreConfig>;
} | {
    /* --- LLM subsystem --- */
    type: "LLM_CONNECTING";
} | {
    type: "LLM_CONNECTED";
    payload: {
        availableModels: string[];
        currentModel: string;
    };
} | {
    type: "LLM_ERROR";
    payload: ChatbotError;
} | {
    type: "LLM_CONFIG_UPDATE";
    payload: Partial<LLMConfig>;
} | {
    /* --- storage subsystem --- */
    type: "STORAGE_CONNECTING";
} | {
    type: "STORAGE_CONNECTED";
} | {
    type: "STORAGE_ERROR";
    payload: ChatbotError;
} | {
    type: "STORAGE_UPLOAD_PROGRESS";
    payload: {
        fileId: string;
        progress: number;
    };
} | {
    type: "STORAGE_CONFIG_UPDATE";
    payload: Partial<StorageConfig>;
} | {
    /* --- global configuration --- */
    type: "CONFIG_UPDATE";
    payload: Partial<ChatbotConfig>;
} | {
    /* --- documents: replace-all, add, patch-by-id, remove-by-id --- */
    type: "DOCUMENTS_SET";
    payload: Document[];
} | {
    type: "DOCUMENT_ADD";
    payload: Document;
} | {
    type: "DOCUMENT_UPDATE";
    payload: {
        id: string;
        updates: Partial<Document>;
    };
} | {
    /* Payload is the document id to remove. */
    type: "DOCUMENT_REMOVE";
    payload: string;
} | {
    /* --- conversations --- */
    type: "CONVERSATIONS_SET";
    payload: Conversation[];
} | {
    type: "CONVERSATION_START";
    payload: Conversation;
} | {
    /* Payload is the conversation id to make current. */
    type: "CONVERSATION_SELECT";
    payload: string;
} | {
    type: "CONVERSATION_MESSAGE_ADD";
    payload: {
        conversationId: string;
        message: ConversationMessage;
    };
} | {
    /* --- cross-cutting flags and errors --- */
    type: "SET_PROCESSING";
    payload: boolean;
} | {
    type: "CLEAR_ERROR";
} | {
    type: "SET_ERROR";
    payload: ChatbotError;
};
/**
 * Imperative API exposed through the context alongside the state
 * (see `ChatbotContextType`). All long-running operations are async.
 */
export interface ChatbotContextMethods {
    /** Initializes all subsystems. */
    initialize: () => Promise<void>;
    /** Resets the chatbot. Whether subsystem connections are torn down is not visible here. */
    reset: () => Promise<void>;
    /* --- vector store --- */
    initializeVectorStore: (config: VectorStoreConfig) => Promise<void>;
    /** Indexes the given documents into the vector store. */
    addDocuments: (documents: Document[]) => Promise<void>;
    removeDocuments: (documentIds: string[]) => Promise<void>;
    /** Similarity search; `limit` caps the number of results. */
    searchSimilarDocuments: (query: string, limit?: number) => Promise<Document[]>;
    /* --- LLM --- */
    initializeLLM: (config: LLMConfig) => Promise<void>;
    /** Generates a completion; `context` documents are presumably injected into the prompt — TODO confirm. */
    generateResponse: (prompt: string, context?: Document[]) => Promise<string>;
    /** Retrieval-augmented generation: answer plus the source documents used. */
    generateRAGResponse: (query: string) => Promise<{
        response: string;
        sources: Document[];
    }>;
    /* --- storage --- */
    initializeStorage: (config: StorageConfig) => Promise<void>;
    /** Uploads a file and returns the resulting document record. */
    uploadDocument: (file: File, metadata?: Partial<Document["metadata"]>) => Promise<Document>;
    downloadDocument: (documentId: string) => Promise<Blob>;
    deleteDocument: (documentId: string) => Promise<void>;
    listDocuments: () => Promise<Document[]>;
    /* --- configuration --- */
    /** Applies a partial config patch (mirrors the `CONFIG_UPDATE` action). */
    updateConfig: (updates: Partial<ChatbotConfig>) => Promise<void>;
    /** Returns the current config synchronously. */
    exportConfig: () => ChatbotConfig;
    importConfig: (config: ChatbotConfig) => Promise<void>;
    /* --- conversations --- */
    startConversation: (title?: string) => Promise<Conversation>;
    selectConversation: (conversationId: string) => Promise<void>;
    /** Sends a user message in the current conversation and resolves with the resulting message. */
    sendMessage: (content: string) => Promise<ConversationMessage>;
    /** History for the given conversation; omitting the id presumably targets the current one — TODO confirm. */
    getConversationHistory: (conversationId?: string) => Promise<ConversationMessage[]>;
    deleteConversation: (conversationId: string) => Promise<void>;
    /* --- diagnostics --- */
    /** Synchronous status snapshot: overall plus per-subsystem. */
    getStatus: () => {
        overall: ChatbotStatus;
        subsystems: {
            vectorStore: ChatbotStatus;
            llm: ChatbotStatus;
            storage: ChatbotStatus;
        };
    };
    /** Clears `lastError` (mirrors the `CLEAR_ERROR` action). */
    clearError: () => void;
    /** Async liveness probe of each subsystem. */
    healthCheck: () => Promise<{
        vectorStore: boolean;
        llm: boolean;
        storage: boolean;
    }>;
}
/** Full context value: reducer state plus the imperative method API. */
export type ChatbotContextType = ChatbotContextState & ChatbotContextMethods;
/** Default `ChatbotConfig` used when no `initialConfig` override is given — presumably; verify in the implementation. */
export declare const defaultChatbotConfig: ChatbotConfig;
/** Pure reducer mapping `(state, action)` to the next state for every `ChatbotAction`. */
export declare function chatbotReducer(state: ChatbotContextState, action: ChatbotAction): ChatbotContextState;
/** React context carrying the combined state + methods; populated by `ChatbotProvider`. */
export declare const ChatbotContext: React.Context<ChatbotContextType>;
/** Props accepted by `ChatbotProvider`. */
export interface ChatbotProviderProps {
    children: ReactNode;
    /** Partial overrides merged over the defaults — presumably over `defaultChatbotConfig`; verify in the implementation. */
    initialConfig?: Partial<ChatbotConfig>;
    /** When true, presumably runs `initialize` on mount — TODO confirm default value. */
    autoInitialize?: boolean;
}
/** Hook returning the chatbot context value. Behavior outside a provider is not visible in this declaration. */
export declare function useChatbot(): ChatbotContextType;
/** Provider component that supplies `ChatbotContext` to its subtree. */
export declare function ChatbotProvider({ children, initialConfig, autoInitialize, }: ChatbotProviderProps): import("react/jsx-runtime").JSX.Element;
export default ChatbotProvider;
//# sourceMappingURL=ChatbotContext.d.ts.map