@nlux/react
Version:
nlux React is a library for building conversational AI interfaces, with support for OpenAI, HuggingFace, and more.
1,123 lines (1,078 loc) • 44.8 kB
TypeScript
import * as react from 'react';
import { useEffect, JSX, RefObject, FC, ReactNode, ReactElement, DependencyList as DependencyList$1, ComponentClass, Context } from 'react';
import { ConversationOptions as ConversationOptions$2, MessageOptions as MessageOptions$2, StandardChatAdapter as StandardChatAdapter$2, EventsConfig as EventsConfig$1, DisplayOptions as DisplayOptions$1, ComposerOptions as ComposerOptions$1, ContextItems, AiContext as AiContext$1, ContextItemDataType, ContextAdapter, ContextAdapterBuilder } from '@nlux/core';
export { AiChatPropsInEvents, ComposerOptions, ConversationLayout, DisplayOptions, ErrorCallback, ErrorEventDetails, EventCallback, EventName, EventsConfig, HighlighterExtension, HistoryPayloadSize, IObserver, MessageReceivedCallback, MessageReceivedEventDetails, MessageRenderedCallback, MessageRenderedEventDetails, MessageSentCallback, MessageSentEventDetails, MessageStreamStartedCallback, MessageStreamStartedEventDetails, PreDestroyCallback, PreDestroyEventDetails, ReadyCallback, ReadyEventDetails, UpdatableAiChatProps } from '@nlux/core';
import * as react_jsx_runtime from 'react/jsx-runtime';
type UseEffectParams = Parameters<typeof useEffect>;
type EffectCallback = UseEffectParams[0];
type DependencyList = UseEffectParams[1];
type UseEffectReturn = ReturnType<typeof useEffect>;
declare function useDeepCompareEffect$1(callback: EffectCallback, dependencies: DependencyList): UseEffectReturn;
/**
* A single item in the conversation.
* This can be a message from the user, the assistant, or a system message.
*
* - `role: 'assistant'`: A message from the assistant (an AI response).
* The message type is generic and should match the generic type `AiMsg` used across the component.
* The `serverResponse` field is optional and can be used to store the full response received from the server.
*
* - `role: 'user'`: A message from the user.
* This is typically a string representing the prompt typed by the user.
*
* - `role: 'system'`: A system message.
 * This message is not displayed in the UI, but it will be included when sending the conversation history to the AI.
*/
type ChatItem$1<AiMsg = string> = {
role: 'assistant';
message: AiMsg;
serverResponse?: string | object | undefined;
} | {
role: 'user';
message: string;
} | {
role: 'system';
message: string;
};
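/*
 * A minimal sketch of a conversation history using this shape, suitable for the
 * `initialConversation` prop of <AiChat />. The messages themselves are illustrative.
 *
 * ```tsx
 * import {ChatItem} from '@nlux/react';
 *
 * const initialConversation: ChatItem[] = [
 *     {role: 'system', message: 'You are a concise assistant.'},
 *     {role: 'user', message: 'What is NLUX?'},
 *     {role: 'assistant', message: 'NLUX is a library for building conversational AI interfaces.'},
 * ];
 * ```
 */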
/**
 * A type representing a function to use as an HTML sanitizer.
 * This type can be passed to the markdown parser to sanitize generated
 * HTML before appending it to the document.
*/
type SanitizerExtension = (html: string) => string;
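/*
 * A sketch of a SanitizerExtension, assuming the third-party `dompurify` package is installed.
 * Any function matching the `(html: string) => string` signature can be used instead.
 *
 * ```tsx
 * import DOMPurify from 'dompurify';
 * import {SanitizerExtension} from '@nlux/react';
 *
 * const htmlSanitizer: SanitizerExtension = (html) => DOMPurify.sanitize(html);
 * ```
 */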
/**
* This type is used to indicate the mode in which the adapter should request data from the API.
*/
type DataTransferMode$1 = 'stream' | 'batch';
/**
* This interface is used to capture the stream of data being generated by the API and send it to the AiChat
* user interface as it's being generated.
*/
interface StreamingAdapterObserver$1<ChunkType = string> {
/**
* This method should be called by the adapter when it has completed sending data to the AiChat user interface.
* This will result in the AiChat component removing the loading indicator and resetting the conversation
* text input.
*/
complete(): void;
/**
* This method should be called by the adapter when it has an error to send to the AiChat user interface.
* This will result in the AiChat component displaying an error message to the user, resetting the
 * conversation text input, removing the loading indicator, and removing the sent message from the conversation.
 *
 * The error itself will be logged to the console but not shown to the user; a generic error message
 * will be displayed instead.
*
* @param {Error} error
*/
error(error: Error): void;
/**
* This method should be called by the adapter when it has new data to send to the AiChat user interface.
* @param {ChunkType} chunk being sent as part of the stream.
*/
next(chunk: ChunkType): void;
}
/**
* This type represents the information that the AiChat needs to know about an adapter.
* It is used to determine which adapters are available and what capabilities they have.
*/
type StandardAdapterInfo$1 = Readonly<{
id: string;
capabilities: Readonly<{
chat: boolean;
fileUpload: boolean;
textToSpeech: boolean;
speechToText: boolean;
}>;
}>;
/**
* This interface is used by standard adapters provided by nlux to communicate with the AiChat component.
*/
interface StandardChatAdapter$1<AiMsg = string> {
batchText(message: string, extras: ChatAdapterExtras<AiMsg>): Promise<string | object | undefined>;
get dataTransferMode(): DataTransferMode$1;
get id(): string;
get info(): StandardAdapterInfo$1;
preProcessAiBatchedMessage(message: string | object | undefined, extras: ChatAdapterExtras<AiMsg>): AiMsg | undefined;
preProcessAiStreamedChunk(chunk: string | object | undefined, extras: ChatAdapterExtras<AiMsg>): AiMsg | undefined;
streamText(message: string, observer: StreamingAdapterObserver$1<string | object | undefined>, extras: ChatAdapterExtras<AiMsg>): void;
}
/**
* The base interface for creating a new instance of a StandardChatAdapter.
* Adapter builders can extend this interface to add additional methods for configuration.
*/
interface ChatAdapterBuilder<AiMsg> {
create(): StandardChatAdapter$1<AiMsg>;
}
/**
* This interface is used by standard adapters provided by nlux to communicate with the AiChat component.
*/
interface StandardChatAdapter<AiMsg = string> {
batchText(message: string, extras: ChatAdapterExtras<AiMsg>): Promise<string | object | undefined>;
get dataTransferMode(): DataTransferMode$1;
get id(): string;
get info(): StandardAdapterInfo$1;
preProcessAiBatchedMessage(message: string | object | undefined, extras: ChatAdapterExtras<AiMsg>): AiMsg | undefined;
preProcessAiStreamedChunk(chunk: string | object | undefined, extras: ChatAdapterExtras<AiMsg>): AiMsg | undefined;
streamText(message: string, observer: StreamingAdapterObserver$1<string | object | undefined>, extras: ChatAdapterExtras<AiMsg>): void;
}
interface ComposerOptions {
/**
* Indicates whether the prompt input field should be focused when the prompt is shown.
* @default false
*/
autoFocus?: boolean;
/**
 * This will override the disabled state of the submit button when the composer is in the 'typing' status.
 * It has no effect in the 'submitting-prompt' and 'waiting' statuses, as the submit button is always
 * disabled in those statuses.
 *
 * @default Submit button is only enabled when the message is not empty.
*/
disableSubmitButton?: boolean;
/**
* Indicates whether the stop button should be hidden.
*
* @default false
*/
hideStopButton?: boolean;
/**
* The placeholder message to be displayed in the prompt input field when empty.
*/
placeholder?: string;
/**
* The shortcut to submit the prompt message.
*
* - `Enter`: The user can submit the prompt message by pressing the `Enter` key. In order to add a new line, the
* user can press `Shift + Enter`.
* - `CommandEnter`: When this is used, the user can submit the prompt message by pressing `Ctrl + Enter` on
* Windows/Linux or `Cmd + Enter` on macOS. In order to add a new line, the user can press `Enter`.
*
* @default 'Enter'
*/
submitShortcut?: 'Enter' | 'CommandEnter';
}
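/*
 * A sketch of composer configuration to pass as the `composerOptions` prop of <AiChat />.
 * The values shown are illustrative.
 *
 * ```tsx
 * import {ComposerOptions} from '@nlux/react';
 *
 * const composerOptions: ComposerOptions = {
 *     autoFocus: true,
 *     placeholder: 'Ask me anything',
 *     submitShortcut: 'CommandEnter',
 * };
 * ```
 */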
/**
* This represents a single item displayed in the chat UI while the conversation has not started yet.
*
*/
type ConversationStarter$1 = {
/**
* The prompt to type in the composer input and submit to start the conversation.
*/
prompt: string;
/**
* An optional label to display inside the conversation starter option button.
*/
label?: string;
/**
* An optional icon to display inside the conversation starter option button.
* This could either be a URL to an image or an HTML element.
*/
icon?: string | Readonly<HTMLElement>;
};
type HistoryPayloadSize = number | 'max';
type ConversationLayout = 'bubbles' | 'list';
interface ConversationOptions$1 {
/**
* Indicates whether the conversation should be scrolled to the bottom when a new message is added.
*
* @default true
*/
autoScroll?: boolean;
/**
* Suggested prompts to display in the UI to help the user start a conversation.
* Conversation starters are only displayed when the conversation is empty, and no conversation history is present.
*/
conversationStarters?: ConversationStarter$1[];
/**
* Indicates the number of messages from conversation history that should be sent to the backend with each message.
 * For custom adapters, the history will be available as part of the `extras.conversationHistory` attribute.
* For standard adapters, the history will be automatically handled by the adapter.
*
* By default, the entire conversation history is sent with each message.
* Set to `0` to disable sending conversation history with each message.
* Or set to a positive integer to send a specific number of messages.
*
* @default 'max'
*/
historyPayloadSize?: HistoryPayloadSize;
/**
* Indicates how items in the conversation should be displayed.
*
* - `list`: Chat items are displayed as a list with the AI responses underneath each user message.
* - `bubbles`: Items are displayed as chat bubbles with the prompts on the right and the AI messages on the left.
*
* @default 'bubbles'
*/
layout?: ConversationLayout;
/**
* Indicates whether the welcome message should be displayed when no conversation history is provided.
* The welcome message consists of:
* - The assistant's name and avatar
* - The assistant's tagline as configured in the `personaOptions`
*
* When no assistant persona is provided, the welcome message will be the NLUX logo.
*/
showWelcomeMessage?: boolean;
}
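/*
 * A sketch of conversation configuration with conversation starters, to pass as the
 * `conversationOptions` prop of <AiChat />. Prompts and labels are illustrative.
 *
 * ```tsx
 * import {ConversationOptions} from '@nlux/react';
 *
 * const conversationOptions: ConversationOptions = {
 *     layout: 'list',
 *     historyPayloadSize: 10, // only send the last 10 messages to the adapter
 *     conversationStarters: [
 *         {prompt: 'Summarize my last meeting', label: 'Summarize'},
 *         {prompt: 'Draft a follow-up email'},
 *     ],
 * };
 * ```
 */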
type DisplayOptions = {
/**
* The theme ID to use.
* This should be the ID of a theme that has been loaded into the page.
*/
themeId?: string;
/**
* Color scheme for the component.
* This can be 'light', 'dark', or 'auto'.
*
* If 'auto' is used, the component will automatically switch between 'light' and 'dark' based on the user's
* operating system preferences (if it can be detected), otherwise it will default to 'light'.
*
* @default 'auto'
*/
colorScheme?: 'light' | 'dark' | 'auto';
/**
* The width of the component.
*/
width?: number | string;
/**
* The height of the component.
*/
height?: number | string;
};
type Highlighter = (input: string, language: string) => string;
type HighlighterColorMode = 'dark' | 'light';
type CreateHighlighterOptions = {
language?: string;
colorMode?: HighlighterColorMode;
};
interface HighlighterExtension {
createHighlighter(options?: CreateHighlighterOptions): Highlighter;
highlightingClass(language?: string): string;
}
/**
 * Props for the custom function that renders a message sent by the server.
 * @template AiMsg The type of the message received from the AI. Defaults to string for standard NLUX adapters.
 *
 * @property {string} uid The unique identifier of the message.
 * @property {'stream' | 'batch'} dataTransferMode The data transfer mode used by the adapter.
 * @property {'streaming' | 'complete'} status The status of the message.
 *
 * @property {[AiMsg]} content The content of the message, provided as an array containing a single message.
 * @property {unknown[]} serverResponse The raw server response, provided as an array of objects or strings
 * representing the raw response received from the server.
*/
type ResponseRendererProps$1<AiMsg> = {
uid: string;
dataTransferMode: 'stream' | 'batch';
status: 'streaming' | 'complete';
content: [AiMsg];
serverResponse: unknown[];
};
type ResponseRenderer$1<AiMsg> = (props: ResponseRendererProps$1<AiMsg>) => HTMLElement | null;
type PromptRendererProps$1 = {
uid: string;
prompt: string;
};
type PromptRenderer$1 = (props: PromptRendererProps$1) => HTMLElement | null;
type MessageOptions$1<AiMsg = string> = {
/**
* Highlighter extension for code blocks inside messages.
*/
syntaxHighlighter?: HighlighterExtension;
/**
* Custom function to sanitize the HTML content of the messages. This function is called before any HTML content
* is rendered in the chat.
*
* @param {string} html
* @returns {string}
*/
htmlSanitizer?: SanitizerExtension;
/**
* Indicates the target of the links in the markdown messages.
* - 'blank': Open links in a new tab.
* - 'self': Open links in the same tab.
*
* @default 'blank'
*/
markdownLinkTarget?: 'blank' | 'self';
/**
* Indicates whether the copy button should be shown for code blocks.
*
* @default true
*/
showCodeBlockCopyButton?: boolean;
/**
* Indicates whether the streaming animation should be skipped.
*
* @default false
*/
skipStreamingAnimation?: boolean;
/**
* The interval in milliseconds at which new characters are added when a message is being generated and
* streamed to the user.
*
* @default 10
*/
streamingAnimationSpeed?: number;
/**
 * In streaming data transfer mode, this represents the wait time in milliseconds after the last chunk of data
* is received before marking the streaming as complete. This can be used to prevent the streaming from being
* marked as complete too early.
*
 * If set to 'never', the streaming will never automatically be marked as complete. It will then be up to the
* adapter to manually mark the streaming as complete by calling the `observer.complete()` method.
*
* @default 2000
*/
waitTimeBeforeStreamCompletion?: number | 'never';
/**
* Custom function to render the message received from the AI.
*/
responseRenderer?: ResponseRenderer$1<AiMsg>;
/**
* Custom function to render the message sent by the user.
*/
promptRenderer?: PromptRenderer$1;
/**
* Indicates whether the user should be able to edit the message after sending it.
* Editing a message will replace the original message and will remove all subsequent messages in the conversation.
*
* @default false
*/
editableUserMessages?: boolean;
};
interface AssistantPersona$1 {
avatar: string | Readonly<HTMLElement>;
name: string;
tagline?: string;
}
interface UserPersona$1 {
avatar: string | Readonly<HTMLElement>;
name: string;
}
interface PersonaOptions$1 {
assistant?: AssistantPersona$1;
user?: UserPersona$1;
}
declare const NLErrors: {
'data-transfer-mode-not-supported': string;
'no-data-transfer-mode-supported': string;
'connection-error': string;
'invalid-credentials': string;
'invalid-api-key': string;
'http-server-side-error': string;
'http-client-side-error': string;
'failed-to-load-content': string;
'failed-to-stream-content': string;
'failed-to-stream-server-component': string;
'failed-to-render-content': string;
};
type NLErrorId = keyof typeof NLErrors;
/**
* These are the props that are exposed to the user of the AiChat component.
*/
type AiChatProps$1<AiMsg = string> = {
adapter: ChatAdapter$1<AiMsg> | StandardChatAdapter<AiMsg>;
className?: string;
events?: EventsConfig<AiMsg>;
initialConversation?: ChatItem$1<AiMsg>[];
composerOptions?: ComposerOptions;
conversationOptions?: ConversationOptions$1;
messageOptions?: MessageOptions$1<AiMsg>;
personaOptions?: PersonaOptions$1;
displayOptions?: DisplayOptions;
};
/**
* When sending props to event callbacks, we exclude the adapter and events properties.
* This is because they are not serializable and because the events are already being called.
*/
type AiChatPropsInEvents<AiMsg = string> = Omit<AiChatProps$1<AiMsg>, 'adapter' | 'events'>;
type MessageSentEventDetails = {
uid: string;
message: string;
};
type MessageStreamStartedEventDetails = {
uid: string;
};
type ServerComponentStreamStartedEventDetails = {
uid: string;
};
type ServerComponentRenderedEventDetails = {
uid: string;
};
type MessageRenderedEventDetails<AiMsg = string> = {
uid: string;
message?: AiMsg;
};
type MessageReceivedEventDetails<AiMsg = string> = {
uid: string;
message: AiMsg;
};
type ErrorEventDetails = {
errorId: NLErrorId;
message: string;
errorObject?: Error;
};
type ReadyEventDetails<AiMsg = string> = {
aiChatProps: AiChatPropsInEvents<AiMsg>;
};
type PreDestroyEventDetails<AiMsg = string> = {
aiChatProps: AiChatPropsInEvents<AiMsg>;
conversationHistory: Readonly<ChatItem$1<AiMsg>[]>;
};
/**
* The callback for when an error event is emitted.
*
* @param errorDetails The details of the error event such as the error message and the error id.
*/
type ErrorCallback = (errorDetails: ErrorEventDetails) => void;
/**
* The callback for when a message is sent.
* This is called when the chat component sends the message to the adapter.
*
 * @param event The event details such as the uid of the message and the message that was sent.
*/
type MessageSentCallback = (event: MessageSentEventDetails) => void;
/**
* The callback for when a response starts streaming from the adapter.
* This is called when the chat component receives the first part of the response from the adapter.
* This does not mean that the message has been rendered yet. You should use the messageRendered event
* if you want to know when the message has been rendered.
*
* @param event The event details such as the uid of the message.
*/
type MessageStreamStartedCallback = (event: MessageStreamStartedEventDetails) => void;
/**
* The callback for when a server component stream starts.
* This is used with React Server Component adapters to trigger an event when the component is about
* to get mounted.
*/
type ServerComponentStreamStartedCallback = (event: ServerComponentStreamStartedEventDetails) => void;
/**
* The callback for when a server component is loaded and successfully rendered on the screen.
*
* @param event The event details such as the uid of the message.
*/
type ServerComponentRenderedCallback = (event: ServerComponentRenderedEventDetails) => void;
/**
* The callback for when a message is received.
* This is called when the chat component receives the full response from the adapter.
* This does not mean that the message has been rendered yet. You should use the messageRendered
* event if you want to know when the message has been rendered.
*
* @param event The event details such as the uid of the message and the message content.
*/
type MessageReceivedCallback<AiMsg = string> = (event: MessageReceivedEventDetails<AiMsg>) => void;
/**
* The callback for when a message is fully rendered on the screen.
* This event is only relevant when the user is using the NLUX markdown renderer.
* If the user is using a custom renderer and directly accessing the props.content property to render the message,
* this event will not be relevant.
*
* @param event The event details such as the uid of the message.
*/
type MessageRenderedCallback<AiMsg = string> = (event: MessageRenderedEventDetails<AiMsg>) => void;
/**
* The callback for when the chat component is ready.
* This is called when the chat component is fully initialized and ready to be used.
*
* @param readyDetails The details of the ready event such as the AiChatProps used to initialize the chat component.
*/
type ReadyCallback<AiMsg = string> = (readyDetails: ReadyEventDetails<AiMsg>) => void;
/**
* The callback for when the chat component is about to be destroyed.
* This is called when the chat component is about to be destroyed and unmounted from the DOM.
*
* @param preDestroyDetails The details of the pre-destroy event such as the AiChatProps used to initialize the chat
* component and the conversation history.
*/
type PreDestroyCallback<AiMsg = string> = (preDestroyDetails: PreDestroyEventDetails<AiMsg>) => void;
type EventsMap<AiMsg> = {
ready: ReadyCallback<AiMsg>;
preDestroy: PreDestroyCallback<AiMsg>;
messageSent: MessageSentCallback;
messageStreamStarted: MessageStreamStartedCallback;
messageReceived: MessageReceivedCallback<AiMsg>;
messageRendered: MessageRenderedCallback<AiMsg>;
serverComponentStreamStarted: ServerComponentStreamStartedCallback;
serverComponentRendered: ServerComponentRenderedCallback;
error: ErrorCallback;
};
type EventsConfig<AiMsg = string> = Partial<EventsMap<AiMsg>>;
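/*
 * A sketch of an events map wiring a few of the callbacks above, to pass as the `events`
 * prop of <AiChat />. The console statements are placeholders.
 *
 * ```tsx
 * import {EventsConfig} from '@nlux/react';
 *
 * const events: EventsConfig = {
 *     ready: ({aiChatProps}) => console.log('AiChat ready', aiChatProps),
 *     messageSent: ({uid, message}) => console.log('Sent', uid, message),
 *     messageReceived: ({uid, message}) => console.log('Received', uid, message),
 *     error: ({errorId, message}) => console.warn(`[${errorId}] ${message}`),
 * };
 * ```
 */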
/**
* This type represents the information that the AiChat needs to know about an adapter.
* It is used to determine which adapters are available and what capabilities they have.
*/
type StandardAdapterInfo = Readonly<{
id: string;
capabilities: Readonly<{
chat: boolean;
fileUpload: boolean;
textToSpeech: boolean;
speechToText: boolean;
}>;
}>;
/**
* A single item in the conversation.
* This can be a message from the user, the assistant, or a system message.
*
* - `role: 'assistant'`: A message from the assistant (an AI response).
* The message type is generic and should match the generic type `AiMsg` used across the component.
* The `serverResponse` field is optional and can be used to store the full response received from the server.
*
* - `role: 'user'`: A message from the user.
* This is typically a string representing the prompt typed by the user.
*
* - `role: 'system'`: A system message.
 * This message is not displayed in the UI, but it will be included when sending the conversation history to the AI.
*/
type ChatItem<AiMsg = string> = {
role: 'assistant';
message: AiMsg;
serverResponse?: string | object | undefined;
} | {
role: 'user';
message: string;
} | {
role: 'system';
message: string;
};
/**
* Additional data sent to the adapter when a message is sent.
*/
type ChatAdapterExtras$1<AiMsg = string> = {
/**
* This attribute contains the properties used with the AiChat component.
*/
aiChatProps: AiChatPropsInEvents<AiMsg>;
/**
* This attribute contains the conversation history.
 * It's only included if the `conversationOptions.historyPayloadSize` is set to a positive number or 'max'.
*/
conversationHistory?: ChatItem<AiMsg>[];
/**
* This attribute contains the unique identifier of the context instance.
* It's only included if a context instance is used with the AiChat component.
* This can be used to send the context ID to the API and get a response that is specific to the context instance.
*/
contextId?: string;
/**
* This contains the headers that implementers can use to send additional data such as authentication headers.
*/
headers?: Record<string, string>;
};
/**
* Additional data sent to the adapter when a message is sent.
*/
type ChatAdapterExtras<AiMsg = string> = {
/**
* This attribute contains the properties used with the AiChat component.
*/
aiChatProps: AiChatPropsInEvents<AiMsg>;
/**
* This attribute contains the conversation history.
 * It's only included if the `conversationOptions.historyPayloadSize` is set to a positive number or 'max'.
*/
conversationHistory?: ChatItem<AiMsg>[];
/**
* This attribute contains the unique identifier of the context instance.
* It's only included if a context instance is used with the AiChat component.
* This can be used to send the context ID to the API and get a response that is specific to the context instance.
*/
contextId?: string;
/**
* This contains the headers that implementers can use to send additional data such as authentication headers.
*/
headers?: Record<string, string>;
};
/**
* This type is used to indicate the mode in which the adapter should request data from the API.
*/
type DataTransferMode = 'stream' | 'batch';
/**
* The type for the function used to submit a message to the API in stream mode.
*
* @param {string} message
* @param {StreamingAdapterObserver} observer
* @param {ChatAdapterExtras} extras
*/
type StreamSend<AiMsg = string> = (message: string, observer: StreamingAdapterObserver<AiMsg>, extras: ChatAdapterExtras<AiMsg>) => void;
/**
* The type for the function used to submit a message to the API in batch mode.
* It should return a promise that resolves to the response from the API.
*
* @param `string` message
* @param `ChatAdapterExtras` extras
* @returns Promise<string>
*/
type BatchSend<AiMsg = string> = (message: string, extras: ChatAdapterExtras<AiMsg>) => Promise<AiMsg>;
/**
* This interface exposes methods that should be implemented by any chat adapter to connect the AiChat component
* to any API or AI backend. Chat adapters can be used to request data from the API in batch mode or stream mode.
*
* The difference between this and the `AssistAdapter` interface is that this adapter can only return a text response
* to be displayed to the user. It cannot return a task to be executed by the client. If you are using the `AiChat`
* component in co-pilot mode, you should use the `AssistAdapter` interface instead.
*/
interface ChatAdapter$1<AiMsg = string> {
/**
* This method should be implemented by any adapter that wants to request data from the API in batch mode.
* It should return a promise that resolves to the response from the API.
* Either this method or `streamText` (or both) should be implemented by any adapter.
*
* @param `string` message
* @param `ChatAdapterExtras` extras
* @returns Promise<string>
*/
batchText?: BatchSend<AiMsg>;
/**
* This method should be implemented by any adapter to be used with nlux.
* Either this method or `batchText` (or both) should be implemented by any adapter.
*
* @param {string} message
* @param {StreamingAdapterObserver} observer
* @param {ChatAdapterExtras} extras
*/
streamText?: StreamSend<AiMsg>;
}
/**
* This interface is used to capture the stream of data being generated by the API and send it to the AiChat
* user interface as it's being generated.
*/
interface StreamingAdapterObserver<ChunkType = string> {
/**
* This method should be called by the adapter when it has completed sending data to the AiChat user interface.
* This will result in the AiChat component removing the loading indicator and resetting the conversation
* text input.
*/
complete(): void;
/**
* This method should be called by the adapter when it has an error to send to the AiChat user interface.
* This will result in the AiChat component displaying an error message to the user, resetting the
 * conversation text input, removing the loading indicator, and removing the sent message from the conversation.
 *
 * The error itself will be logged to the console but not shown to the user; a generic error message
 * will be displayed instead.
*
* @param {Error} error
*/
error(error: Error): void;
/**
* This method should be called by the adapter when it has new data to send to the AiChat user interface.
* @param {ChunkType} chunk being sent as part of the stream.
*/
next(chunk: ChunkType): void;
}
/**
* The result of an import() call that loads a React Server Component (RSC) from the server.
*/
type StreamedServerComponent = {
default: Function;
};
type ServerComponentExecutionResult = any;
/**
 * The props that are passed to the server component when it's used with useAsRscAdapter().
* - `message`: The message that was sent to the API.
* - `extras`: The extras object that was passed to the adapter.
*/
type StreamedServerComponentProps = {
message: string;
extras: ChatAdapterExtras;
};
/**
* The function used to send a message to the backend hosting the React Server Component (RSC) and
* get a React Server Component in return.
*/
type StreamSendServerComponent<AiMsg = string> = (message: string, extras: ChatAdapterExtras<AiMsg>, events: {
onServerComponentReceived: () => void;
onError: (error: Error) => void;
}) => ServerComponentExecutionResult;
interface AssistantPersona {
avatar: string | JSX.Element;
name: string;
tagline?: string;
}
interface UserPersona {
avatar: string | JSX.Element;
name: string;
}
interface PersonaOptions {
assistant?: AssistantPersona;
user?: UserPersona;
}
/**
* This interface exposes methods that should be implemented by any chat adapter to connect the AiChat component
* to any API or AI backend. Chat adapters can be used to request data from the API in batch mode or stream mode.
*
* The difference between this and the `AssistAdapter` interface is that this adapter can only return a text response
* to be displayed to the user. It cannot return a task to be executed by the client. If you are using the `AiChat`
* component in co-pilot mode, you should use the `AssistAdapter` interface instead.
*/
interface ChatAdapter<AiMsg = string> {
/**
* This method should be implemented by any adapter that wants to request data from the API in batch mode.
* It should return a promise that resolves to the response from the API.
* Either this method or `streamText` (or both) should be implemented by any adapter.
*
* @param `string` message
* @param `ChatAdapterExtras` extras
* @returns Promise<string>
*/
batchText?: BatchSend<AiMsg>;
/**
* This method should be implemented by any adapter that wants to send a prompt to the API and get
* a React Server Component in return.
*
* @param `string` message
* @param `ChatAdapterExtras` extras
 * @returns ServerComponentExecutionResult
*/
streamServerComponent?: StreamSendServerComponent<AiMsg>;
/**
* This method should be implemented by any adapter to be used with nlux.
* Either this method or `batchText` (or both) should be implemented by any adapter.
*
* @param {string} message
* @param {StreamingAdapterObserver} observer
* @param {ChatAdapterExtras} extras
*/
streamText?: StreamSend<AiMsg>;
}
/**
* This represents a single item displayed in the chat UI while the conversation has not started yet.
*
*/
type ConversationStarter = {
/**
* The prompt to type in the composer input and submit to start the conversation.
*/
prompt: string;
/**
* An optional label to display inside the conversation starter option button.
*/
label?: string;
/**
* An optional icon to display inside the conversation starter option button.
* This could either be a URL to an image or a JSX element.
*/
icon?: string | JSX.Element;
};
type ConversationOptions = Omit<ConversationOptions$2, 'conversationStarters'> & {
/**
* Suggested prompts to display in the UI to help the user start a conversation.
* Conversation starters are only displayed when the conversation is empty, and no conversation history is present.
*/
conversationStarters?: ConversationStarter[];
};
/**
* AiChat API methods.
*/
type AiChatApi = {
/**
* API methods related to sending messages.
*/
composer: {
/**
* Types the message in the composer and sends it to the chat adapter automatically.
*
* @param {string} prompt
*/
send: (prompt: string) => void;
/**
* Cancel the request being sent.
* If a message is being sent, it will be cancelled.
 * If a message is being generated (in streaming mode), the generation will stop and the message will be deleted.
* If no message is being sent, this method does nothing.
*/
cancel: () => void;
};
/**
* API methods related to the conversation.
*/
conversation: {
/**
* Reset the conversation.
*/
reset: () => void;
};
};
/**
* Hook to get the AiChat API reference.
*
* @returns {AiChatApi}
*/
declare const useAiChatApi: () => AiChatApi;
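/*
 * A sketch of imperative control: create the API object with useAiChatApi(), hand it to
 * <AiChat /> via the `api` prop, then drive the composer from outside the chat. The
 * `adapter` variable is assumed to exist.
 *
 * ```tsx
 * import {AiChat, useAiChatApi} from '@nlux/react';
 *
 * const Assistant = () => {
 *     const api = useAiChatApi();
 *     return (
 *         <>
 *             <AiChat api={api} adapter={adapter} />
 *             <button onClick={() => api.composer.send('Give me a summary')}>Summarize</button>
 *             <button onClick={() => api.conversation.reset()}>Reset</button>
 *         </>
 *     );
 * };
 * ```
 */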
/**
* Props for the custom React component that renders a message sent by the server.
* @template AiMsg The type of the message received from the AI. Defaults to string for standard NLUX adapters.
*
* @property {string} uid The unique identifier of the message.
* @property {'stream' | 'batch'} dataTransferMode The data transfer mode used by the adapter.
* @property {'streaming' | 'complete'} status The status of the message. It's always 'complete' for batch mode.
*
* @property {AiMsg} content The content of the message. It's updated as the message is being streamed. The content is
* an array of messages when the data transfer mode is 'stream'. The content is an array with a single message when the
* data transfer mode is 'batch'.
*
* @property {unknown[]} serverResponse The raw server response. The server response is an array of objects or strings
* representing each raw chunk of the response received from the server. The server response is only provided with
 * NLUX standard adapters. For custom adapters, everything is handled through the content prop and serverResponse will be empty.
*
* @property {StreamedServerComponent} [serverComponent] The server component to render. This is only provided when
* <AiChat /> is used with a server-rendered UI component such as a React Server Component (RSC).
*
* @property {RefObject<never>} containerRef If you opt for the NLUX markdown renderer, you can use this reference to
* attach the rendered content to the DOM. Otherwise, you can ignore this prop and render the `content` directly.
*/
type ResponseRendererProps<AiMsg> = {
uid: string;
dataTransferMode: 'stream' | 'batch';
status: 'streaming' | 'complete';
contentType: 'text' | 'server-component';
content: AiMsg[];
serverComponent?: StreamedServerComponent;
serverResponse: unknown[];
containerRef?: RefObject<never>;
};
type ResponseRenderer<AiMsg> = FC<ResponseRendererProps<AiMsg>>;
type PromptRendererProps = {
uid: string;
prompt: string;
onResubmit: (newPrompt: string) => void;
};
type PromptRenderer = FC<PromptRendererProps>;
type ReactSpecificMessageOptions<AiMsg> = {
/**
* Custom React component to render the message received from the AI.
*/
responseRenderer?: ResponseRenderer<AiMsg>;
/**
* Custom React component to render the message sent by the user.
*/
promptRenderer?: PromptRenderer;
};
/**
* Options for a message in the conversation.
* We use all options from @nlux/core except the React-specific options
* defined in ReactSpecificMessageOptions.
*/
type MessageOptions<AiMsg = string> = Omit<MessageOptions$2<AiMsg>, 'responseRenderer' | 'promptRenderer'> & ReactSpecificMessageOptions<AiMsg>;
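/*
 * A sketch of message options with a custom response renderer. The renderer below joins the
 * streamed content itself; alternatively, a renderer can rely on the NLUX markdown renderer
 * and attach it via `containerRef`.
 *
 * ```tsx
 * import {MessageOptions, ResponseRenderer} from '@nlux/react';
 *
 * const MyResponse: ResponseRenderer<string> = (props) => (
 *     <div className="my-response">
 *         {props.content.join('')}
 *         {props.status === 'streaming' && <span className="cursor" />}
 *     </div>
 * );
 *
 * const messageOptions: MessageOptions = {
 *     markdownLinkTarget: 'self',
 *     streamingAnimationSpeed: 20,
 *     responseRenderer: MyResponse,
 * };
 * ```
 */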
/**
* Props for the AiChat React component.
*/
type AiChatProps<AiMsg = string> = {
/**
* The chat adapter to use. This is required and essential for the component to work.
* You can either provide a standard adapter from @nlux or create a custom adapter.
*/
adapter: ChatAdapter<AiMsg> | StandardChatAdapter$2<AiMsg> | ChatAdapterBuilder<AiMsg>;
/**
 * The API to use for submitting messages. This is used for imperative control of the chat,
 * to perform actions such as submitting messages from outside the chat component.
*/
api?: AiChatApi;
/**
* A map of event handlers.
*/
events?: EventsConfig$1<AiMsg>;
/**
* The class name to add to the root element of the component.
*/
className?: string;
/**
* The initial conversation history to display.
* This is not a reactive prop! Changing it after the component is mounted will not update the conversation.
*/
initialConversation?: ChatItem$1<AiMsg>[];
/**
* Display options, such as color scheme, width, etc.
*/
displayOptions?: DisplayOptions$1;
/**
* Options for the conversation.
*/
conversationOptions?: ConversationOptions;
/**
* Options related to a single message in the conversation.
*/
messageOptions?: MessageOptions<AiMsg>;
/**
* Options for the composer.
*/
composerOptions?: ComposerOptions$1;
/**
* Options for the persona.
*/
personaOptions?: PersonaOptions;
/**
* The children of the component, in case you want to render something inside the chat.
* Only NLUX UI overrides are accepted at this stage.
*/
children?: ReactNode | undefined;
};
declare const AiChat: <AiMsg>(props: AiChatProps<AiMsg>) => ReactElement;
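/*
 * A minimal usage sketch of the component. The stream adapter implementation is assumed to
 * live in a local module; the persona and display values are illustrative.
 *
 * ```tsx
 * import {AiChat, useAsStreamAdapter} from '@nlux/react';
 * import {send} from './my-stream-adapter'; // assumed StreamSend implementation
 *
 * const MyChat = () => {
 *     const adapter = useAsStreamAdapter(send);
 *     return (
 *         <AiChat
 *             adapter={adapter}
 *             personaOptions={{assistant: {name: 'Ava', avatar: 'https://example.com/ava.png'}}}
 *             displayOptions={{colorScheme: 'dark', height: 480}}
 *             composerOptions={{placeholder: 'How can I help you today?'}}
 *         />
 *     );
 * };
 * ```
 */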
/**
* A primitive to parse markdown using the same parser as the one used in markdown streams.
* This will also generate the appropriate HTML tags for code blocks and syntax highlighting.
*
* @param children
*/
declare const Markdown: ({ children }: MarkdownProps) => react_jsx_runtime.JSX.Element;
type MarkdownProps = {
/**
* The markdown content to be parsed, provided as a string or an array of strings
* inside the component.
*/
children: string | Array<string>;
};
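/*
 * A usage sketch of the Markdown primitive, rendering a static markdown string with the same
 * parser used for streamed messages.
 *
 * ```tsx
 * import {Markdown} from '@nlux/react';
 *
 * const Notice = () => (
 *     <Markdown>
 *         {'**Heads up:** responses may contain `code` blocks.'}
 *     </Markdown>
 * );
 * ```
 */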
type AiChatUIOverrides = {
Loader: ReactElement;
Greeting?: ReactElement;
};
/**
* Wrapper for possible UI components that can be overridden in the default NLUX chat components.
*/
declare const AiChatUI: {
Loader: react.FunctionComponent<{
children?: react.ReactNode | undefined;
}>;
Greeting: react.FunctionComponent<{
children?: react.ReactNode | undefined;
}>;
};
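/*
 * A sketch of overriding the default loader and greeting by nesting AiChatUI elements as
 * children of <AiChat /> (the only children accepted, per AiChatProps). The `adapter` and
 * the custom markup are assumed.
 *
 * ```tsx
 * import {AiChat, AiChatUI} from '@nlux/react';
 *
 * <AiChat adapter={adapter}>
 *     <AiChatUI.Loader>
 *         <span className="spinner">Loading...</span>
 *     </AiChatUI.Loader>
 *     <AiChatUI.Greeting>
 *         <span>Hi! How can I help today?</span>
 *     </AiChatUI.Greeting>
 * </AiChat>;
 * ```
 */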
/**
* Use the function provided as a batch adapter to send and receive messages in a single batch.
*
* @param send
* @param dependencies
*/
declare const useAsBatchAdapter: <AiMsg = string>(send: BatchSend<AiMsg>, dependencies?: DependencyList$1) => ChatAdapter<AiMsg>;
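/*
 * A sketch of a batch adapter built with this hook. The endpoint URL and response shape are
 * assumptions; the callback only has to resolve to the AI message (see BatchSend).
 *
 * ```tsx
 * import {useAsBatchAdapter} from '@nlux/react';
 *
 * // Inside a React component:
 * const adapter = useAsBatchAdapter(async (message, extras) => {
 *     const response = await fetch('https://example.com/api/chat', {
 *         method: 'POST',
 *         headers: {'Content-Type': 'application/json'},
 *         body: JSON.stringify({
 *             prompt: message,
 *             // Present when conversationOptions.historyPayloadSize is a positive number or 'max'
 *             history: extras.conversationHistory,
 *         }),
 *     });
 *     const {reply} = await response.json();
 *     return reply as string;
 * });
 * ```
 */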
/**
* Use the function provided as a stream adapter to send messages and receive responses in a stream of chunks.
*
* @param submit
* @param dependencies
*/
declare const useAsStreamAdapter: <AiMsg = string>(submit: StreamSend<AiMsg>, dependencies?: DependencyList$1) => ChatAdapter<AiMsg>;
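/*
 * A sketch of a stream adapter built with this hook, reading a text stream from an assumed
 * endpoint and forwarding it chunk by chunk through the StreamingAdapterObserver.
 *
 * ```tsx
 * import {useAsStreamAdapter} from '@nlux/react';
 *
 * // Inside a React component:
 * const adapter = useAsStreamAdapter(async (message, observer) => {
 *     try {
 *         const response = await fetch('https://example.com/api/chat-stream', {
 *             method: 'POST',
 *             body: JSON.stringify({prompt: message}),
 *         });
 *         const reader = response.body!.getReader();
 *         const decoder = new TextDecoder();
 *         while (true) {
 *             const {done, value} = await reader.read();
 *             if (done) break;
 *             observer.next(decoder.decode(value, {stream: true})); // push each chunk to the UI
 *         }
 *         observer.complete();
 *     } catch (error) {
 *         observer.error(error as Error);
 *     }
 * });
 * ```
 */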
/**
* This hook is used to create a ChatAdapter that uses a React Server Component (RSC) as the assistant message.
* The RSC is loaded asynchronously. The user prompt and the adapter extras are passed to the RSC as props.
*
* @param {Promise<StreamedServerComponent>} moduleLoadingPromise
* @param {React.ReactNode} loader
* @returns {ChatAdapter}
*/
declare const useAsRscAdapter: (moduleLoadingPromise: Promise<StreamedServerComponent>, loader?: ReactNode) => ChatAdapter;
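/*
 * A sketch of the RSC adapter hook. The dynamic import target is hypothetical; its default
 * export is expected to be a React Server Component that receives {message, extras} as props.
 *
 * ```tsx
 * import {AiChat, useAsRscAdapter} from '@nlux/react';
 *
 * const MyRscChat = () => {
 *     const adapter = useAsRscAdapter(
 *         import('./AssistantResponse'), // hypothetical RSC module
 *         <div>Loading...</div>,         // optional loader shown while the module loads
 *     );
 *     return <AiChat adapter={adapter} />;
 * };
 * ```
 */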
type AiContextProviderProps = {
initialItems?: ContextItems;
errorComponent?: FC<{
error?: string;
}> | ComponentClass<{
error?: string;
}>;
loadingComponent?: FC | ComponentClass;
children: ReactNode;
};
/**
* An object that represents the AI context.
* This object is created as a result of calling createAiContext().
*
* The Provider property is a React component that provides the AI context to the children.
* To be used as <aiContextInstance.Provider> Context Aware App ... </aiContextInstance.Provider>
*
* The ref property is a React context that can be used to access the React context value.
 * Do not use the ref property directly; the useAiContext() and useAiTask() hooks should be used instead.
*/
type AiContext = {
Provider: (props: AiContextProviderProps) => ReactNode;
ref: Context<AiContext$1>;
};
type UpdateContextItem = (itemValue: ContextItemDataType) => void;
type DiscardContextItem = () => void;
/**
* Use this hook to sync parts of the component state with the AI context.
* It will create a new AI context item and will keep it in sync with the AI context.
 * When the state item changes, the itemValue should be kept up to date.
* When the component is unmounted, the context item will be discarded.
*
* @param {AiContext} aiContext The AI context instance to use, created with createAiContext()
 * @param {string} itemDescription The description of the item. This will be used by LLMs to understand context.
* @param {ContextItemDataType} itemValue The item value to be synced. Changing this value will update the AI context.
*
* Usage example:
*
* ```tsx
* const MyComponent = () => {
* const [myStateItem, setMyStateItem] = useState('initial value');
* useAiContext(MyAiContext, 'Detailed description of my state item', myStateItem);
* return <div>...</div>;
* };
 * ```
 */
declare const useAiContext: (aiContext: AiContext, itemDescription: string, itemValue: ContextItemDataType) => void;
type CallbackArgType = object | string | number | boolean | symbol | null | undefined | void;
type CallbackFunction = (...args: CallbackArgType[]) => CallbackArgType;
/**
 * Use this hook to register a new task that can be triggered by the AI during <AiChat /> conversations.
* It will create a new AI context task and will keep it in sync with the AI context.
* The description is used by LLMs to understand the purpose of the task.
* The callback is the function that will be called when the task is triggered.
* The parametersDescription is used by LLMs to determine the value of each parameter to be passed to the task.
*
* @param {AiContext} aiContext The AI context instance to use, created with createAiContext()
* @param {string} taskDescription The description of the task. This will be used by LLMs to understand context.
* @param {Function} callback The function to be called when the task is triggered.
* @param {string[]} parametersDescription An array of descriptions for each parameter of the task.
*
* Usage example:
*
* ```tsx
* const MyComponent = () => {
* const taskCallback = useCallback((param1, param2) => {
* // Do something with the parameters
* }, []);
*
* useAiTask(
* MyAiContext, 'Description of the task',
* taskCallback, ['Description of the first parameter', 'Description of the second parameter']
* );
*
* return <div>...</div>;
* };
* ```
*/
declare const useAiTask: (aiContext: AiContext, taskDescription: string, callback: CallbackFunction, parametersDescription?: string[]) => void;
/**
* Creates a new AI context with a React context provider that can be used to sync application state
* with the backend for AI processing.
*
* The input is a context adapter that handles the communication with the backend. nlux provides several
* context adapters out of the box, but developers can also create their own adapters by implementing the
* ContextAdapter interface.
*
* Usage:
*
* At the root level of the application, create the AI context and wrap the app with the context provider:
* ```tsx
* const MyAiContext = createAiContext(contextAdapter);
*
* const App = () => (
* <MyAiContext.Provider>
* <MyApp/>
* </MyAiContext.Provider>
* );
* ```
*
* Then, in any component that needs to access the AI context, use the context reference:
*
* ```tsx
* useAiContext(MyAiContext, 'Description of the data', dataToSync);
* useAiTask(MyAiContext, 'Description of the task', callbackFunction, ['Description of the parameters']);
* ```
*
* @param {ContextAdapter | ContextAdapterBuilder} adapter
* @returns {AiContext}
*/
declare const createAiContext: (adapter: ContextAdapter | ContextAdapterBuilder) => AiContext;
declare const useDeepCompareEffect: typeof useDeepCompareEffect$1;
export { AiChat, type AiChatApi, type AiChatProps, AiChatUI, type AiChatUIOverrides, type AiContext, type AiContextProviderProps, type AssistantPersona, type BatchSend, type ChatAdapter, type ChatAdapterBuilder, type ChatAdapterExtras$1 as ChatAdapterExtras, type ChatItem$1 as ChatItem, type ConversationOptions, type ConversationStarter, type DataTransferMode, type DiscardContextItem, Markdown, type MessageOptions, type PersonaOptions, type PromptRenderer, type PromptRendererProps, type ResponseRenderer, type ResponseRendererProps, type SanitizerExtension, type StandardAdapterInfo, type StandardChatAdapter, type StreamSend, type StreamedServerComponent, type StreamedServerComponentProps, type StreamingAdapterObserver, type UpdateContextItem, type UserPersona, createAiContext, useAiChatApi, useAiContext, useAiTask, useAsBatchAdapter, useAsRscAdapter, useAsStreamAdapter, useDeepCompareEffect };