/**
 * @agentica/core
 * Agentic AI Library specialized in LLM Function Calling
 * (92 lines / 91 loc • 3.15 kB, TypeScript)
 */
import type { ILlmSchema } from "@samchon/openapi";
import type { IMicroAgenticaExecutor } from "./IMicroAgenticaExecutor";
import type { IMicroAgenticaSystemPrompt } from "./IMicroAgenticaSystemPrompt";
/**
 * Configuration for Micro Agentic Agent.
 *
 * `IMicroAgenticaConfig` is an interface that defines the configuration
 * properties of the {@link MicroAgentica}. With this configuration, you
 * can set the user's {@link locale}, {@link timezone}, and some of
 * {@link systemPrompt system prompts}.
 *
 * @author Samchon
 */
export interface IMicroAgenticaConfig<Model extends ILlmSchema.Model> {
/**
 * Agent executor.
 *
 * Executor function of Agentic AI's iteration plan to internal agents
 * running by the {@link Agentica.conversate} function.
 *
 * If you want to customize the agent execution plan, you can do it
 * by assigning your own logic, in whole or in part, to this property.
 * When customizing it, it would be better to reference the
 * {@link ChatGptAgent.execute} function.
 *
 * @param ctx Context of the agent
 * @returns List of prompts generated by the executor
 * @default ChatGptAgent.execute
 */
executor?: undefined | Partial<IMicroAgenticaExecutor<Model>>;
/**
 * System prompt messages.
 *
 * System prompt messages if you want to customize the system prompt
 * messages for each situation.
 */
systemPrompt?: IMicroAgenticaSystemPrompt<Model>;
/**
 * Locale of the A.I. chatbot.
 *
 * If you configure this property, the A.I. chatbot will converse in
 * the given locale. You can get the locale value by
 *
 * - Browser: `navigator.language`
 * - NodeJS: `process.env.LANG.split(".")[0]`
 *
 * @default your_locale
 */
locale?: string;
/**
 * Timezone of the A.I. chatbot.
 *
 * If you configure this property, the A.I. chatbot will consider the
 * given timezone. You can get the timezone value by
 * `Intl.DateTimeFormat().resolvedOptions().timeZone`.
 *
 * @default your_timezone
 */
timezone?: string;
/**
 * Retry count.
 *
 * If LLM function calling composed arguments are invalid,
 * the A.I. chatbot will retry to call the function with
 * the modified arguments.
 *
 * Note that if you configure it to 0 or 1, the A.I. chatbot
 * will not retry the LLM function calling for correcting the
 * arguments.
 *
 * @default 3
 */
retry?: number;
/**
 * Backoff strategy.
 *
 * If OpenAI SDK fails to connect LLM API Server, this backoff factor
 * would be used to retry for the next connection.
 *
 * If the function throws an error, the retry is stopped. Otherwise,
 * the returned number of milliseconds is waited before the next retry
 * attempt continues.
 *
 * @default (props) => throw props.error
 * @returns The number of milliseconds to wait before the next retry
 * @throws {Error} If the function wants to stop the retry, it can throw an error
 */
backoffStrategy?: (props: {
count: number;
error: unknown;
}) => number;
}