@samchon/openapi
Universal OpenAPI to LLM function calling schemas. Transform any Swagger/OpenAPI document into type-safe schemas for OpenAI, Claude, Qwen, and more.
import { ILlmFunction } from "./ILlmFunction";
import { ILlmSchema } from "./ILlmSchema";
import { IValidation } from "./IValidation";
/**
* Application of LLM function calling.
*
* `ILlmApplication` is a data structure representing a collection of
* {@link ILlmFunction LLM function calling schemas}, composed from a native
* TypeScript class (or interface) type by the `typia.llm.application<App>()`
* function.
*
* Also, some parameters (or their nested properties) may have to be composed
* by a human rather than by the LLM; file uploading features or sensitive
* information such as a secret key (password) are typical examples. In that
* case, you can separate the function parameters into LLM and human sides by
* configuring the {@link ILlmApplication.IConfig.separate} property. The
* separated parameters are assigned to the {@link ILlmFunction.separated}
* property.
*
* For reference, once both the LLM and the human have filled in their
* parameter values, you can merge them by calling the
* {@link HttpLlm.mergeParameters} function. In other words, if you've
* configured the {@link ILlmApplication.IConfig.separate} property, you must
* merge the separated parameters before executing the function call.
*
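* @example
* A minimal consumption sketch, not part of this declaration: the helper
* below (`composeArguments`) is hypothetical, and the `{ function, llm,
* human }` props shape passed to {@link HttpLlm.mergeParameters} is an
* assumption to verify against that function's own documentation.
*
* ```typescript
* import { HttpLlm, ILlmApplication, ILlmFunction } from "@samchon/openapi";
*
* const composeArguments = (
*   app: ILlmApplication,
*   name: string,
*   llmArguments: object, // values filled by the LLM
*   humanArguments: object, // values filled by a human (e.g. file uploads)
* ): object | undefined => {
*   // find the target function schema by its name
*   const func: ILlmFunction | undefined = app.functions.find(
*     (f) => f.name === name,
*   );
*   if (func === undefined) return undefined;
*
*   // if `config.separate` was configured, merge both sides
*   // before the actual function call execution
*   return app.config.separate !== null
*     ? HttpLlm.mergeParameters({
*         function: func,
*         llm: llmArguments,
*         human: humanArguments,
*       })
*     : llmArguments;
* };
* ```
*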
* @author Jeongho Nam - https://github.com/samchon
* @reference https://platform.openai.com/docs/guides/function-calling
*/
export interface ILlmApplication<Class extends object = any> {
/**
* List of function metadata.
*
* Each {@link ILlmFunction} element in this list can be used for the LLM
* function call.
*/
functions: ILlmFunction[];
/** Configuration for the application. */
config: ILlmApplication.IConfig<Class>;
/**
* Class type, the source of the LLM application.
*
* This property exists only for generic type inference, and its value is
* always `undefined`.
*/
__class?: Class | undefined;
}
export namespace ILlmApplication {
/** Configuration for application composition. */
export interface IConfig<Class extends object = any>
extends ILlmSchema.IConfig {
/**
* Separator function for the parameters.
*
* When composing parameter arguments through an LLM function call, there
* can be cases where some parameters must be composed by a human, or where
* the LLM cannot understand a parameter.
*
* For example, if a parameter type has
* {@link ILlmSchema.IString.contentMediaType} configured, which indicates
* file uploading, it must be composed by a human, not by the LLM (Large
* Language Model).
*
* In that case, if you configure this property with a predicate function
* that determines whether a schema value must be composed by a human, the
* parameters will be separated into two parts:
*
* - {@link ILlmFunction.separated.llm}
* - {@link ILlmFunction.separated.human}
*
* When writing the function, note that returning `true` means the value is
* to be composed by a human, and `false` means it is to be composed by the
* LLM. Also, when inspecting a schema, it is better to utilize features
* like {@link LlmTypeChecker}.
*
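* @example
* A minimal sketch of such a predicate, assuming every string schema that
* declares a `contentMediaType` (e.g. file uploads) should be composed by a
* human; real code would rather rely on {@link LlmTypeChecker} like
* utilities than on the hand-written property checks below.
*
* ```typescript
* import { ILlmSchema } from "@samchon/openapi";
*
* const separate = (schema: ILlmSchema): boolean => {
*   // `true`  -> the value must be composed by a human
*   // `false` -> the value is composed by the LLM
*   const s = schema as any; // structural peek; prefer type-checker utilities
*   return s.type === "string" && s.contentMediaType !== undefined;
* };
* ```
*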
* @default null
* @param schema Schema to be separated.
* @returns Whether the schema value must be composed by a human or not.
*/
separate: null | ((schema: ILlmSchema) => boolean);
/**
* Custom validation functions for specific class methods.
*
* The `validate` property allows you to provide custom validation functions
* that will replace the default validation behavior for specific methods
* within the application class. When specified, these custom validators
* take precedence over the standard type validation generated by
* `typia.llm.application()`.
*
* This feature is particularly useful when you need to:
*
* - Implement business logic validation beyond type checking
* - Add custom constraints that cannot be expressed through type annotations
* - Provide more specific error messages for AI agents
* - Validate dynamic conditions based on runtime state
*
* Each validation function receives the same arguments as its corresponding
* method and must return an {@link IValidation} result. On validation
* success, it should return `{ success: true, data }`. On failure, it
* should return `{ success: false, data, errors }` with detailed error
* information that helps AI agents understand and correct their mistakes.
*
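* @example
* A minimal sketch with a hypothetical `create` method whose single
* parameter is `{ title: string }`; imports assume the package root
* re-exports {@link IValidation}, and the error layout follows the
* `{ success, data, errors }` structure described above.
*
* ```typescript
* import { IValidation } from "@samchon/openapi";
*
* const validate = {
*   create: (input: unknown): IValidation<{ title: string }> => {
*     const data = input as { title?: unknown };
*     if (typeof data?.title === "string" && data.title.length !== 0)
*       return { success: true, data: data as { title: string } };
*     return {
*       success: false,
*       data: input,
*       errors: [
*         {
*           path: "$input.title",
*           expected: "string (non-empty)",
*           value: data?.title,
*         },
*       ],
*     };
*   },
* };
* ```
*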
* @default null
*/
validate: null | Partial<ILlmApplication.IValidationHook<Class>>;
}
/**
* Type for custom validation function hooks.
*
* `IValidationHook` defines the structure for custom validation functions
* that can be provided for each method in the application class. It creates a
* mapped type where each property corresponds to a method in the class, and
* the value is a validation function for that method's parameters.
*
* The validation hook functions:
*
* - Receive the same argument type as the original method
* - Must return an {@link IValidation} result indicating success or failure
* - Replace the default type validation when specified
* - Enable custom business logic and runtime validation
*
* Type constraints:
*
* - Only methods (functions) from the class can have validation hooks
* - Non-function properties are typed as `never` and cannot be validated
* - The validation function must match the method's parameter signature
*
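* @example
* A type-level sketch with a hypothetical class: the method receives a hook
* whose `Argument` is inferred from its parameter, while the non-function
* property resolves to `never` and is simply omitted.
*
* ```typescript
* import { ILlmApplication, IValidation } from "@samchon/openapi";
*
* class CounterService {
*   public count: number = 0; // non-function property -> `never`, cannot be hooked
*   public increase(props: { step: number }): number {
*     return (this.count += props.step);
*   }
* }
*
* const hooks: ILlmApplication.IValidationHook<CounterService> = {
*   // `Argument` is inferred as `{ step: number }` from the method signature
*   increase: (input: unknown): IValidation<{ step: number }> =>
*     typeof (input as any)?.step === "number"
*       ? { success: true, data: input as { step: number } }
*       : {
*           success: false,
*           data: input,
*           errors: [
*             {
*               path: "$input.step",
*               expected: "number",
*               value: (input as any)?.step,
*             },
*           ],
*         },
* };
* ```
*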
* @template Class The application class type containing methods to validate
*/
export type IValidationHook<Class extends object> = {
[K in keyof Class]?: Class[K] extends (args: infer Argument) => unknown
? (input: unknown) => IValidation<Argument>
: never;
};
}