UNPKG

@samchon/openapi

Version:

Universal OpenAPI to LLM function calling schemas. Transform any Swagger/OpenAPI document into type-safe schemas for OpenAI, Claude, Qwen, and more.

1 line 9.45 kB
{"version":3,"file":"HttpLlm.mjs","sources":["../src/HttpLlm.ts"],"sourcesContent":["import { HttpMigration } from \"./HttpMigration\";\nimport { OpenApi } from \"./OpenApi\";\nimport { OpenApiV3 } from \"./OpenApiV3\";\nimport { OpenApiV3_1 } from \"./OpenApiV3_1\";\nimport { SwaggerV2 } from \"./SwaggerV2\";\nimport { HttpLlmComposer } from \"./composers/HttpLlmApplicationComposer\";\nimport { HttpLlmFunctionFetcher } from \"./http/HttpLlmFunctionFetcher\";\nimport { IHttpConnection } from \"./structures/IHttpConnection\";\nimport { IHttpLlmApplication } from \"./structures/IHttpLlmApplication\";\nimport { IHttpLlmFunction } from \"./structures/IHttpLlmFunction\";\nimport { IHttpMigrateApplication } from \"./structures/IHttpMigrateApplication\";\nimport { IHttpResponse } from \"./structures/IHttpResponse\";\nimport { ILlmFunction } from \"./structures/ILlmFunction\";\nimport { LlmDataMerger } from \"./utils/LlmDataMerger\";\n\n/**\n * LLM function calling application composer from OpenAPI document.\n *\n * `HttpLlm` is a module for composing LLM (Large Language Model) function\n * calling application from the {@link OpenApi.IDocument OpenAPI document}, and\n * also for LLM function call execution and parameter merging.\n *\n * At first, you can construct the LLM function calling application by the\n * {@link HttpLlm.application HttpLlm.application()} function. 
And then the LLM\n * has selected a {@link IHttpLlmFunction function} to call and composes its\n * arguments, you can execute the function by\n * {@link HttpLlm.execute HttpLlm.execute()} or\n * {@link HttpLlm.propagate HttpLlm.propagate()}.\n *\n * By the way, if you have configured the\n * {@link IHttpLlmApplication.IConfig.separate} option to separate the parameters\n * into human and LLM sides, you can merge these human and LLM sides' parameters\n * into one through {@link HttpLlm.mergeParameters HttpLlm.mergeParameters()}\n * before the actual LLM function call execution.\n *\n * @author Jeongho Nam - https://github.com/samchon\n */\nexport namespace HttpLlm {\n /* -----------------------------------------------------------\n COMPOSERS\n ----------------------------------------------------------- */\n /** Properties for the LLM function calling application composer. */\n export interface IApplicationProps {\n /** OpenAPI document to convert. */\n document:\n | OpenApi.IDocument\n | SwaggerV2.IDocument\n | OpenApiV3.IDocument\n | OpenApiV3_1.IDocument;\n\n /** Configuration for the LLM function calling schema conversion. */\n config?: Partial<IHttpLlmApplication.IConfig>;\n }\n\n /**\n * Convert OpenAPI document to LLM function calling application.\n *\n * Converts {@link OpenApi.IDocument OpenAPI document} or\n * {@link IHttpMigrateApplication migrated application} to the\n * {@link IHttpLlmApplication LLM function calling application}. Every\n * {@link OpenApi.IOperation API operations} in the OpenAPI document are\n * converted to the {@link IHttpLlmFunction LLM function} type, and they would\n * be used for the LLM function calling.\n *\n * If you have configured the {@link IHttpLlmApplication.IConfig.separate}\n * option, every parameters in the {@link IHttpLlmFunction} would be separated\n * into both human and LLM sides. 
In that case, you can merge these human and\n * LLM sides' parameters into one through {@link HttpLlm.mergeParameters}\n * before the actual LLM function call execution.\n *\n * @param props Properties for composition\n * @returns LLM function calling application\n */\n export const application = (\n props: IApplicationProps,\n ): IHttpLlmApplication => {\n // MIGRATE\n const migrate: IHttpMigrateApplication = HttpMigration.application(\n props.document,\n );\n return HttpLlmComposer.application({\n migrate,\n config: {\n reference: props.config?.reference ?? true,\n strict: props.config?.strict ?? false,\n separate: props.config?.separate ?? null,\n maxLength: props.config?.maxLength ?? 64,\n equals: props.config?.equals ?? false,\n },\n });\n };\n\n /* -----------------------------------------------------------\n FETCHERS\n ----------------------------------------------------------- */\n /** Properties for the LLM function call. */\n export interface IFetchProps {\n /** Application of the LLM function calling. */\n application: IHttpLlmApplication;\n\n /** LLM function schema to call. */\n function: IHttpLlmFunction;\n\n /** Connection info to the HTTP server. */\n connection: IHttpConnection;\n\n /** Input arguments for the function call. 
*/\n input: object;\n }\n\n /**\n * Execute the LLM function call.\n *\n * `HttpLlm.execute()` is a function executing the target\n * {@link OpenApi.IOperation API endpoint} with the connection information\n * and arguments composed by Large Language Model like OpenAI (+human\n * sometimes).\n *\n * By the way, if you've configured the\n * {@link IHttpLlmApplication.IConfig.separate}, so that the parameters are\n * separated to human and LLM sides, you have to merge these human and LLM\n * sides' parameters into one through {@link HttpLlm.mergeParameters}\n * function.\n *\n * For reference, if the target API endpoint responds with a non-200/201 status,\n * this would be considered as an error and the {@link HttpError} would be\n * thrown. If you don't want such a rule, you can use the\n * {@link HttpLlm.propagate} function instead.\n *\n * @param props Properties for the LLM function call\n * @returns Return value (response body) from the API endpoint\n * @throws HttpError when the API endpoint responds with a non-200/201 status\n */\n export const execute = (props: IFetchProps): Promise<unknown> =>\n HttpLlmFunctionFetcher.execute(props);\n\n /**\n * Propagate the LLM function call.\n *\n * `HttpLlm.propagate()` is a function propagating the target\n * {@link OpenApi.IOperation API endpoint} with the connection information\n * and arguments composed by Large Language Model like OpenAI (+human\n * sometimes).\n *\n * By the way, if you've configured the\n * {@link IHttpLlmApplication.IConfig.separate}, so that the parameters are\n * separated to human and LLM sides, you have to merge these human and LLM\n * sides' parameters into one through {@link HttpLlm.mergeParameters}\n * function.\n *\n * For reference, the propagation means always returning the response from the\n * API endpoint, even if the status is not 200/201. 
This is useful when you\n * want to handle the response by yourself.\n *\n * @param props Properties for the LLM function call\n * @returns Response from the API endpoint\n * @throws Error only when the connection has failed\n */\n export const propagate = (props: IFetchProps): Promise<IHttpResponse> =>\n HttpLlmFunctionFetcher.propagate(props);\n\n /* -----------------------------------------------------------\n MERGERS\n ----------------------------------------------------------- */\n /** Properties for the parameters' merging. */\n export interface IMergeProps {\n /** Metadata of the target function. */\n function: ILlmFunction;\n\n /** Arguments composed by the LLM. */\n llm: object | null;\n\n /** Arguments composed by the human. */\n human: object | null;\n }\n\n /**\n * Merge the parameters.\n *\n * If you've configured the {@link IHttpLlmApplication.IConfig.separate}\n * option, so that the parameters are separated to human and LLM sides, you\n * can merge these human and LLM sides' parameters into one through this\n * `HttpLlm.mergeParameters()` function before the actual LLM function call\n * execution.\n *\n * On the contrary, if you've not configured the\n * {@link IHttpLlmApplication.IConfig.separate} option, this function would\n * throw an error.\n *\n * @param props Properties for the parameters' merging\n * @returns Merged parameter values\n */\n export const mergeParameters = (props: IMergeProps): object =>\n LlmDataMerger.parameters(props);\n\n /**\n * Merge two values.\n *\n * If both values are objects, then combines them in the properties level.\n *\n * Otherwise, returns the latter value if it's not null, otherwise the former\n * value.\n *\n * - `return (y ?? 
x)`\n *\n * @param x Value X to merge\n * @param y Value Y to merge\n * @returns Merged value\n */\n export const mergeValue = (x: unknown, y: unknown): unknown =>\n LlmDataMerger.value(x, y);\n}\n"],"names":["HttpLlm","application","props","migrate","HttpMigration","document","HttpLlmComposer","config","reference","strict","separate","maxLength","equals","execute","HttpLlmFunctionFetcher","propagate","mergeParameters","LlmDataMerger","parameters","mergeValue","x","y","value"],"mappings":";;;;;;;;AAqCM,IAAWA;;CAAjB,SAAiBA;IAoCFA,QAAAC,cACXC;QAGA,MAAMC,UAAmCC,cAAcH,YACrDC,MAAMG;QAER,OAAOC,gBAAgBL,YAAY;YACjCE;YACAI,QAAQ;gBACNC,WAAWN,MAAMK,QAAQC,aAAa;gBACtCC,QAAQP,MAAMK,QAAQE,UAAU;gBAChCC,UAAUR,MAAMK,QAAQG,YAAY;gBACpCC,WAAWT,MAAMK,QAAQI,aAAa;gBACtCC,QAAQV,MAAMK,QAAQK,UAAU;;;;IA8CzBZ,QAAAa,UAAWX,SACtBY,uBAAuBD,QAAQX;IAwBpBF,QAAAe,YAAab,SACxBY,uBAAuBC,UAAUb;IAiCtBF,QAAAgB,kBAAmBd,SAC9Be,cAAcC,WAAWhB;IAgBdF,QAAAmB,aAAa,CAACC,GAAYC,MACrCJ,cAAcK,MAAMF,GAAGC;AAC1B,EA9KD,CAAiBrB,YAAAA,UAAO,CAAA;;"}