@langgraph-js/pro
Version: (not captured)
The Pro SDK for LangGraph - seamlessly integrate your AI agents with frontend interfaces and build complex AI workflows
31 lines (30 loc) • 1.57 kB
TypeScript
import { OpenAI as OpenAIClient } from "openai";
import { InteropZodType } from "@langchain/core/utils/types";
import { ResponseFormatJSONSchema } from "openai/resources";
import { ContentBlock, UsageMetadata } from "@langchain/core/messages";
/** The structured-output methods this SDK can use with a model. */
declare const SUPPORTED_METHODS: readonly ["jsonSchema", "functionCalling", "jsonMode"];
/** Union of the supported structured-output method names, derived from {@link SUPPORTED_METHODS}. */
type SupportedMethod = (typeof SUPPORTED_METHODS)[number];
/**
 * Get the structured output method for a given model. By default, it uses
 * `jsonSchema` if the model supports it, otherwise it uses `functionCalling`.
 *
 * @throws if the method is invalid, e.g. is not a string or invalid method is provided.
 * @param model - The model name.
 * @param method - The requested structured output method; validated against the supported methods.
 * @returns The structured output method.
 */
export declare function getStructuredOutputMethod(model: string, method: unknown): SupportedMethod;
/**
 * Build an OpenAI `response_format` object of type `"json_schema"` from an
 * interop Zod schema.
 *
 * @param zodSchema - The Zod schema to convert into the JSON schema payload.
 * @param name - The name assigned to the resulting JSON schema.
 * @param props - Remaining `JSONSchema` fields; `schema`, `strict`, and `name`
 *   are excluded from this parameter, so they are presumably derived from
 *   `zodSchema` and `name` by the implementation (not visible here).
 * @returns An object suitable for OpenAI's structured-output `response_format` option.
 */
export declare function interopZodResponseFormat(zodSchema: InteropZodType, name: string, props: Omit<ResponseFormatJSONSchema.JSONSchema, "schema" | "strict" | "name">): {
    json_schema: ResponseFormatJSONSchema.JSONSchema;
    type: "json_schema";
};
/**
 * Handle multi modal response content.
 *
 * @param content - The string content of the message.
 * @param messages - The raw messages of the response; typed `unknown`, so the
 *   implementation presumably narrows them at runtime.
 * @returns The new content of the message — either content blocks (when
 *   multi-modal output is present) or the original string form.
 */
export declare function handleMultiModalOutput(content: string, messages: unknown): ContentBlock[] | string;
/**
 * Convert an OpenAI Responses API usage payload into LangChain's
 * `UsageMetadata` shape.
 *
 * Internal helper (leading underscore); exported, but not intended as stable
 * public API.
 *
 * @param usage - Token usage from an OpenAI Responses API result; optional, so
 *   the implementation presumably returns zeroed/default metadata when absent
 *   — confirm against the implementation.
 * @returns The usage expressed as LangChain `UsageMetadata`.
 */
export declare function _convertOpenAIResponsesUsageToLangChainUsage(usage?: OpenAIClient.Responses.ResponseUsage): UsageMetadata;
export {};