@langgraph-js/pro
Version: (not captured in this snapshot)
The Pro SDK for LangGraph - seamlessly integrate your AI agents with frontend interfaces and build complex AI workflows
42 lines (41 loc) • 2.8 kB
TypeScript
import { OpenAI as OpenAIClient } from "openai";
import { CallbackManagerForLLMRun } from "@langchain/core/callbacks/manager";
import { type BaseMessage, BaseMessageChunk } from "@langchain/core/messages";
import { ChatGenerationChunk, type ChatResult } from "@langchain/core/outputs";
import { BaseChatOpenAICallOptions } from "@langchain/openai";
import { BaseChatOpenAI } from "@langchain/openai";
/**
 * Per-call options accepted by {@link ChatOpenAICompletions}.
 *
 * Intentionally empty: it currently adds nothing beyond
 * {@link BaseChatOpenAICallOptions}, but exists as a distinct interface so
 * completions-specific options can be added later without breaking callers.
 */
export interface ChatOpenAICompletionsCallOptions extends BaseChatOpenAICallOptions {
}
type ChatCompletionsInvocationParams = Omit<OpenAIClient.Chat.Completions.ChatCompletionCreateParams, "messages">;
/**
 * OpenAI Chat Completions API implementation.
 *
 * Supports DeepSeek and other reasoning models exposed through the
 * Completions-style endpoint. This is a declaration file: method bodies
 * live in the compiled implementation.
 * @internal
 */
export declare class ChatOpenAICompletions<CallOptions extends ChatOpenAICompletionsCallOptions = ChatOpenAICompletionsCallOptions> extends BaseChatOpenAI<CallOptions> {
/**
 * Builds the request parameters sent to the API — everything except
 * `messages` (see {@link ChatCompletionsInvocationParams}).
 * @param options - Parsed per-call options.
 * @param extra - `streaming` selects the streaming vs. non-streaming
 * parameter shape.
 * @internal
 */
invocationParams(options?: this["ParsedCallOptions"], extra?: {
streaming?: boolean;
}): ChatCompletionsInvocationParams;
/** Runs a single (non-streaming) chat completion and resolves with the full {@link ChatResult}. */
_generate(messages: BaseMessage[], options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): Promise<ChatResult>;
/** Streaming counterpart of `_generate`: yields one {@link ChatGenerationChunk} per delta received. */
_streamResponseChunks(messages: BaseMessage[], options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;
/**
 * Low-level call to the Completions API (retry behavior per the client
 * configuration — see implementation). Streaming overload: resolves with
 * an async iterable of raw {@link OpenAIClient.Chat.Completions.ChatCompletionChunk}s.
 */
completionWithRetry(request: OpenAIClient.Chat.ChatCompletionCreateParamsStreaming, requestOptions?: OpenAIClient.RequestOptions): Promise<AsyncIterable<OpenAIClient.Chat.Completions.ChatCompletionChunk>>;
/** Non-streaming overload: resolves with the complete {@link OpenAIClient.Chat.Completions.ChatCompletion}. */
completionWithRetry(request: OpenAIClient.Chat.ChatCompletionCreateParamsNonStreaming, requestOptions?: OpenAIClient.RequestOptions): Promise<OpenAIClient.Chat.Completions.ChatCompletion>;
/**
 * Converts one raw streaming delta into a LangChain {@link BaseMessageChunk}.
 * @param delta - The `delta` object from a streamed chunk.
 * @param rawResponse - The chunk the delta came from.
 * @param defaultRole - Role to assume when the delta omits one.
 * @deprecated This function was hoisted into a publicly accessible function
 * from a different export, but to maintain backwards compatibility with chat
 * models that depend on ChatOpenAICompletions, it is kept here as an
 * overridable method. It will be removed in a future release.
 */
protected _convertCompletionsDeltaToBaseMessageChunk(delta: Record<string, any>, rawResponse: OpenAIClient.Chat.Completions.ChatCompletionChunk, defaultRole?: OpenAIClient.Chat.ChatCompletionRole): BaseMessageChunk;
/**
 * Converts a complete API response message into a LangChain {@link BaseMessage}.
 * @param message - The assistant message from the API response.
 * @param rawResponse - The full completion the message came from.
 * @deprecated This function was hoisted into a publicly accessible function
 * from a different export, but to maintain backwards compatibility with chat
 * models that depend on ChatOpenAICompletions, it is kept here as an
 * overridable method. It will be removed in a future release.
 */
protected _convertCompletionsMessageToBaseMessage(message: OpenAIClient.ChatCompletionMessage, rawResponse: OpenAIClient.ChatCompletion): BaseMessage;
}
export {};