/**
 * @objectiveai/langchain
 * Objective AI integrations for LangChain.js.
 *
 * Version: (not captured — TODO: fill in the published package version)
 * Package stats: 25 lines (24 loc) • 1.48 kB • TypeScript
 */
import { CallbackManagerForLLMRun } from "@langchain/core/callbacks/manager";
import { BaseChatModel, BaseChatModelCallOptions, BaseChatModelParams } from "@langchain/core/language_models/chat_models";
import { AIMessageChunk, BaseMessage } from "@langchain/core/messages";
import { ChatResult } from "@langchain/core/outputs";
import { Query } from "objectiveai";
import { type ClientOptions } from "openai";
/**
 * Per-invocation call options for {@link QueryObjectiveAI}.
 *
 * Extends LangChain's `BaseChatModelCallOptions` with an optional set of
 * Objective AI chat-completion parameters. The `messages` field is omitted
 * because LangChain supplies the messages separately at call time.
 */
export interface QueryOptions extends BaseChatModelCallOptions {
/** Optional overrides for the chat-completion request (everything except `messages`). */
chat_completion_create_params?: Omit<Query.Completions.Request.ChatCompletionCreateParamsNonStreaming, "messages">;
}
/**
 * Constructor parameters for {@link QueryObjectiveAI}.
 *
 * Unlike {@link QueryOptions}, `chat_completion_create_params` is required
 * here: it supplies the model's baseline request configuration.
 */
export interface BaseQueryModelParams extends BaseChatModelParams {
/** Baseline chat-completion request parameters (everything except `messages`). */
chat_completion_create_params: Omit<Query.Completions.Request.ChatCompletionCreateParamsNonStreaming, "messages">;
/** OpenAI client configuration (e.g. API key, base URL) — presumably used to reach the Objective AI endpoint; confirm against the implementation. */
openai: ClientOptions;
}
/**
 * LangChain chat model backed by the Objective AI Query API.
 *
 * Ambient declaration only — implementation lives in the compiled package.
 * Streams/chunks are typed as `AIMessageChunk`, per the `BaseChatModel`
 * type parameters.
 */
export declare class QueryObjectiveAI extends BaseChatModel<QueryOptions, AIMessageChunk> {
/** Baseline chat-completion request parameters, as passed to the constructor. */
chat_completion_create_params: Omit<Query.Completions.Request.ChatCompletionCreateParamsNonStreaming, "messages">;
/** OpenAI client configuration, as passed to the constructor. */
openai: ClientOptions;
constructor(fields: BaseQueryModelParams);
/** Identifier LangChain uses for this model type (exact value not visible here). */
_llmType(): string;
/**
 * Parameters reported for tracing/serialization.
 * NOTE(review): the declared return type ignores `_options`; whether
 * per-call overrides are merged cannot be determined from this file.
 */
invocationParams(_options?: this["ParsedCallOptions"]): {
chat_completion_create_params: Omit<Query.Completions.Request.ChatCompletionCreateParamsNonStreaming, "messages">;
openai: ClientOptions;
};
/**
 * Core generation hook invoked by LangChain with the conversation
 * `messages` and parsed call `options`; resolves to a `ChatResult`.
 * Implementation (and use of `_runManager` for callbacks) is not shown here.
 */
_generate(messages: BaseMessage[], options: this["ParsedCallOptions"], _runManager?: CallbackManagerForLLMRun): Promise<ChatResult>;
}