@convo-lang/convo-lang
The language of AI
import { SecretManager } from "@iyio/common";
import { ConvoCompletionCtx, ConvoCompletionService, ConvoModelInfo, FlatConvoConversationBase } from "./convo-types.js";
import { ChatCompletion, ChatCompletionCreateParamsNonStreaming } from './open-ai/resources/chat/index.js';
export interface BaseOpenAiConvoCompletionServiceOptions {
    /** API key used to authenticate requests to the completion API */
    apiKey?: string;
    /** Base URL of the OpenAI-compatible API */
    apiBaseUrl?: string;
    /** Endpoint used for chat completion requests */
    completionsEndpoint?: string;
    /** Secret manager used to look up credentials when an apiKey is not provided directly */
    secretManager?: SecretManager;
    /** Name of the secret to read from the secret manager */
    secretsName?: string;
    /** Statically defined models the service supports */
    models?: ConvoModelInfo[];
    /** Callback used to load model information asynchronously */
    getModelsAsync?: () => Promise<ConvoModelInfo[]>;
    /** Type identifier for the input the service accepts */
    inputType: string;
    /** Type identifier for the output the service produces */
    outputType: string;
    /** HTTP header the API key is sent in */
    apiKeyHeader?: string;
    /** Prefix added before the API key in the header value */
    apiKeyHeaderValuePrefix?: string | null;
    /** Additional headers sent with each request */
    headers?: Record<string, string>;
    /** Callback that can modify the request body and headers before a request is sent */
    updateRequest?: (requestBody: Record<string, any>, headers: Record<string, string | undefined>) => void;
    /** Custom callback used to perform the completion request */
    completeAsync?: (input: ChatCompletionCreateParamsNonStreaming, flat: FlatConvoConversationBase, apiKey: string | undefined, url: string) => Promise<ChatCompletion | undefined>;
    /** Marks the service as a fallback completion service */
    isFallback?: boolean;
    /** Unique id of the service */
    serviceId: string;
    /** Whether to log HTTP requests and responses for debugging purposes */
    logRequests?: boolean;
    /** Determines whether the service can complete a request for the given model */
    canComplete?: (model: string | undefined, flat: FlatConvoConversationBase) => boolean;
}
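A minimal construction sketch follows. It assumes the class is exported from the package root; the base URL, endpoint path, header names, service id, and input/output type strings are placeholder values, not defaults defined by the library.

import { BaseOpenAiConvoCompletionService } from "@convo-lang/convo-lang";

// Hypothetical configuration for an OpenAI-compatible endpoint. Every literal
// below (URL, header name, ids) is a placeholder, not a library default.
const service = new BaseOpenAiConvoCompletionService({
    serviceId: "example-openai-compatible",
    inputType: "text",
    outputType: "text",
    apiKey: process.env["EXAMPLE_LLM_API_KEY"],
    apiBaseUrl: "https://llm.example.com/v1",
    completionsEndpoint: "/chat/completions",
    apiKeyHeader: "Authorization",
    apiKeyHeaderValuePrefix: "Bearer ",
    headers: { "x-example-org": "acme" },
    logRequests: true,
    updateRequest: (requestBody) => {
        // adjust the outgoing request body just before it is sent
        requestBody["temperature"] = 0.2;
    },
});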
export declare class BaseOpenAiConvoCompletionService implements ConvoCompletionService<ChatCompletionCreateParamsNonStreaming, ChatCompletion> {
    readonly serviceId: string;
    readonly inputType: string;
    readonly outputType: string;
    private readonly secretManager?;
    private readonly apiKey?;
    private readonly apiBaseUrl;
    private readonly completionsEndpoint;
    private readonly secretsName?;
    private readonly models?;
    private readonly apiKeyHeader;
    private readonly apiKeyHeaderValuePrefix?;
    private readonly headers;
    private readonly isFallback;
    private readonly logRequests;
    private readonly updateRequest?;
    private readonly completeAsync?;
    private readonly _getModelsAsync?;
    private readonly _canComplete?;
    constructor({ apiKey, secretManager, secretsName, apiBaseUrl, completionsEndpoint, inputType, outputType, models, isFallback, apiKeyHeader, apiKeyHeaderValuePrefix, headers, serviceId, logRequests, completeAsync, updateRequest, getModelsAsync, canComplete }: BaseOpenAiConvoCompletionServiceOptions);
    canComplete(model: string | undefined, flat: FlatConvoConversationBase): boolean;
    private clientPromises;
    private getApiClientAsync;
    completeConvoAsync(input: ChatCompletionCreateParamsNonStreaming, flat: FlatConvoConversationBase, ctx: ConvoCompletionCtx<ChatCompletionCreateParamsNonStreaming, ChatCompletion>): Promise<ChatCompletion>;
    getModelsAsync(): Promise<ConvoModelInfo[]>;
}
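A usage sketch, reusing the service instance and import from the construction example above: getModelsAsync can be called directly, while completeConvoAsync takes a flattened Convo conversation and a completion context that are presumably produced by the Convo-Lang runtime rather than built by hand, so only model listing is shown here.

// Sketch: list the models the service reports. The ConvoModelInfo shape is
// defined in convo-types, so the objects are logged as-is rather than
// assuming specific field names.
async function listModels(svc: BaseOpenAiConvoCompletionService): Promise<void> {
    const models = await svc.getModelsAsync();
    console.log(`service reports ${models.length} model(s)`, models);
}

listModels(service).catch(console.error);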