/*
 * openpipe
 * Version: (unspecified)
 * OpenPipe TypeScript SDK: Fine-Tuning, Inference, and Metrics for Production Apps
 * 62 lines (58 loc) • 3.3 kB
 * text/typescript
 */
import * as openai$1 from 'openai';
import { ChatCompletion, ChatCompletionChunk, ChatCompletionCreateParams, ChatCompletionParseParams, ParsedChatCompletion, ChatCompletionCreateParamsNonStreaming, ChatCompletionCreateParamsStreaming, ChatCompletionCreateParamsBase } from 'openai/resources/chat/completions';
import { WrappedStream } from './openai/streaming.cjs';
import { b as OpenPipeConfig, D as DefaultService, c as OpenPipeArgs, O as OpenPipeMeta, d as OpenPipeChatCompletion } from './shared-DlTR1Jth.cjs';
import { Stream } from 'openai/streaming';
import OpenPipe from './client.cjs';
import { RequestOptions } from 'openai/internal/request-options';
import { ExtractParsedContentFromParams } from 'openai/lib/parser';
import 'openai/resources/chat';
import 'openai/resources';
/**
 * Options accepted by the wrapped {@link OpenAI} client: everything the
 * official `openai` client accepts, plus an optional `openpipe` entry that is
 * either a raw {@link OpenPipeConfig} or an already-constructed
 * {@link OpenPipe} client instance.
 */
type ClientOptions = openai$1.ClientOptions & {
openpipe?: OpenPipeConfig | OpenPipe;
};
/**
 * Drop-in subclass of the official `openai` client whose `chat` resource is
 * replaced with {@link WrappedChat}, so chat completions flow through the
 * OpenPipe wrappers declared below.
 */
declare class OpenAI extends openai$1.OpenAI {
/** Accepts the standard OpenAI options plus the optional `openpipe` config. */
constructor({ openpipe, ...options }?: ClientOptions);
/** OpenPipe-aware replacement for the base client's `chat` resource. */
chat: WrappedChat;
}
/**
 * Chat resource whose `completions` member is the OpenPipe-aware
 * {@link WrappedCompletions} instead of the stock implementation.
 */
declare class WrappedChat extends openai$1.OpenAI.Chat {
/** OpenPipe-aware replacement for `chat.completions`. */
completions: WrappedCompletions;
/**
 * Injects the three clients the wrapped completions use: the OpenPipe
 * reporting client, the OpenAI-compatible client used for OpenPipe-hosted
 * completions, and a fallback OpenAI client.
 * NOTE(review): exact routing between these clients is implemented in the
 * JS source, not visible from this declaration — confirm there.
 */
setClients(opClient: OpenPipe, opCompletionClient: openai$1.OpenAI, fallbackClient: openai$1.OpenAI): void;
}
/**
 * Chat-completions resource that augments the stock OpenAI implementation
 * with OpenPipe request logging/reporting. Responses gain an optional
 * `openpipe` field carrying reporting metadata ({@link OpenPipeMeta}).
 */
declare class WrappedCompletions extends openai$1.OpenAI.Chat.Completions {
/** Underlying OpenAI client used for standard completions. */
openaiClient: openai$1.OpenAI;
/** OpenPipe reporting client; absent until configured (see WrappedChat.setClients). */
opClient?: OpenPipe;
/** OpenAI-compatible client for OpenPipe-hosted completions; optional. */
opCompletionClient?: openai$1.OpenAI;
/** Client used when falling back to plain OpenAI. */
fallbackClient: openai$1.OpenAI;
constructor(client: openai$1.OpenAI);
/**
 * Sends a report payload (the argument type of `DefaultService["report"]`)
 * to OpenPipe. Resolves when reporting completes; presumably best-effort —
 * confirm error behavior in the implementation.
 */
_report(args: Parameters<DefaultService["report"]>[0]): Promise<void>;
/**
 * Post-processes a completion result for reporting. For streaming input it
 * yields a {@link WrappedStream}; for a plain completion it yields the
 * completion object extended with `openpipe.reportingFinished`, a promise
 * callers can await to ensure the report was flushed.
 *
 * @param response    The raw completion or chunk stream from the API.
 * @param usedBody    The request body actually sent (for logging).
 * @param openpipeArgs OpenPipe-specific per-request arguments.
 * @param requestedAt Epoch-ms timestamp of when the request was issued —
 *                    TODO confirm unit against the implementation.
 */
_handleResponse(response: ChatCompletion | Stream<ChatCompletionChunk>, usedBody: ChatCompletionCreateParams, openpipeArgs: OpenPipeArgs, requestedAt: number): Promise<WrappedStream | {
openpipe: {
reportingFinished: Promise<void>;
};
id: string;
choices: Array<ChatCompletion.Choice>;
created: number;
model: string;
object: "chat.completion";
service_tier?: "auto" | "default" | "flex" | "scale" | "priority" | null;
system_fingerprint?: string;
usage?: openai$1.OpenAI.Completions.CompletionUsage;
}>;
/** Error-path counterpart of `_handleResponse`; reports the failed request. */
_handleResponseError(error: unknown, usedBody: ChatCompletionCreateParams, openpipeArgs: OpenPipeArgs, requestedAt: number): Promise<unknown>;
/**
 * Structured-output variant: parses the completion per the schema embedded
 * in `body`, returning the parsed completion plus optional OpenPipe metadata.
 */
parse<Params extends ChatCompletionParseParams, ParsedT = ExtractParsedContentFromParams<Params>>(body: Params, options?: RequestOptions): openai$1.APIPromise<ParsedChatCompletion<ParsedT> & {
openpipe?: OpenPipeMeta;
}>;
/**
 * Overloads mirror the upstream SDK: non-streaming → completion object,
 * streaming → chunk stream, base params → union of both. Each accepts the
 * extra {@link OpenPipeArgs} fields alongside the standard body.
 */
create(body: ChatCompletionCreateParamsNonStreaming & OpenPipeArgs, options?: RequestOptions): openai$1.APIPromise<OpenPipeChatCompletion & {
openpipe?: OpenPipeMeta;
}>;
create(body: ChatCompletionCreateParamsStreaming & OpenPipeArgs, options?: RequestOptions): openai$1.APIPromise<Stream<ChatCompletionChunk> & {
openpipe?: OpenPipeMeta;
}>;
create(body: ChatCompletionCreateParamsBase & OpenPipeArgs, options?: RequestOptions): openai$1.APIPromise<Stream<ChatCompletionChunk> | OpenPipeChatCompletion>;
}
/** Alias so the namespace below can re-export ClientOptions under its own scope. */
type openai_ClientOptions = ClientOptions;
/**
 * Namespace mirror of this module's public surface, re-exported as `o` for
 * bundler interop (lets consumers access the module's exports as an object).
 */
declare namespace openai {
export { type openai_ClientOptions as ClientOptions, OpenAI as default };
}
export { type ClientOptions, OpenAI as default, openai as o };