/**
 * openpipe — OpenPipe TypeScript SDK: fine-tuning, inference, and metrics
 * for production apps.
 *
 * Generated type declarations (78 lines, 73 loc, 3.8 kB), TypeScript.
 */
import * as openai_resources from 'openai/resources';
import * as openai$1 from 'openai';
import * as Core from 'openai/core';
import { ChatCompletion, ChatCompletionChunk, ChatCompletionCreateParams, ChatCompletionCreateParamsNonStreaming, ChatCompletionCreateParamsStreaming, ChatCompletionCreateParamsBase } from 'openai/resources/chat/completions';
import { WrappedStream } from './openai/streaming.js';
import { O as OpenPipeMeta, b as OpenPipeConfig, D as DefaultService, c as OpenPipeArgs, d as OpenPipeChatCompletion } from './shared-CaW1D4kA.js';
import { Stream } from 'openai/streaming';
import OpenPipe from './client.js';
import { Beta } from 'openai/resources/beta/beta';
import { Completions, ChatCompletionParseParams, ParsedChatCompletion } from 'openai/resources/beta/chat/completions';
import { ExtractParsedContentFromParams } from 'openai/lib/parser';
import { Chat } from 'openai/resources/beta/chat/chat';
import 'openai/resources/chat';
/**
 * Drop-in replacement for the OpenAI SDK's `Beta` resource that swaps in
 * a `WrappedBetaChat`, so beta chat completions flow through OpenPipe's
 * instrumentation.
 */
declare class WrappedBeta extends Beta {
    constructor(client: openai$1.OpenAI);
    /** Beta chat namespace backed by OpenPipe-aware completions. */
    chat: WrappedBetaChat;
}
/**
 * Beta `Chat` resource whose `completions` member is replaced with
 * `WrappedBetaCompletions` to attach OpenPipe metadata to parsed results.
 */
declare class WrappedBetaChat extends Chat {
    constructor(client: openai$1.OpenAI);
    /** Completions endpoint wrapper that augments responses with `openpipe` metadata. */
    completions: WrappedBetaCompletions;
}
/**
 * Beta completions wrapper. Overrides `parse` so the resolved
 * `ParsedChatCompletion` may additionally carry an optional `openpipe`
 * metadata object (e.g. reporting state) alongside the parsed content.
 */
declare class WrappedBetaCompletions extends Completions {
    constructor(client: openai$1.OpenAI);
    /**
     * Same contract as the upstream `parse`, but the returned promise
     * resolves to the parsed completion intersected with an optional
     * `openpipe?: OpenPipeMeta` field.
     *
     * @param body - Chat completion request, including structured-output params.
     * @param options - Per-request transport options.
     */
    parse<Params extends ChatCompletionParseParams, ParsedT = ExtractParsedContentFromParams<Params>>(body: Params, options?: Core.RequestOptions): Core.APIPromise<ParsedChatCompletion<ParsedT> & {
        openpipe?: OpenPipeMeta;
    }>;
}
/**
 * Options for the wrapped `OpenAI` client: all upstream `ClientOptions`
 * plus an optional `openpipe` entry, which may be either a raw
 * `OpenPipeConfig` or an already-constructed `OpenPipe` client instance.
 */
type ClientOptions = openai$1.ClientOptions & {
    openpipe?: OpenPipeConfig | OpenPipe;
};
/**
 * Drop-in replacement for the official `OpenAI` client. Accepts an extra
 * `openpipe` option (config or client) and substitutes the `chat` and
 * `beta` resources with OpenPipe-instrumented wrappers.
 */
declare class OpenAI extends openai$1.OpenAI {
    constructor({ openpipe, ...options }?: ClientOptions);
    /** Chat resource routing completions through OpenPipe reporting. */
    chat: WrappedChat;
    /** Beta resource whose parsed chat completions carry OpenPipe metadata. */
    beta: WrappedBeta;
}
/**
 * Chat resource wrapper exposing `WrappedCompletions` and a hook to
 * (re)wire the OpenPipe client, the completion-serving client, and a
 * fallback OpenAI client after construction.
 */
declare class WrappedChat extends openai$1.OpenAI.Chat {
    /** Completions endpoint that reports requests/responses to OpenPipe. */
    completions: WrappedCompletions;
    /**
     * Injects the three clients the wrapped completions use.
     *
     * @param opClient - OpenPipe reporting/metrics client.
     * @param opCompletionClient - OpenAI-compatible client used to serve completions.
     * @param fallbackClient - Plain OpenAI client used when the primary path is unavailable.
     */
    setClients(opClient: OpenPipe, opCompletionClient: openai$1.OpenAI, fallbackClient: openai$1.OpenAI): void;
}
/**
 * Core completions wrapper. Mirrors the upstream `create` overloads
 * (non-streaming, streaming, and base) while attaching OpenPipe metadata
 * to results and reporting request/response pairs via the OpenPipe client.
 */
declare class WrappedCompletions extends openai$1.OpenAI.Chat.Completions {
    /** Primary OpenAI client used for this resource. */
    openaiClient: openai$1.OpenAI;
    /** Optional OpenPipe reporting client; when absent, reporting is skipped. */
    opClient?: OpenPipe;
    /** Optional OpenAI-compatible client that serves OpenPipe-hosted models. */
    opCompletionClient?: openai$1.OpenAI;
    /** Client used as a fallback completion path. */
    fallbackClient: openai$1.OpenAI;
    constructor(client: openai$1.OpenAI);
    /**
     * Forwards a report payload to the OpenPipe service. Takes the same
     * argument shape as `DefaultService["report"]`.
     */
    _report(args: Parameters<DefaultService["report"]>[0]): Promise<void>;
    /**
     * Post-processes a raw completion or stream: wraps streams in
     * `WrappedStream`, and augments plain completions with an `openpipe`
     * object whose `reportingFinished` promise resolves once telemetry
     * has been sent.
     *
     * @param response - Raw completion or chunk stream from the server.
     * @param usedBody - The request body actually sent.
     * @param openpipeArgs - OpenPipe-specific per-request arguments (tags etc.).
     * @param requestedAt - Request start timestamp (epoch ms presumed — confirm against caller).
     */
    _handleResponse(response: ChatCompletion | Stream<ChatCompletionChunk>, usedBody: ChatCompletionCreateParams, openpipeArgs: OpenPipeArgs, requestedAt: number): Promise<WrappedStream | {
        openpipe: {
            reportingFinished: Promise<void>;
        };
        id: string;
        choices: Array<ChatCompletion.Choice>;
        created: number;
        model: string;
        object: "chat.completion";
        service_tier?: "scale" | "default" | null;
        system_fingerprint?: string;
        usage?: openai_resources.CompletionUsage;
    }>;
    /**
     * Error-path counterpart to `_handleResponse`: reports the failed
     * request and resolves/rethrows per implementation (not visible here).
     */
    _handleResponseError(error: unknown, usedBody: ChatCompletionCreateParams, openpipeArgs: OpenPipeArgs, requestedAt: number): Promise<unknown>;
    /** Non-streaming overload: resolves to a completion plus optional `openpipe` metadata. */
    create(body: ChatCompletionCreateParamsNonStreaming & OpenPipeArgs, options?: Core.RequestOptions): Core.APIPromise<OpenPipeChatCompletion & {
        openpipe?: OpenPipeMeta;
    }>;
    /** Streaming overload: resolves to a chunk stream plus optional `openpipe` metadata. */
    create(body: ChatCompletionCreateParamsStreaming & OpenPipeArgs, options?: Core.RequestOptions): Core.APIPromise<Stream<ChatCompletionChunk> & {
        openpipe?: OpenPipeMeta;
    }>;
    /** Base overload: `stream` not statically known; resolves to either form. */
    create(body: ChatCompletionCreateParamsBase & OpenPipeArgs, options?: Core.RequestOptions): Core.APIPromise<Stream<ChatCompletionChunk> | OpenPipeChatCompletion>;
}
// Re-export surface: the namespace mirrors the module's public API so it
// can be consumed both as a namespace object and via named/default exports.
type openai_ClientOptions = ClientOptions;
declare namespace openai {
    export { type openai_ClientOptions as ClientOptions, OpenAI as default };
}
export { type ClientOptions, OpenAI as default, openai as o };