@gentrace/openai
Version:
Gentrace OpenAI v4 plugin for Node.js
188 lines (187 loc) • 8.64 kB
TypeScript
import { Context, Configuration as GentraceConfiguration, PipelineRun, StepRun } from "@gentrace/core";
import OpenAI, { ClientOptions } from "openai";
import { RequestOptions } from "openai/core";
import { Chat, ChatCompletionTool, Completion, CompletionCreateParams, CreateEmbeddingResponse, EmbeddingCreateParams, ModerationCreateParams, ModerationCreateResponse } from "openai/resources";
import { ChatCompletionParseParams, Completions, ParsedChatCompletion } from "openai/resources/beta/chat/completions";
import { ChatCompletion, ChatCompletionChunk } from "openai/resources/chat";
import { Stream } from "openai/streaming";
import { ExtractParsedContentFromParams } from "openai/lib/parser";
/**
 * Options accepted by {@link OpenAIPipelineHandler} in addition to the
 * standard OpenAI `ClientOptions`.
 */
export type OpenAIPipelineHandlerOptions = {
    /** Active pipeline run to attach step runs to; omitted in self-contained mode. */
    pipelineRun?: PipelineRun;
    /** Gentrace configuration (API key, base path, etc.) used for reporting. */
    gentraceConfig: GentraceConfiguration;
};
/**
 * `Omit` that distributes over each member of a union instead of collapsing
 * the union first (plain `Omit<A | B, K>` loses per-member properties).
 */
type DistributiveOmit<T, K extends keyof any> = T extends any ? Omit<T, K> : never;
/**
 * A chat message whose `content` may be given either literally or as a
 * template plus inputs (Gentrace renders the template before sending).
 */
type ChatCompletionRequestMessageTemplate = DistributiveOmit<Chat.ChatCompletionMessageParam, "content"> & {
    /** Literal message content; mutually exclusive with the template fields below — TODO confirm. */
    content?: string;
    /** Template string for the message content. */
    contentTemplate?: string;
    /** Values substituted into {@link contentTemplate}. */
    contentInputs?: Record<string, string>;
};
/**
 * Wrapper around an OpenAI `Stream` that, while being iterated, collects the
 * streamed items so a step run can be recorded for the whole response.
 */
export declare class GentraceStream<Item> implements AsyncIterable<Item> {
    private stream;
    private pipelineRun;
    private partialStepRun;
    private isSelfContained;
    private aggregator;
    /**
     * @param stream Underlying OpenAI stream to delegate iteration to.
     * @param pipelineRun Pipeline run the finished step run is attached to.
     * @param partialStepRun Step run populated once the stream completes.
     * @param isSelfContained Whether this stream should submit its own run
     *   (rather than relying on an enclosing pipeline) — presumed from the
     *   flag name; verify against the implementation.
     * @param aggregator Folds the collected stream chunks into the recorded
     *   response object.
     */
    constructor(stream: Stream<Item>, pipelineRun: PipelineRun, partialStepRun: StepRun, isSelfContained: boolean, aggregator: (streamList: any[]) => Record<string, any>);
    [Symbol.asyncIterator](): AsyncIterator<Item, any, undefined>;
}
/**
 * `OpenAI.Embeddings` subclass that instruments embedding creation with
 * Gentrace step-run tracking.
 */
export declare class GentraceEmbeddings extends OpenAI.Embeddings {
    private pipelineRun?;
    private gentraceConfig;
    constructor({ client, pipelineRun, gentraceConfig, }: {
        client: OpenAI;
        pipelineRun?: PipelineRun;
        gentraceConfig: GentraceConfiguration;
    });
    /**
     * Creates an embedding while recording a step run. Accepts the standard
     * `EmbeddingCreateParams` plus Gentrace-specific fields.
     *
     * @param body Embedding params; `pipelineSlug` targets a pipeline when no
     *   run is active, `gentrace` supplies extra run context.
     * @returns The OpenAI response, augmented with the `pipelineRunId` that
     *   was recorded (when tracking occurred — TODO confirm optionality).
     */
    createInner(body: EmbeddingCreateParams & {
        pipelineSlug?: string;
        gentrace?: Context;
    }, options?: RequestOptions): Promise<CreateEmbeddingResponse & {
        pipelineRunId?: string;
    }>;
}
/**
 * `OpenAI.Moderations` subclass that instruments moderation requests with
 * Gentrace step-run tracking.
 */
export declare class GentraceModerations extends OpenAI.Moderations {
    private pipelineRun?;
    private gentraceConfig;
    constructor({ client, pipelineRun, gentraceConfig, }: {
        client: OpenAI;
        pipelineRun?: PipelineRun;
        gentraceConfig: GentraceConfiguration;
    });
    /**
     * Runs a moderation request while recording a step run.
     *
     * @param body Moderation params plus Gentrace-specific `pipelineSlug`
     *   (target pipeline when no run is active) and `gentrace` context.
     * @returns The OpenAI response, augmented with the recorded `pipelineRunId`.
     */
    createInner(body: ModerationCreateParams & {
        pipelineSlug?: string;
        gentrace?: Context;
    }, options?: RequestOptions): Promise<ModerationCreateResponse & {
        pipelineRunId?: string;
    }>;
}
/**
 * Chat-completion params where each message may use Gentrace content
 * templating, plus pipeline targeting fields.
 */
export interface GentraceChatCompletionCreateParams extends Omit<Chat.CompletionCreateParams, "messages"> {
    /** Messages, each optionally templated via `contentTemplate`/`contentInputs`. */
    messages: Array<ChatCompletionRequestMessageTemplate>;
    /** Pipeline to record against when no run is already active. */
    pipelineSlug?: string;
    /** Additional Gentrace context for the recorded run. */
    gentrace?: Context;
}
/** Streaming variant of {@link GentraceChatCompletionCreateParams} (`stream: true`). */
export interface GentraceChatCompletionCreateParamsStreaming extends GentraceChatCompletionCreateParams {
    stream: true;
}
/** Non-streaming variant of {@link GentraceChatCompletionCreateParams}. */
export interface GentraceChatCompletionCreateParamsNonStreaming extends GentraceChatCompletionCreateParams {
    stream?: false | null;
}
/** OpenAI chat completion augmented with the Gentrace `pipelineRunId` recorded for it. */
export type GentraceChatCompletion = ChatCompletion & {
    pipelineRunId?: string;
};
/**
 * `OpenAI.Chat.Completions` subclass that renders message templates and
 * records step runs for both streaming and non-streaming chat completions.
 */
export declare class GentraceChatCompletions extends OpenAI.Chat.Completions {
    private pipelineRun?;
    private gentraceConfig;
    constructor({ client, pipelineRun, gentraceConfig, }: {
        client: OpenAI;
        pipelineRun?: PipelineRun;
        gentraceConfig: GentraceConfiguration;
    });
    /**
     * Creates a chat completion while recording a step run.
     *
     * @returns Either a completed {@link GentraceChatCompletion}, or — when
     *   `body.stream` is true (presumed; verify against implementation) — a
     *   {@link GentraceStream} of chunks tagged with the `pipelineRunId`.
     */
    createInner(body: GentraceChatCompletionCreateParams, requestOptions?: RequestOptions): Promise<GentraceChatCompletion | (GentraceStream<ChatCompletionChunk> & {
        pipelineRunId?: string;
    })>;
}
/**
 * Legacy-completion params where the prompt may be given literally or as a
 * template plus inputs, plus pipeline targeting fields.
 */
export interface GentraceCompletionCreateParams extends Omit<CompletionCreateParams, "prompt"> {
    /** Literal prompt; mutually exclusive with `promptTemplate` — TODO confirm. */
    prompt?: string | Array<string> | Array<number> | Array<Array<number>> | null;
    /** Template string rendered with {@link promptInputs}. */
    promptTemplate?: string;
    /** Values substituted into the prompt template. NOTE(review): required even
        though `promptTemplate` is optional — looks asymmetric; verify intent. */
    promptInputs: Record<string, string>;
    /** Pipeline to record against when no run is already active. */
    pipelineSlug?: string;
    /** Additional Gentrace context for the recorded run. */
    gentrace?: Context;
}
/** Streaming variant of {@link GentraceCompletionCreateParams} (`stream: true`). */
export interface GentraceCompletionCreateParamsStreaming extends GentraceCompletionCreateParams {
    stream: true;
}
/** Non-streaming variant of {@link GentraceCompletionCreateParams}. */
export interface GentraceCompletionCreateParamsNonStreaming extends GentraceCompletionCreateParams {
    stream?: false | null;
}
/** OpenAI legacy completion augmented with the Gentrace `pipelineRunId` recorded for it. */
export type GentraceCompletion = Completion & {
    pipelineRunId?: string;
};
/**
 * `OpenAI.Completions` subclass that renders prompt templates and records
 * step runs for both streaming and non-streaming legacy completions.
 */
export declare class GentraceCompletions extends OpenAI.Completions {
    private pipelineRun?;
    private gentraceConfig;
    constructor({ client, pipelineRun, gentraceConfig, }: {
        client: OpenAI;
        pipelineRun?: PipelineRun;
        gentraceConfig: GentraceConfiguration;
    });
    /**
     * Creates a completion while recording a step run.
     *
     * @returns Either a completed {@link GentraceCompletion}, or — when
     *   `body.stream` is true (presumed; verify against implementation) — a
     *   {@link GentraceStream} of `Completion` chunks tagged with the
     *   `pipelineRunId`.
     */
    createInner(body: GentraceCompletionCreateParams, requestOptions?: RequestOptions): Promise<GentraceCompletion | (GentraceStream<Completion> & {
        pipelineRunId?: string;
    })>;
}
/**
 * Params for the beta structured-output `parse` path with Gentrace templating.
 *
 * NOTE(review): the type parameter `Params` is not referenced in this
 * interface body — it may be vestigial or used only for inference at call
 * sites; verify against the implementation before removing.
 */
export interface GentraceChatCompletionParseParams<Params extends ChatCompletionParseParams> {
    /** Messages, each optionally templated via `contentTemplate`/`contentInputs`. */
    messages: Array<ChatCompletionRequestMessageTemplate>;
    /** Pipeline to record against when no run is already active. */
    pipelineSlug?: string;
    /** Additional Gentrace context for the recorded run. */
    gentrace?: Context;
}
/**
 * Beta `Completions` subclass (structured outputs) that instruments
 * `parse`-style chat completions with Gentrace step-run tracking.
 */
export declare class GentraceBetaChatCompletions extends Completions {
    private pipelineRun?;
    private gentraceConfig;
    constructor({ client, pipelineRun, gentraceConfig, }: {
        client: OpenAI;
        pipelineRun?: PipelineRun;
        gentraceConfig: GentraceConfiguration;
    });
    /**
     * Parses a chat completion into structured output while recording a step
     * run. `ParsedT` is derived from the params' response format via
     * `ExtractParsedContentFromParams`.
     */
    parseInner<Params extends ChatCompletionParseParams, ParsedT = ExtractParsedContentFromParams<Params>>(body: GentraceChatCompletionCreateParams, options?: RequestOptions): Promise<ParsedChatCompletion<ParsedT>>;
}
/**
 * OpenAI `ClientOptions` extended with Gentrace connection/logging settings.
 */
export type GentraceClientOptions = ClientOptions & {
    /** Gentrace API key used for reporting runs. */
    gentraceApiKey?: string;
    /** Override for the Gentrace API base path. */
    gentraceBasePath?: string;
    /** Optional logger; defaults presumably to an internal logger — TODO confirm. */
    gentraceLogger?: {
        info: (message: string, context?: any) => void;
        warn: (message: string | Error, context?: any) => void;
    };
};
/**
 * OpenAI client subclass that carries the Gentrace configuration and the
 * (optional) active pipeline run, so the instrumented resource classes above
 * can record step runs.
 */
export declare class OpenAIPipelineHandler extends OpenAI {
    protected config: GentraceClientOptions;
    protected pipelineRun?: PipelineRun;
    protected gentraceConfig: GentraceConfiguration;
    constructor({ pipelineRun, gentraceConfig, ...config }: ClientOptions & OpenAIPipelineHandlerOptions);
}
/**
 * Step run recorded for a chat-completion call: inputs (messages/user),
 * model params (everything else), and the raw or streamed response.
 */
export declare class OpenAICreateChatCompletionStepRun extends StepRun {
    modelParams: Omit<Chat.CompletionCreateParams, "messages" | "user">;
    inputs: {
        messages?: Array<Chat.CreateChatCompletionRequestMessage>;
        user?: string;
    };
    response: OpenAI.Chat.Completions.ChatCompletion | Stream<OpenAI.Chat.Completions.ChatCompletionChunk>;
    /**
     * @param elapsedTime Duration of the call in milliseconds — presumed
     *   unit; verify against StepRun.
     * @param startTime ISO timestamp when the call started.
     * @param endTime ISO timestamp when the call finished.
     * @param inputs Rendered messages plus any template inputs used per message.
     * @param modelParams Remaining request params, plus the content templates
     *   that produced the messages.
     * @param response Completed response or the wrapped stream.
     * @param context Gentrace context attached to the run.
     */
    constructor(elapsedTime: number, startTime: string, endTime: string, inputs: {
        messages?: Array<Chat.CreateChatCompletionRequestMessage>;
        tools?: Array<ChatCompletionTool>;
        user?: string;
        contentInputs?: Record<string, string>[];
    }, modelParams: Omit<Chat.CompletionCreateParams, "messages" | "user"> & {
        contentTemplates?: string[];
    }, response: OpenAI.Chat.Completions.ChatCompletion | GentraceStream<OpenAI.Chat.Completions.ChatCompletionChunk>, context: Context);
}
/**
 * Step run recorded for a legacy-completion call: prompt inputs, model
 * params (with the prompt template), and the raw or streamed response.
 */
export declare class OpenAICreateCompletionStepRun extends StepRun {
    inputs: {
        prompt?: Record<string, string>;
        user?: string;
        suffix?: string;
    };
    modelParams: Omit<CompletionCreateParams, "prompt" | "user" | "suffix"> & {
        promptTemplate: string;
    };
    response: Completion | Stream<Completion>;
    /**
     * @param elapsedTime Duration of the call in milliseconds — presumed
     *   unit; verify against StepRun.
     * @param startTime ISO timestamp when the call started.
     * @param endTime ISO timestamp when the call finished.
     * @param inputs Prompt inputs (template values or the literal prompt),
     *   plus optional user/suffix.
     * @param modelParams Remaining request params plus the prompt template.
     * @param response Completed response or the wrapped stream.
     * @param context Gentrace context attached to the run.
     */
    constructor(elapsedTime: number, startTime: string, endTime: string, inputs: {
        prompt?: Record<string, string> | string | any[];
        user?: string;
        suffix?: string;
    }, modelParams: Omit<CompletionCreateParams, "prompt" | "user" | "suffix"> & {
        promptTemplate: string;
    }, response: Completion | GentraceStream<Completion>, context: Context);
}
/**
 * Step run recorded for an embedding call: the input text(s), the model
 * params, and the embedding response.
 */
export declare class OpenAICreateEmbeddingStepRun extends StepRun {
    inputs: Omit<EmbeddingCreateParams, "model">;
    modelParams: Omit<EmbeddingCreateParams, "input" | "user">;
    response: CreateEmbeddingResponse;
    constructor(elapsedTime: number, startTime: string, endTime: string, inputs: Omit<EmbeddingCreateParams, "model">, modelParams: Omit<EmbeddingCreateParams, "input" | "user">, response: CreateEmbeddingResponse, context: Context);
}
/**
 * Step run recorded for a moderation call: the moderated input, the model
 * params, and the moderation response.
 */
export declare class OpenAICreateModerationStepRun extends StepRun {
    inputs: Omit<ModerationCreateParams, "model">;
    modelParams: Omit<ModerationCreateParams, "input" | "user">;
    response: ModerationCreateResponse;
    constructor(elapsedTime: number, startTime: string, endTime: string, inputs: Omit<ModerationCreateParams, "model">, modelParams: Omit<ModerationCreateParams, "input" | "user">, response: ModerationCreateResponse, context: Context);
}
export {};