@promptbook/azure-openai
Promptbook: Run AI apps in plain human language across multiple models and platforms
import type { AvailableModel } from '../../execution/AvailableModel';
import type { LlmExecutionTools } from '../../execution/LlmExecutionTools';
import type { Usage } from '../../execution/Usage';
import type { string_markdown } from '../../types/typeAliases';
import type { string_markdown_text } from '../../types/typeAliases';
import type { string_model_name } from '../../types/typeAliases';
import type { string_title } from '../../types/typeAliases';
import { RemoteLlmExecutionTools } from '../remote/RemoteLlmExecutionTools';
import { computeOpenAiUsage } from './computeOpenAiUsage';
import { OpenAiCompatibleExecutionTools } from './OpenAiCompatibleExecutionTools';
import type { OpenAiCompatibleExecutionToolsNonProxiedOptions } from './OpenAiCompatibleExecutionToolsOptions';
import type { OpenAiCompatibleExecutionToolsOptions } from './OpenAiCompatibleExecutionToolsOptions';
/**
 * Execution Tools for calling an OpenAI compatible API
 *
 * Note: This can be used with any OpenAI compatible API
*
* @public exported from `@promptbook/openai`
*/
export declare const createOpenAiCompatibleExecutionTools: ((options: OpenAiCompatibleExecutionToolsOptions & {
/**
* The model name to use for all operations
*
 * This will be the only model available through this LLM provider, and it will be exposed as a chat model.
 * Other model variants are not available for now.
*/
defaultModelName: string_model_name;
}) => OpenAiCompatibleExecutionTools | RemoteLlmExecutionTools) & {
packageName: string;
className: string;
};
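/**
 * Example (illustrative sketch only, not part of the declarations): a possible call to the
 * factory above. Only `defaultModelName` is documented in these declarations; the `baseURL`
 * and `apiKey` fields are assumptions about the shape of `OpenAiCompatibleExecutionToolsOptions`.
 *
 * @example
 * const tools = createOpenAiCompatibleExecutionTools({
 *     baseURL: 'https://llm.example.com/v1', // <- hypothetical OpenAI compatible endpoint
 *     apiKey: process.env.EXAMPLE_API_KEY, // <- hypothetical credential
 *     defaultModelName: 'my-chat-model', // <- the single chat model this provider will expose
 * });
 */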
/**
* Execution Tools for calling ONE SPECIFIC PRECONFIGURED OpenAI compatible provider
*
* @private for `createOpenAiCompatibleExecutionTools`
*/
export declare class HardcodedOpenAiCompatibleExecutionTools extends OpenAiCompatibleExecutionTools implements LlmExecutionTools {
private readonly defaultModelName;
protected readonly options: OpenAiCompatibleExecutionToolsNonProxiedOptions;
/**
* Creates OpenAI compatible Execution Tools.
*
 * @param defaultModelName the single chat model that this provider will expose
 * @param options the relevant options, which are passed directly to the OpenAI compatible client
*/
constructor(defaultModelName: string_model_name, options: OpenAiCompatibleExecutionToolsNonProxiedOptions);
get title(): string_title & string_markdown_text;
get description(): string_markdown;
/**
 * List all available models (hardcoded, not listed dynamically from the API)
 *
 * Note: The purpose of this is to provide more information about the models than the standard listing from the API does
*/
protected get HARDCODED_MODELS(): ReadonlyArray<AvailableModel>;
/**
* Computes the usage
*/
protected computeUsage(...args: Parameters<typeof computeOpenAiUsage>): Usage;
/**
* Default model for chat variant.
*/
protected getDefaultChatModel(): AvailableModel;
/**
* Default model for completion variant.
*/
protected getDefaultCompletionModel(): AvailableModel;
/**
 * Default model for embedding variant.
*/
protected getDefaultEmbeddingModel(): AvailableModel;
}
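/**
 * Example (illustrative sketch only): the factory `createOpenAiCompatibleExecutionTools` presumably
 * constructs this `@private` class roughly as follows; the concrete contents of `options` are an
 * assumption, only the constructor signature above is given.
 *
 * @example
 * const tools = new HardcodedOpenAiCompatibleExecutionTools(
 *     'my-chat-model', // <- defaultModelName, the only model that will be exposed
 *     options, // <- OpenAiCompatibleExecutionToolsNonProxiedOptions forwarded to the OpenAI compatible client
 * );
 * console.info(tools.title); // <- public getter declared above
 */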
/**
* TODO: [🦺] Is there some way how to put `packageName` and `className` on top and function definition on bottom?
* TODO: [🎶] Naming "constructor" vs "creator" vs "factory"
*/