ai-utils.js

Build AI applications, chatbots, and agents with JavaScript and TypeScript.

import z from "zod";
import { AbstractModel } from "../../model-function/AbstractModel.js";
import { FunctionOptions } from "../../model-function/FunctionOptions.js";
import { TextGenerationModel, TextGenerationModelSettings } from "../../model-function/generate-text/TextGenerationModel.js";
import { RetryFunction } from "../../util/api/RetryFunction.js";
import { ThrottleFunction } from "../../util/api/ThrottleFunction.js";
import { PromptMapping } from "../../prompt/PromptMapping.js";
import { PromptMappingTextGenerationModel } from "../../prompt/PromptMappingTextGenerationModel.js";

export interface HuggingFaceTextGenerationModelSettings extends TextGenerationModelSettings {
    model: string;
    baseUrl?: string;
    apiKey?: string;
    retry?: RetryFunction;
    throttle?: ThrottleFunction;
    topK?: number;
    topP?: number;
    temperature?: number;
    repetitionPenalty?: number;
    maxNewTokens?: number;
    maxTime?: number;
    numReturnSequences?: number;
    doSample?: boolean;
    options?: {
        useCache?: boolean;
        waitForModel?: boolean;
    };
}

/**
 * Create a text generation model that calls a Hugging Face Inference API Text Generation Task.
 *
 * @see https://huggingface.co/docs/api-inference/detailed_parameters#text-generation-task
 *
 * @example
 * const model = new HuggingFaceTextGenerationModel({
 *   model: "tiiuae/falcon-7b",
 *   temperature: 0.7,
 *   maxTokens: 500,
 *   retry: retryWithExponentialBackoff({ maxTries: 5 }),
 * });
 *
 * const { text } = await generateText(
 *   model,
 *   "Write a short story about a robot learning to love:\n\n"
 * );
 */
export declare class HuggingFaceTextGenerationModel extends AbstractModel<HuggingFaceTextGenerationModelSettings> implements TextGenerationModel<string, HuggingFaceTextGenerationResponse, undefined, HuggingFaceTextGenerationModelSettings> {
    constructor(settings: HuggingFaceTextGenerationModelSettings);
    readonly provider = "huggingface";
    get modelName(): string;
    readonly contextWindowSize: undefined;
    readonly tokenizer: undefined;
    private get apiKey();
    callAPI(prompt: string, options?: FunctionOptions<HuggingFaceTextGenerationModelSettings>): Promise<HuggingFaceTextGenerationResponse>;
    readonly countPromptTokens: undefined;
    generateTextResponse(prompt: string, options?: FunctionOptions<HuggingFaceTextGenerationModelSettings>): Promise<{
        generated_text: string;
    }[]>;
    extractText(response: HuggingFaceTextGenerationResponse): string;
    generateDeltaStreamResponse: undefined;
    extractTextDelta: undefined;
    mapPrompt<INPUT_PROMPT>(promptMapping: PromptMapping<INPUT_PROMPT, string>): PromptMappingTextGenerationModel<INPUT_PROMPT, string, HuggingFaceTextGenerationResponse, undefined, HuggingFaceTextGenerationModelSettings, this>;
    withSettings(additionalSettings: Partial<HuggingFaceTextGenerationModelSettings>): this;
    get maxCompletionTokens(): number | undefined;
    withMaxCompletionTokens(maxCompletionTokens: number): this;
    withStopTokens(): this;
}

declare const huggingFaceTextGenerationResponseSchema: z.ZodArray<z.ZodObject<{
    generated_text: z.ZodString;
}, "strip", z.ZodTypeAny, {
    generated_text: string;
}, {
    generated_text: string;
}>, "many">;

export type HuggingFaceTextGenerationResponse = z.infer<typeof huggingFaceTextGenerationResponseSchema>;

export {};
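
A minimal usage sketch based on the declarations above. It assumes the package root re-exports HuggingFaceTextGenerationModel and generateText (the actual import path may differ in your installed version) and that the API key is supplied via an environment variable named HUGGINGFACE_API_KEY; the settings used below come from HuggingFaceTextGenerationModelSettings as declared in this file.

// Usage sketch — assumed root export; verify the import path for your version.
import { HuggingFaceTextGenerationModel, generateText } from "ai-utils.js";

const model = new HuggingFaceTextGenerationModel({
  model: "tiiuae/falcon-7b",
  apiKey: process.env.HUGGINGFACE_API_KEY, // assumed environment variable name
  temperature: 0.7,
  maxNewTokens: 250,
  options: { waitForModel: true }, // wait for a cold model instead of failing immediately
});

// Per the JSDoc example above, generateText resolves with the extracted text
// from the HuggingFaceTextGenerationResponse (an array of { generated_text }).
const { text } = await generateText(
  model,
  "Write a short story about a robot learning to love:\n\n"
);

console.log(text);

Note that withSettings(additionalSettings) and mapPrompt(promptMapping) both return model instances of the same shape, which suggests deriving configured or prompt-adapted variants from a base model rather than mutating it in place.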