@huggingface/inference

TypeScript client for the Hugging Face Inference Providers and Inference Endpoints
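
Most consumers never touch the file shown below; they go through the package's public client. The following is a minimal usage sketch, assuming the InferenceClient export and chatCompletion method of recent releases (earlier versions exposed the same surface under the name HfInference), with placeholder token and model values:

import { InferenceClient } from "@huggingface/inference";

const client = new InferenceClient("hf_xxx"); // Hugging Face access token

// Chat completion routed through an inference provider.
const out = await client.chatCompletion({
  model: "meta-llama/Llama-3.1-8B-Instruct",
  messages: [{ role: "user", content: "What is the capital of France?" }],
  max_tokens: 64,
});
console.log(out.choices[0].message.content);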

request.d.ts (28 lines, 1.18 kB)
import type { getProviderHelper } from "../lib/getProviderHelper";
import type { InferenceTask, Options, RequestArgs } from "../types";
export interface ResponseWrapper<T> {
    data: T;
    requestContext: {
        url: string;
        info: RequestInit;
    };
}
/**
 * Primitive to make custom calls to the inference provider
 */
export declare function innerRequest<T>(args: RequestArgs, providerHelper: ReturnType<typeof getProviderHelper>, options?: Options & {
    /** In most cases (unless we pass a endpointUrl) we know the task */
    task?: InferenceTask;
    /** Is chat completion compatible */
    chatCompletion?: boolean;
}): Promise<ResponseWrapper<T>>;
/**
 * Primitive to make custom inference calls that expect server-sent events, and returns the response through a generator
 */
export declare function innerStreamingRequest<T>(args: RequestArgs, providerHelper: ReturnType<typeof getProviderHelper>, options?: Options & {
    /** In most cases (unless we pass a endpointUrl) we know the task */
    task?: InferenceTask;
    /** Is chat completion compatible */
    chatCompletion?: boolean;
}): AsyncGenerator<T>;
//# sourceMappingURL=request.d.ts.map
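
These two declarations are low-level primitives: innerRequest performs a single call to an inference provider and returns the parsed payload alongside the request context (resolved URL and fetch options), while innerStreamingRequest surfaces server-sent events through an async generator. The sketch below only illustrates how code inside the package might drive them, written as if it lived next to request.ts; the getProviderHelper argument list, the RequestArgs shape, and the task/provider string values are assumptions inferred from this declaration file, not documented public API.

import { getProviderHelper } from "../lib/getProviderHelper";
import type { RequestArgs } from "../types";
import { innerRequest, innerStreamingRequest } from "./request";

// Hypothetical payload; the exact RequestArgs fields are not declared in this file.
const args = {
  accessToken: "hf_xxx",
  model: "meta-llama/Llama-3.1-8B-Instruct",
  messages: [{ role: "user", content: "Hello!" }],
} as unknown as RequestArgs;

// Assumed call shape: (provider, task) => helper. Verify against getProviderHelper.d.ts.
const providerHelper = getProviderHelper("hf-inference", "text-generation");

// One-shot request: ResponseWrapper exposes both the parsed data and the request context.
const { data, requestContext } = await innerRequest<{ choices: unknown[] }>(args, providerHelper, {
  task: "text-generation", // assumed task value for a chat-style call
  chatCompletion: true,
});
console.log(requestContext.url, data.choices);

// Streaming request: each server-sent event arrives as one yielded chunk.
for await (const chunk of innerStreamingRequest<unknown>(args, providerHelper, {
  task: "text-generation",
  chatCompletion: true,
})) {
  console.log(chunk);
}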