@ai-sdk/openai
Version:
The **[OpenAI provider](https://ai-sdk.dev/providers/ai-sdk-providers/openai)** for the [AI SDK](https://ai-sdk.dev/docs) contains language model support for the OpenAI chat and completion APIs and embedding model support for the OpenAI embeddings API.
96 lines (87 loc) • 2.6 kB
text/typescript
import {
EmbeddingModelV3,
TooManyEmbeddingValuesForCallError,
} from '@ai-sdk/provider';
import {
combineHeaders,
createJsonResponseHandler,
parseProviderOptions,
postJsonToApi,
} from '@ai-sdk/provider-utils';
import { OpenAIConfig } from '../openai-config';
import { openaiFailedResponseHandler } from '../openai-error';
import {
OpenAIEmbeddingModelId,
openaiEmbeddingModelOptions,
} from './openai-embedding-options';
import { openaiTextEmbeddingResponseSchema } from './openai-embedding-api';
/**
 * Embedding model backed by the OpenAI `/embeddings` endpoint, implementing
 * the AI SDK's `EmbeddingModelV3` contract. Batches of input values are
 * posted as a single JSON request and the parsed response is mapped into
 * the SDK's embedding result shape (embeddings, usage, raw response).
 */
export class OpenAIEmbeddingModel implements EmbeddingModelV3 {
  readonly specificationVersion = 'v3';
  readonly modelId: OpenAIEmbeddingModelId;
  // Upper bound on inputs per request enforced locally before calling the API.
  readonly maxEmbeddingsPerCall = 2048;
  readonly supportsParallelCalls = true;

  private readonly config: OpenAIConfig;

  /** Provider identifier, delegated to the injected configuration. */
  get provider(): string {
    return this.config.provider;
  }

  /**
   * @param modelId - OpenAI embedding model identifier to request.
   * @param config - Provider configuration supplying URL building, headers,
   *   provider name, and an optional fetch implementation.
   */
  constructor(modelId: OpenAIEmbeddingModelId, config: OpenAIConfig) {
    this.modelId = modelId;
    this.config = config;
  }

  /**
   * Embeds `values` via the OpenAI embeddings API.
   *
   * @throws TooManyEmbeddingValuesForCallError when `values` exceeds
   *   `maxEmbeddingsPerCall`.
   * @returns Embeddings in input order, token usage when reported, and the
   *   raw response headers/body.
   */
  async doEmbed({
    values,
    headers,
    abortSignal,
    providerOptions,
  }: Parameters<EmbeddingModelV3['doEmbed']>[0]): Promise<
    Awaited<ReturnType<EmbeddingModelV3['doEmbed']>>
  > {
    // Reject oversized batches up front rather than letting the API fail.
    if (values.length > this.maxEmbeddingsPerCall) {
      throw new TooManyEmbeddingValuesForCallError({
        provider: this.provider,
        modelId: this.modelId,
        maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,
        values,
      });
    }

    // Validate the 'openai'-namespaced provider options; default to an
    // empty object when none were supplied.
    const embeddingOptions =
      (await parseProviderOptions({
        provider: 'openai',
        providerOptions,
        schema: openaiEmbeddingModelOptions,
      })) ?? {};

    // Request payload; optional fields stay undefined and are omitted on
    // serialization.
    const requestBody = {
      model: this.modelId,
      input: values,
      encoding_format: 'float',
      dimensions: embeddingOptions.dimensions,
      user: embeddingOptions.user,
    };

    const apiResult = await postJsonToApi({
      url: this.config.url({
        path: '/embeddings',
        modelId: this.modelId,
      }),
      headers: combineHeaders(this.config.headers(), headers),
      body: requestBody,
      failedResponseHandler: openaiFailedResponseHandler,
      successfulResponseHandler: createJsonResponseHandler(
        openaiTextEmbeddingResponseSchema,
      ),
      abortSignal,
      fetch: this.config.fetch,
    });

    const { responseHeaders, value: parsed, rawValue } = apiResult;

    return {
      warnings: [],
      embeddings: parsed.data.map(entry => entry.embedding),
      // Usage may be absent from the response; surface undefined in that case.
      usage: parsed.usage
        ? { tokens: parsed.usage.prompt_tokens }
        : undefined,
      response: { headers: responseHeaders, body: rawValue },
    };
  }
}