/**
 * @varlabs/ai.openai
 * AI SDK for interfacing with AI models (OpenAI provider).
 */
import { defineProvider } from '@varlabs/ai/provider';
import fetch from '@varlabs/ai.utils/fetch.server';
import { handleStreamResponse } from '@varlabs/ai/utils/streaming';
/**
 * Identity helper that preserves the inferred type of a user-defined tool.
 * (Exists so callers get type inference on the tool literal; the value is
 * returned unchanged.)
 * @param {object} tool - Tool definition (description, parameters, strict, execute).
 * @returns {object} The same tool object, unchanged.
 */
export const customTool = (tool) => {
    return tool;
};
/**
 * Converts a StructuredSchema into the shape OpenAI's structured-output
 * API expects: every property name is listed in a single `required`
 * array on the object, and properties the caller marked
 * `required: false` are made nullable instead of being omitted.
 */
const transformToOpenAiSchema = (schema) => {
    const properties = {};
    const required = [];
    for (const [name, property] of Object.entries(schema.properties)) {
        const transformed = transformProperty(property);
        // OpenAI strict mode wants every key present; optionality is
        // expressed by also permitting null for the value.
        if (property.required === false) {
            transformed.type = [transformed.type, 'null'];
        }
        properties[name] = transformed;
        required.push(name);
    }
    return {
        type: 'object',
        properties,
        additionalProperties: false,
        required,
        description: schema.description,
    };
};
/**
 * Recursively transforms a single StructuredSchema property into an
 * OpenAI-compatible JSON-schema fragment.
 *
 * - Strips the custom per-property `required` flag (OpenAI uses a
 *   `required` array on the enclosing object instead).
 * - Objects gain `additionalProperties: false`, list every key in
 *   `required`, and optional keys allow `null` rather than being omitted.
 *
 * @param {object} property - A StructuredSchema property definition.
 * @returns {object} OpenAI-compatible schema fragment.
 */
function transformProperty(property) {
    // Drop the per-property 'required' flag; it is not valid JSON schema.
    const { required, ...baseProperty } = property;
    // Primitive types need no further processing.
    if (property.type === 'string' || property.type === 'number' || property.type === 'boolean') {
        return baseProperty;
    }
    // Arrays: transform each item schema recursively.
    if (property.type === 'array') {
        return {
            ...baseProperty,
            items: property.items.map(item => transformProperty(item))
        };
    }
    // Objects: rebuild with a single 'required' array covering every key.
    if (property.type === 'object') {
        const result = {
            type: 'object',
            description: property.description,
            properties: {},
            additionalProperties: false,
            required: []
        };
        for (const propName in property.properties) {
            const nestedProp = property.properties[propName];
            result.properties[propName] = transformProperty(nestedProp);
            // BUG FIX: previously tested `property.required` (the parent
            // object's flag) here, so optional nested fields were never
            // made nullable. Test the nested property's own flag.
            if (nestedProp.required === false) {
                result.properties[propName].type = [result.properties[propName].type, 'null']; // Allow null if not required
            }
            result.required.push(propName);
        }
        return result;
    }
    // Default case, shouldn't get here if types are properly constrained
    return baseProperty;
}
const openAiProvider = defineProvider({
name: 'OpenAI',
context: {
config: {
apiKey: '',
baseUrl: 'https://api.openai.com/v1',
}
},
models: {
embedding: {
/**
 * Create an embedding vector for `text` via POST {baseUrl}/embeddings.
 */
embed: async (callArgs, ctx) => {
    const { model, text } = callArgs.input;
    const requestPayload = { model, input: text };
    return fetch(`${ctx.config.baseUrl}/embeddings`, {
        method: 'POST',
        headers: {
            Authorization: `Bearer ${ctx.config.apiKey}`,
            'Content-Type': 'application/json',
        },
        body: JSON.stringify(requestPayload),
        MAX_FETCH_TIME: callArgs.config?.fetchTimeout,
    });
}
},
text: {
/**
 * Create a non-streaming model response via POST {baseUrl}/responses,
 * then resolve any custom tool calls and parse structured output.
 *
 * @param inputArgs - { input, config }: Responses-API request fields plus
 *   optional built_in_tools / custom_tools / structured_output;
 *   config?.fetchTimeout bounds the request.
 * @param ctx - Provider context carrying config.apiKey and config.baseUrl.
 * @returns The response object, mutated in place: function_call items gain
 *   a `result`, and output_text contents gain `structured_output` when
 *   their text parses as JSON.
 */
create_response: async (inputArgs, ctx) => {
const { input, config } = inputArgs;
// Single tools array sent to the API: built-in tools first (passed
// through unchanged), then custom function tools.
const bodyToolsArray = [];
if (input.built_in_tools) {
for (const tool of input.built_in_tools) {
bodyToolsArray.push(tool);
}
}
// Flatten each custom tool into an OpenAI 'function' tool descriptor.
if (input.custom_tools) {
for (const [name, tool] of Object.entries(input.custom_tools)) {
const functionTool = {
type: 'function',
name,
strict: tool.strict ?? false,
description: tool.description,
parameters: tool.parameters,
};
bodyToolsArray.push(functionTool);
}
}
// Request body mirrors the OpenAI Responses API; undefined fields are
// dropped by JSON.stringify.
const requestBody = {
model: input.model,
instructions: input.instructions,
input: input.input,
stream: false,
reasoning: input.reasoning,
max_output_tokens: input.max_output_tokens,
metadata: input.metadata,
truncation: input.truncation,
user: input.user,
previous_response_id: input.previous_response_id,
store: input.store,
parallel_tool_calls: input.parallel_tool_calls,
tool_choice: input.tool_choice,
temperature: input.temperature,
top_p: input.top_p,
tools: bodyToolsArray,
// When structured output is requested, attach a JSON-schema format
// converted from the caller's StructuredSchema.
text: input.structured_output ? {
format: {
type: 'json_schema',
name: input.structured_output.name,
strict: input.structured_output.strict,
schema: transformToOpenAiSchema(input.structured_output.schema),
}
} : undefined,
};
// The fetch wrapper returns the parsed JSON body here (response.output
// is read directly below). NOTE(review): MAX_FETCH_TIME is presumably a
// timeout option consumed by the custom wrapper — confirm in fetch.server.
const response = await fetch(`${ctx.config.baseUrl}/responses`, {
method: 'POST',
headers: {
Authorization: `Bearer ${ctx.config.apiKey}`,
'Content-Type': 'application/json',
},
body: JSON.stringify(requestBody),
MAX_FETCH_TIME: config?.fetchTimeout,
});
// Post-process the model output in place.
for (const item of response.output) {
// Execute any custom tool call the model made (sequentially, in output
// order) and attach the result to the call item.
if (item.type === 'function_call') {
const tool = input.custom_tools?.[item.name];
if (tool) {
const args = JSON.parse(item.arguments);
const toolResponse = await tool.execute(args);
item.result = toolResponse;
}
}
// Try to parse message text as structured-output JSON.
if (item.type === 'message') {
for (const content of item.content) {
if (content.type === 'output_text') {
try {
const result = JSON.parse(content.text);
content.structured_output = result;
}
catch {
// If parsing fails, we can just leave it as is
}
}
}
}
}
return response;
},
stream_response: async (inputArgs, ctx) => {
const { input, config } = inputArgs;
const bodyToolsArray = [];
if (input.built_in_tools) {
for (const tool of input.built_in_tools) {
bodyToolsArray.push(tool);
}
}
if (input.custom_tools) {
for (const [name, tool] of Object.entries(input.custom_tools)) {
const functionTool = {
type: 'function',
name,
strict: tool.strict ?? false,
description: tool.description,
parameters: tool.parameters,
};
bodyToolsArray.push(functionTool);
}
}
const requestBody = {
model: input.model,
instructions: input.instructions,
input: input.input,
stream: true,
reasoning: input.reasoning,
max_output_tokens: input.max_output_tokens,
metadata: input.metadata,
truncation: input.truncation,
user: input.user,
previous_response_id: input.previous_response_id,
store: input.store,
parallel_tool_calls: input.parallel_tool_calls,
tool_choice: input.tool_choice,
temperature: input.temperature,
top_p: input.top_p,
tools: bodyToolsArray,
text: input.structured_output ? {
format: {
type: 'json_schema',
name: input.structured_output.name,
strict: input.structured_output.strict,
schema: transformToOpenAiSchema(input.structured_output.schema),
}
} : undefined,
};
const response = await fetch(`${ctx.config.baseUrl}/responses`, {
method: 'POST',
headers: {
Authorization: `Bearer ${ctx.config.apiKey}`,
'Content-Type': 'application/json',
},
body: JSON.stringify(requestBody),
MAX_FETCH_TIME: config?.fetchTimeout,
}, false);
return handleStreamResponse(response);
},
get_response: async (input, ctx) => {
const { input: { id }, config } = input;
const response = await fetch(`${ctx.config.baseUrl}/responses/${id}`, {
method: 'GET',
headers: {
Authorization: `Bearer ${ctx.config.apiKey}`,
'Content-Type': 'application/json',
},
MAX_FETCH_TIME: config?.fetchTimeout,
});
return response;
},
delete_response: async (input, ctx) => {
const { input: { id }, config } = input;
const response = await fetch(`${ctx.config.baseUrl}/responses/${id}`, {
method: 'DELETE',
headers: {
Authorization: `Bearer ${ctx.config.apiKey}`,
'Content-Type': 'application/json',
},
MAX_FETCH_TIME: config?.fetchTimeout,
});
return response;
},
list_input_item_list: async (input, ctx) => {
const { input: { id }, config } = input;
const response = await fetch(`${ctx.config.baseUrl}/responses/${id}/input_items`, {
method: 'GET',
headers: {
Authorization: `Bearer ${ctx.config.apiKey}`,
'Content-Type': 'application/json',
},
MAX_FETCH_TIME: config?.fetchTimeout,
});
return response;
},
},
images: {
create: async (inputArgs, ctx) => {
const { input, config } = inputArgs;
const response = await fetch(`${ctx.config.baseUrl}/images/generations`, {
method: 'POST',
headers: {
Authorization: `Bearer ${ctx.config.apiKey}`,
'Content-Type': 'application/json',
},
body: JSON.stringify(input),
MAX_FETCH_TIME: config?.fetchTimeout,
});
return response;
},
edit: async (inputArgs, ctx) => {
const { input, config } = inputArgs;
const formData = new FormData();
if (Array.isArray(input.image)) {
for (const image of input.image) {
// @ts-ignore
formData.append('image', image);
}
}
else {
// @ts-ignore
formData.append('image', input.image);
}
if (input.mask) {
// @ts-ignore
formData.append('mask', input.mask);
}
formData.append('prompt', input.prompt);
if (input.model) {
formData.append('model', input.model);
}
if (input.n) {
formData.append('n', input.n.toString());
}
if (input.quality) {
formData.append('quality', input.quality);
}
if (input.response_format) {
formData.append('response_format', input.response_format);
}
if (input.size) {
formData.append('size', input.size);
}
const response = await fetch(`${ctx.config.baseUrl}/images/edits`, {
method: 'POST',
headers: {
Authorization: `Bearer ${ctx.config.apiKey}`,
},
body: formData,
MAX_FETCH_TIME: config?.fetchTimeout,
});
return response;
},
generate_variations: async (inputArgs, ctx) => {
const { input, config } = inputArgs;
const formData = new FormData();
// @ts-ignore
formData.append('image', input.image);
formData.append('model', 'dall-e-2');
if (input.n) {
formData.append('n', input.n.toString());
}
if (input.repsonse_format) {
formData.append('response_format', input.repsonse_format);
}
if (input.size) {
formData.append('size', input.size);
}
const response = await fetch(`${ctx.config.baseUrl}/images/variations`, {
method: 'POST',
headers: {
Authorization: `Bearer ${ctx.config.apiKey}`,
},
body: formData,
MAX_FETCH_TIME: config?.fetchTimeout,
});
return response;
},
},
speech: {
generate_audio: async (inputArgs, ctx) => {
const { input, config } = inputArgs;
const contentTypeMap = {
'mp3': 'audio/mpeg',
'opus': 'audio/ogg',
'aac': 'audio/mp4',
'wav': 'audio/wav',
'pcm': 'audio/pcm'
};
const response = await fetch(`${ctx.config.baseUrl}/audio/generations`, {
method: 'POST',
headers: {
Authorization: `Bearer ${ctx.config.apiKey}`,
'Content-Type': 'application/json',
},
body: JSON.stringify(input),
MAX_FETCH_TIME: config?.fetchTimeout,
}, false);
const blob = await response.blob();
return {
blob,
contentType: contentTypeMap[input.response_format ?? 'mp3'],
};
},
transcribe_audio: async (inputArgs, ctx) => {
const { input, config } = inputArgs;
const formData = new FormData();
// @ts-ignore
formData.append('file', input.file);
formData.append('model', input.model);
if (input.chunking_strategy) {
formData.append('chunking_strategy', JSON.stringify(input.chunking_strategy));
}
if (input.language) {
formData.append('language', input.language);
}
if (input.prompt) {
formData.append('prompt', input.prompt);
}
if (input.response_format) {
formData.append('response_format', input.response_format);
}
if (input.stream) {
formData.append('stream', input.stream.toString());
}
if (input.temperature) {
formData.append('temperature', input.temperature.toString());
}
if (input.include) {
formData.append('include', JSON.stringify(input.include));
}
const fetchResponseFormat = (input.response_format === 'json' || input.response_format === 'verbose_json');
const response = await fetch(`${ctx.config.baseUrl}/audio/transcriptions`, {
method: 'POST',
headers: {
Authorization: `Bearer ${ctx.config.apiKey}`,
'Content-Type': 'multipart/form-data',
},
body: formData,
MAX_FETCH_TIME: config?.fetchTimeout,
}, fetchResponseFormat);
if (fetchResponseFormat) {
return input.response_format === 'json' ? response : response;
}
const blob = await response.blob();
return blob;
},
translate_audio: async (inputArgs, ctx) => {
const { input, config } = inputArgs;
const formData = new FormData();
// @ts-ignore
formData.append('file', input.file);
formData.append('model', input.model);
if (input.prompt) {
formData.append('prompt', input.prompt);
}
if (input.response_format) {
formData.append('response_format', input.response_format);
}
if (input.temperature) {
formData.append('temperature', input.temperature.toString());
}
const response = await fetch(`${ctx.config.baseUrl}/audio/translations`, {
method: 'POST',
headers: {
Authorization: `Bearer ${ctx.config.apiKey}`,
'Content-Type': 'multipart/form-data',
},
body: formData,
MAX_FETCH_TIME: config?.fetchTimeout,
});
return response;
},
},
},
});
export default openAiProvider;
//# sourceMappingURL=index.js.map