@302ai/ai-sdk
Version:
The **[302AI provider](https://sdk.vercel.ai/providers/ai-sdk-providers/)** for the [AI SDK](https://sdk.vercel.ai/docs) contains image model support for the [302AI](https://302.ai) platform.
1,610 lines (1,591 loc) • 234 kB
JavaScript
"use strict";
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __export = (target, all) => {
for (var name in all)
__defProp(target, name, { get: all[name], enumerable: true });
};
var __copyProps = (to, from, except, desc) => {
if (from && typeof from === "object" || typeof from === "function") {
for (let key of __getOwnPropNames(from))
if (!__hasOwnProp.call(to, key) && key !== except)
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
}
return to;
};
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
// src/index.ts
// Public API surface of the package, registered as lazy getters and exposed
// through module.exports via the interop helpers above.
var index_exports = {};
__export(index_exports, {
  AI302EmbeddingModel: () => AI302EmbeddingModel,
  AI302LanguageModel: () => AI302LanguageModel,
  AI302RerankingModel: () => AI302RerankingModel,
  AI302SpeechModel: () => AI302SpeechModel,
  AI302TranscriptionModel: () => AI302TranscriptionModel,
  VERSION: () => VERSION,
  ai302: () => ai302,
  createAI302: () => createAI302
});
module.exports = __toCommonJS(index_exports);
// src/ai302-provider.ts
var import_provider_utils47 = require("@ai-sdk/provider-utils");
// src/models/auraflow.ts
var import_provider_utils2 = require("@ai-sdk/provider-utils");
// src/utils/api-handlers.ts
var import_provider = require("@ai-sdk/provider");
var import_provider_utils = require("@ai-sdk/provider-utils");
// Builds a success handler that parses the response body as JSON and returns
// it together with the extracted response headers. Note: any JSON parse
// failure propagates to the caller as a rejected promise.
var createJsonResponseHandler = () => async ({ response, url, requestBodyValues }) => {
  const extractedHeaders = (0, import_provider_utils.extractResponseHeaders)(response);
  const parsedBody = await response.json();
  return { responseHeaders: extractedHeaders, value: parsedBody };
};
// Failure handler: wraps a non-2xx response into an APICallError carrying the
// status line, raw body text, and the request context for diagnostics.
var statusCodeErrorResponseHandler = async ({ response, url, requestBodyValues }) => {
  const extractedHeaders = (0, import_provider_utils.extractResponseHeaders)(response);
  const bodyText = await response.text();
  const apiError = new import_provider.APICallError({
    message: response.statusText,
    url,
    requestBodyValues,
    statusCode: response.status,
    responseHeaders: extractedHeaders,
    responseBody: bodyText
  });
  return { responseHeaders: extractedHeaders, value: apiError };
};
// src/models/base-model.ts
var BaseModelHandler = class {
constructor(modelId, settings, config) {
this.modelId = modelId;
this.settings = settings;
this.config = config;
}
async handleRequest(params) {
const { headers, ...rest } = params;
const requestHeaders = headers ? Object.fromEntries(
Object.entries(headers).filter(([_, v]) => v !== void 0).map(([k, v]) => [k, v])
) : void 0;
return this.processRequest({
...rest,
headers: requestHeaders
});
}
parseSize(size) {
if (!size) return void 0;
const [width, height] = size.split("x").map(Number);
return { width, height };
}
validateAspectRatio(aspectRatio, warnings, maxRatio, minRatio) {
if (!aspectRatio) return void 0;
const [width, height] = aspectRatio.split(":").map(Number);
if (!width || !height) return void 0;
if (maxRatio === void 0 || minRatio === void 0) {
return aspectRatio;
}
const ratio = width / height;
if (ratio >= minRatio && ratio <= maxRatio) {
return aspectRatio;
}
let adjustedWidth;
let adjustedHeight;
if (ratio > maxRatio) {
adjustedHeight = 9;
adjustedWidth = Math.round(maxRatio * adjustedHeight);
} else {
adjustedWidth = 9;
adjustedHeight = Math.round(adjustedWidth / minRatio);
}
warnings.push({
type: "unsupported",
feature: "aspectRatio",
details: `Aspect ratio ${aspectRatio} is outside the allowed range (${adjustedWidth}:${adjustedHeight} to ${adjustedHeight}:${adjustedWidth}). Adjusted to ${adjustedWidth}:${adjustedHeight}`
});
return `${adjustedWidth}:${adjustedHeight}`;
}
aspectRatioToSize(aspectRatio, baseSize = 1024, warnings) {
if (!aspectRatio) return void 0;
const validatedAspectRatio = this.validateAspectRatio(
aspectRatio,
warnings
);
if (!validatedAspectRatio) return void 0;
const [width, height] = validatedAspectRatio.split(":").map(Number);
if (!width || !height) return void 0;
const ratio = width / height;
if (ratio > 1) {
return { width: baseSize, height: Math.round(baseSize / ratio) };
} else {
return { width: Math.round(baseSize * ratio), height: baseSize };
}
}
async downloadImage(url) {
const maxRetries = 5;
const timeout = 12e4;
for (let attempt = 0; attempt < maxRetries; attempt++) {
const controller = new AbortController();
try {
const timeoutId = setTimeout(() => controller.abort(), timeout);
const imageResponse = await fetch(url, {
signal: controller.signal,
headers: {
Accept: "image/*",
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36"
}
});
clearTimeout(timeoutId);
if (!imageResponse.ok) {
throw new Error(`HTTP error! status: ${imageResponse.status}`);
}
const arrayBuffer = await imageResponse.arrayBuffer();
const base64 = Buffer.from(arrayBuffer).toString("base64");
return base64;
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error);
if (attempt === maxRetries - 1) {
throw new Error(
`Failed to download image after ${maxRetries} attempts: ${errorMessage}`
);
}
const delay = Math.min(
2 ** attempt * 2e3 + Math.random() * 1e3,
3e4
);
await new Promise((resolve44) => setTimeout(resolve44, delay));
controller.abort();
}
}
throw new Error("Failed to download image after retries");
}
async downloadImages(urls) {
const imagePromises = urls.map(async (url) => {
try {
return await this.downloadImage(url);
} catch (error) {
return null;
}
});
const base64Images = await Promise.all(imagePromises);
const validImages = base64Images.filter(Boolean);
if (validImages.length === 0) {
throw new Error("All image downloads failed");
}
return validImages;
}
validateSizeOption(parsedSize, supportedSizes, warnings) {
const validatedSize = this.validateDimensionsMultipleOf32(
parsedSize,
warnings
);
const sizeStr = `${validatedSize.width}x${validatedSize.height}`;
if (!supportedSizes.includes(sizeStr)) {
const closestSize = this.findClosestSize(validatedSize, supportedSizes);
warnings.push({
type: "unsupported",
feature: "size",
details: `Size ${sizeStr} is not supported. Using closest supported size: ${closestSize}`
});
const [width, height] = closestSize.split("x").map(Number);
return { width, height };
}
return validatedSize;
}
validateDimensionsMultipleOf32(size, warnings, minSize = 32, maxSize = 4096) {
const adjustDimension = (value) => {
if (value < minSize) {
return minSize;
}
if (value > maxSize) {
return maxSize;
}
if (value % 32 !== 0) {
const roundedValue = Math.round(value / 32) * 32;
return Math.min(maxSize, Math.max(minSize, roundedValue));
}
return value;
};
const adjustedWidth = adjustDimension(size.width);
const adjustedHeight = adjustDimension(size.height);
if (adjustedWidth !== size.width || adjustedHeight !== size.height) {
warnings.push({
type: "unsupported",
feature: "size",
details: `Image dimensions must be multiples of 32 and within the range ${minSize}-${maxSize}. Adjusted from ${size.width}x${size.height} to ${adjustedWidth}x${adjustedHeight}`
});
return { width: adjustedWidth, height: adjustedHeight };
}
return size;
}
findClosestSize(size, supportedSizes) {
const targetRatio = size.width / size.height;
const sizesByRatio = supportedSizes.slice().sort((a, b) => {
const [w1, h1] = a.split("x").map(Number);
const [w2, h2] = b.split("x").map(Number);
const ratio1 = w1 / h1;
const ratio2 = w2 / h2;
const diff1 = Math.abs(ratio1 - targetRatio);
const diff2 = Math.abs(ratio2 - targetRatio);
return diff1 - diff2;
});
const similarRatioSizes = sizesByRatio.slice(0, 2);
return similarRatioSizes.reduce((closest, current) => {
const [w1, h1] = current.split("x").map(Number);
const [w2, h2] = closest.split("x").map(Number);
const diff1 = Math.abs(Math.max(w1, h1) - 1024);
const diff2 = Math.abs(Math.max(w2, h2) - 1024);
return diff1 < diff2 ? current : closest;
});
}
findClosestAspectRatio(targetRatio, supportedRatios, warnings) {
if (!targetRatio) return supportedRatios[0];
const [targetWidth, targetHeight] = targetRatio.split(":").map(Number);
if (!targetWidth || !targetHeight) return supportedRatios[0];
const targetValue = targetWidth / targetHeight;
let closestRatio = supportedRatios[0];
let minDiff = Infinity;
for (const ratio of supportedRatios) {
const [w, h] = ratio.split(":").map(Number);
if (!w || !h) continue;
const currentValue = w / h;
const diff = Math.abs(currentValue - targetValue);
if (diff < minDiff) {
minDiff = diff;
closestRatio = ratio;
}
}
if (closestRatio !== targetRatio) {
warnings.push({
type: "unsupported",
feature: "aspectRatio",
details: `Aspect ratio ${targetRatio} is not supported. Using closest supported ratio: ${closestRatio}`
});
}
return closestRatio;
}
sizeToAspectRatio(size, supportedRatios, warnings) {
if (!size) return void 0;
const parsedSize = this.parseSize(size);
if (!parsedSize) {
warnings.push({
type: "unsupported",
feature: "size",
details: `Invalid size format: ${size}. Expected format: WIDTHxHEIGHT`
});
return void 0;
}
const ratio = parsedSize.width / parsedSize.height;
let closestRatio = supportedRatios[0];
let minDiff = Infinity;
for (const aspectRatio of supportedRatios) {
const [w, h] = aspectRatio.split(":").map(Number);
if (!w || !h) continue;
const currentRatio = w / h;
const diff = Math.abs(currentRatio - ratio);
if (diff < minDiff) {
minDiff = diff;
closestRatio = aspectRatio;
}
}
const [closestW, closestH] = closestRatio.split(":").map(Number);
const closestRatioValue = closestW / closestH;
if (Math.abs(closestRatioValue - ratio) > 0.05) {
warnings.push({
type: "compatibility",
feature: "size",
details: `Size ${size} (ratio ${ratio.toFixed(2)}) converted to closest supported aspect ratio: ${closestRatio}`
});
}
return closestRatio;
}
};
// src/models/auraflow.ts
var AuraflowHandler = class extends BaseModelHandler {
  /**
   * Submits `prompt` to the AuraFlow endpoint as multipart form data and
   * returns the generated image(s) downloaded as base64. AuraFlow accepts
   * only a prompt, so n>1, size, aspectRatio, seed and providerOptions each
   * produce an "unsupported" warning instead of being forwarded.
   */
  async processRequest({
    prompt,
    n,
    size,
    aspectRatio,
    seed,
    providerOptions,
    headers,
    abortSignal
  }) {
    const warnings = [];
    if (n != null && n > 1) {
      warnings.push({
        type: "unsupported",
        feature: "n",
        details: "AuraFlow does not support batch generation"
      });
    }
    if (size != null) {
      warnings.push({ type: "unsupported", feature: "size" });
    }
    if (aspectRatio != null) {
      warnings.push({ type: "unsupported", feature: "aspectRatio" });
    }
    if (seed != null) {
      warnings.push({ type: "unsupported", feature: "seed" });
    }
    if (providerOptions.ai302 != null) {
      warnings.push({
        type: "unsupported",
        feature: "providerOptions"
      });
    }
    if (!prompt) {
      throw new Error("Prompt is required for AuraFlow");
    }
    const form = new FormData();
    form.append("prompt", prompt);
    const baseHeaders = await (0, import_provider_utils2.resolve)(this.config.headers());
    const { value: apiResponse, responseHeaders } = await (0, import_provider_utils2.postToApi)({
      url: this.config.url({
        modelId: this.modelId,
        path: "/302/submit/aura-flow"
      }),
      headers: (0, import_provider_utils2.combineHeaders)(baseHeaders, headers),
      body: {
        content: form,
        values: { prompt }
      },
      failedResponseHandler: statusCodeErrorResponseHandler,
      successfulResponseHandler: createJsonResponseHandler(),
      abortSignal,
      fetch: this.config.fetch
    });
    const imageUrls = apiResponse.images.map((img) => img.url).filter(Boolean);
    return {
      images: await this.downloadImages(imageUrls),
      warnings,
      response: {
        timestamp: new Date(),
        modelId: this.modelId,
        headers: responseHeaders
      }
    };
  }
};
// src/models/bagel.ts
var import_provider_utils3 = require("@ai-sdk/provider-utils");
var BagelHandler = class extends BaseModelHandler {
  /**
   * Submits `prompt` to the Bagel endpoint. Bagel supports neither batch
   * generation nor size/aspect-ratio/seed control, so each of those inputs
   * is turned into an "unsupported" warning. Caller-supplied ai302 options
   * are forwarded verbatim and take precedence over the use_thought default.
   */
  async processRequest({
    prompt,
    n,
    size,
    aspectRatio,
    seed,
    providerOptions,
    headers,
    abortSignal
  }) {
    const warnings = [];
    const unsupported = (feature, details) => warnings.push({ type: "unsupported", feature, details });
    if (n != null && n > 1) {
      unsupported("n", "Bagel does not support batch generation");
    }
    if (size != null) {
      unsupported("size", "Bagel does not support custom size");
    }
    if (aspectRatio != null) {
      unsupported("aspectRatio", "Bagel does not support custom aspect ratio");
    }
    if (seed != null) {
      unsupported("seed", "Bagel does not support custom seed");
    }
    const ai302 = providerOptions.ai302;
    // use_thought defaults to false; an explicit caller value (also present
    // in the spread below) wins either way.
    const useThought = ai302 == null || ai302.use_thought == null ? false : ai302.use_thought;
    const extraOptions = ai302 != null ? ai302 : {};
    const baseHeaders = await (0, import_provider_utils3.resolve)(this.config.headers());
    const { value: apiResponse, responseHeaders } = await (0, import_provider_utils3.postJsonToApi)({
      url: this.config.url({ modelId: this.modelId, path: "/302/submit/bagel" }),
      headers: (0, import_provider_utils3.combineHeaders)(baseHeaders, headers),
      body: {
        prompt,
        use_thought: useThought,
        ...extraOptions
      },
      failedResponseHandler: statusCodeErrorResponseHandler,
      successfulResponseHandler: createJsonResponseHandler(),
      abortSignal,
      fetch: this.config.fetch
    });
    const imageUrls = apiResponse.images.map((img) => img.url).filter(Boolean);
    return {
      images: await this.downloadImages(imageUrls),
      warnings,
      response: {
        timestamp: new Date(),
        modelId: this.modelId,
        headers: responseHeaders
      }
    };
  }
};
// src/models/cogview.ts
var import_provider_utils4 = require("@ai-sdk/provider-utils");
// Resolutions accepted by the CogView-4 endpoint ("WxH" strings); requests
// are snapped to this list via BaseModelHandler.validateSizeOption.
var SUPPORTED_SIZE_OPTIONS = [
  "1024x1024",
  "768x1344",
  "864x1152",
  "1344x768",
  "1152x864",
  "1440x720",
  "720x1440"
];
var CogViewHandler = class extends BaseModelHandler {
  /**
   * Text-to-image via the CogView-4 endpoint
   * (/bigmodel/api/paas/v4/images/generations). An explicit size takes
   * precedence over aspectRatio; whichever is given is snapped to
   * SUPPORTED_SIZE_OPTIONS. n>1 and seed are warned about, not forwarded.
   */
  async processRequest({
    prompt,
    n,
    size,
    aspectRatio,
    seed,
    providerOptions,
    headers,
    abortSignal
  }) {
    const warnings = [];
    if (n != null && n > 1) {
      warnings.push({
        type: "unsupported",
        feature: "n",
        details: "CogView-4 does not support batch generation"
      });
    }
    if (seed != null) {
      warnings.push({
        type: "unsupported",
        feature: "seed",
        details: "CogView-4 does not support seed parameter"
      });
    }
    // Explicit size wins; otherwise derive one from the aspect ratio.
    const requestedSize = size ? this.parseSize(size) : aspectRatio ? this.aspectRatioToSize(aspectRatio, 1024, warnings) : void 0;
    let sizeString;
    if (requestedSize) {
      const snapped = this.validateSizeOption(requestedSize, SUPPORTED_SIZE_OPTIONS, warnings);
      sizeString = `${snapped.width}x${snapped.height}`;
    }
    const modelVariant = this.modelId === "cogview-4-250304" ? "cogview-4-250304" : "cogview-4";
    const extraOptions = providerOptions.ai302 != null ? providerOptions.ai302 : {};
    const baseHeaders = await (0, import_provider_utils4.resolve)(this.config.headers());
    const { value: apiResponse, responseHeaders } = await (0, import_provider_utils4.postJsonToApi)({
      url: this.config.url({ modelId: this.modelId, path: "/bigmodel/api/paas/v4/images/generations" }),
      headers: (0, import_provider_utils4.combineHeaders)(baseHeaders, headers),
      body: {
        model: modelVariant,
        prompt,
        size: sizeString,
        ...extraOptions
      },
      failedResponseHandler: statusCodeErrorResponseHandler,
      successfulResponseHandler: createJsonResponseHandler(),
      abortSignal,
      fetch: this.config.fetch
    });
    const imageUrls = apiResponse.data.map((img) => img.url).filter(Boolean);
    return {
      images: await this.downloadImages(imageUrls),
      warnings,
      response: {
        timestamp: new Date(),
        modelId: this.modelId,
        headers: responseHeaders
      }
    };
  }
};
// src/models/dalle.ts
var import_provider_utils5 = require("@ai-sdk/provider-utils");
var SUPPORTED_SIZE_OPTIONS2 = ["256x256", "512x512", "1024x1024"];
// DALL-E text-to-image via the OpenAI-compatible /v1/images/generations path.
var DallEHandler = class extends BaseModelHandler {
  /**
   * Generates an image. Size resolution order: explicit `size`, then a size
   * derived from `aspectRatio` (base 1024), then a 1024x1024 default; the
   * result is snapped to SUPPORTED_SIZE_OPTIONS2. n>1 and seed only emit
   * warnings — they are not forwarded.
   */
  async processRequest({
    prompt,
    n,
    size,
    aspectRatio,
    seed,
    providerOptions,
    headers,
    abortSignal
  }) {
    var _a;
    const warnings = [];
    if (n != null && n > 1) {
      warnings.push({ type: "unsupported", feature: "n", details: "DALL-E 3 does not support batch generation" });
    }
    if (size != null && aspectRatio != null) {
      warnings.push({ type: "unsupported", feature: "aspectRatio", details: "When size is provided, aspectRatio will be ignored" });
    } else if (size == null && aspectRatio != null) {
      warnings.push({ type: "compatibility", feature: "aspectRatio", details: "Using size calculated from aspect ratio with base size 1024" });
    }
    if (seed != null) {
      warnings.push({ type: "unsupported", feature: "seed" });
    }
    let parsedSize = this.parseSize(size) || this.aspectRatioToSize(aspectRatio, 1024, warnings) || {
      width: 1024,
      height: 1024
    };
    parsedSize = this.validateSizeOption(parsedSize, SUPPORTED_SIZE_OPTIONS2, warnings);
    const resolvedHeaders = await (0, import_provider_utils5.resolve)(this.config.headers());
    const { value: response, responseHeaders } = await (0, import_provider_utils5.postJsonToApi)({
      url: this.config.url({ modelId: this.modelId, path: "/v1/images/generations" }),
      headers: (0, import_provider_utils5.combineHeaders)(resolvedHeaders, headers),
      body: {
        prompt,
        model: "dall-e-3",
        size: `${parsedSize.width}x${parsedSize.height}`,
        // Caller-supplied ai302 options override everything above.
        ...(_a = providerOptions.ai302) != null ? _a : {}
      },
      failedResponseHandler: statusCodeErrorResponseHandler,
      successfulResponseHandler: createJsonResponseHandler(),
      abortSignal,
      fetch: this.config.fetch
    });
    // Download each returned URL as base64; downloadImages drops individual
    // failures and throws only if every download fails.
    const urls = response.data.map((img) => img.url).filter(Boolean);
    const images = await this.downloadImages(urls);
    return {
      images,
      warnings,
      response: {
        timestamp: /* @__PURE__ */ new Date(),
        modelId: this.modelId,
        headers: responseHeaders
      }
    };
  }
};
// src/models/flux-pro-dev.ts
var import_provider_utils6 = require("@ai-sdk/provider-utils");
// src/ai302-image-settings.ts
// Per-model backend capabilities, keyed by model id. `supportsSize: true`
// means the backend accepts explicit pixel dimensions; false means only an
// aspect ratio (or nothing) can be sent. Built from [id, flag] pairs to keep
// the table compact; insertion order matches the original declaration.
var modelToBackendConfig = Object.fromEntries(
  [
    ["flux-v1.1-ultra", false],
    ["flux-pro-v1.1", true],
    ["flux-pro", true],
    ["flux-dev", true],
    ["flux-schnell", true],
    ["flux-1-krea", true],
    ["flux-kontext-max", true],
    ["flux-kontext-pro", true],
    ["flux-2-pro", true],
    ["flux-2-flex", true],
    ["flux-2-max", true],
    ["ideogram/V_1", true],
    ["ideogram/V_1_TURBO", true],
    ["ideogram/V_2", true],
    ["ideogram/V_2_TURBO", true],
    ["ideogram/V_2A", true],
    ["ideogram/V_2A_TURBO", true],
    ["dall-e-3", true],
    ["recraftv3", true],
    ["recraftv2", true],
    ["sdxl-lightning", true],
    ["sdxl-lightning-v2", true],
    ["sdxl-lightning-v3", true],
    ["kolors", true],
    ["aura-flow", true],
    ["luma-photon", true],
    ["sdxl", true],
    ["sd3-ultra", false],
    ["sd3v2", true],
    ["sd3.5-large", true],
    ["sd3.5-large-turbo", true],
    ["sd3.5-medium", true],
    ["midjourney/6.0", false],
    ["midjourney/6.1", false],
    ["midjourney/7.0", false],
    ["nijijourney/6.0", false],
    ["google-imagen-3", true],
    ["google-imagen-3-fast", true],
    ["google-imagen-4-preview", false],
    ["doubao-general-v2.1-l", true],
    ["doubao-general-v2.0-l", true],
    ["doubao-general-v2.0", true],
    ["doubao-general-v3.0", true],
    ["doubao-seedream-3-0-t2i-250415", true],
    ["doubao-seedream-4-0-250828", true],
    ["doubao-seedream-4-5-251128", true],
    ["lumina-image-v2", true],
    ["omnigen-v1", true],
    ["playground-v25", true],
    ["cogview-4", true],
    ["cogview-4-250304", true],
    ["minimaxi-image-01", false],
    ["irag-1.0", false],
    ["hidream-i1-full", true],
    ["hidream-i1-dev", true],
    ["hidream-i1-fast", true],
    ["gpt-image-1", true],
    ["gpt-image-1.5", true],
    ["bagel", false],
    ["soul", true],
    ["kling-v1", true],
    ["kling-v1-5", true],
    ["kling-v2", true],
    ["kling-v2-1", true],
    ["qwen-image", false],
    ["gemini-2.5-flash-image-preview", false],
    ["gemini-3-pro-image-preview", false],
    ["z-image-turbo", true],
    ["vidu-viduq1", false],
    ["vidu-viduq2", false],
    ["kling-o1", false],
    ["wan2.6-image", true]
  ].map(([modelId, supportsSize]) => [modelId, { supportsSize }])
);
// src/models/flux-pro-dev.ts
var FluxProDevHandler = class extends BaseModelHandler {
  /**
   * Text-to-image via /302/submit/<modelId> for the Flux Pro/Dev family.
   * Size handling is driven by modelToBackendConfig[modelId].supportsSize:
   * size-capable backends receive explicit pixel dimensions (validated to
   * multiples of 32 in [256, 1440]); others receive the raw aspect_ratio.
   */
  async processRequest({
    prompt,
    n,
    size,
    aspectRatio,
    seed,
    providerOptions,
    headers,
    abortSignal
  }) {
    const warnings = [];
    if (n != null && n > 1) {
      warnings.push({
        type: "unsupported",
        feature: "n",
        details: "Flux Pro does not support batch generation"
      });
    }
    const backendConfig = modelToBackendConfig[this.modelId];
    const sizeCapable = backendConfig != null && !!backendConfig.supportsSize;
    if (sizeCapable) {
      if (size != null && aspectRatio != null) {
        warnings.push({
          type: "unsupported",
          feature: "aspectRatio",
          details: "When size is provided, aspectRatio will be ignored"
        });
      } else if (size == null && aspectRatio != null) {
        warnings.push({
          type: "compatibility",
          feature: "aspectRatio",
          details: "Using size calculated from aspect ratio with base size 1024"
        });
      }
    }
    // Explicit size > aspect-ratio-derived size > 1024x1024 default.
    let targetSize = this.parseSize(size) || this.aspectRatioToSize(aspectRatio, 1024, warnings) || {
      width: 1024,
      height: 1024
    };
    if (sizeCapable) {
      targetSize = this.validateDimensionsMultipleOf32(targetSize, warnings, 256, 1440);
    }
    const extraOptions = providerOptions.ai302 != null ? providerOptions.ai302 : {};
    const resolvedHeaders = await (0, import_provider_utils6.resolve)(this.config.headers());
    const { value: apiResponse, responseHeaders } = await (0, import_provider_utils6.postJsonToApi)({
      url: this.config.url({
        modelId: this.modelId,
        path: `/302/submit/${this.modelId}`
      }),
      headers: (0, import_provider_utils6.combineHeaders)(resolvedHeaders, headers),
      body: {
        prompt,
        // NOTE(review): both `image_size` and `size` carry identical values —
        // presumably different backend variants read different keys; confirm.
        image_size: sizeCapable ? { width: targetSize.width, height: targetSize.height } : void 0,
        size: sizeCapable ? { width: targetSize.width, height: targetSize.height } : void 0,
        aspect_ratio: sizeCapable ? void 0 : aspectRatio,
        seed,
        ...extraOptions
      },
      failedResponseHandler: statusCodeErrorResponseHandler,
      successfulResponseHandler: createJsonResponseHandler(),
      abortSignal,
      fetch: this.config.fetch
    });
    const imageUrls = apiResponse.images.map((img) => img.url).filter(Boolean);
    return {
      images: await this.downloadImages(imageUrls),
      warnings,
      response: {
        timestamp: new Date(),
        modelId: this.modelId,
        headers: responseHeaders
      }
    };
  }
};
// src/models/flux-1-krea.ts
var import_provider_utils7 = require("@ai-sdk/provider-utils");
var FluxKreaHandler = class extends BaseModelHandler {
  /**
   * Text-to-image via /302/submit/<modelId> for Flux-1-Krea. Mirrors the
   * Flux Pro/Dev flow (size validated to multiples of 32 in [256, 1440] when
   * the backend is size-capable) and additionally forwards the
   * num_inference_steps / guidance_scale provider options explicitly.
   */
  async processRequest({
    prompt,
    n,
    size,
    aspectRatio,
    seed,
    providerOptions,
    headers,
    abortSignal
  }) {
    const warnings = [];
    if (n != null && n > 1) {
      warnings.push({
        type: "unsupported",
        feature: "n",
        details: "Flux-1-Krea does not support batch generation"
      });
    }
    const backendConfig = modelToBackendConfig[this.modelId];
    const sizeCapable = backendConfig != null && !!backendConfig.supportsSize;
    if (sizeCapable) {
      if (size != null && aspectRatio != null) {
        warnings.push({
          type: "unsupported",
          feature: "aspectRatio",
          details: "When size is provided, aspectRatio will be ignored"
        });
      } else if (size == null && aspectRatio != null) {
        warnings.push({
          type: "compatibility",
          feature: "aspectRatio",
          details: "Using size calculated from aspect ratio with base size 1024"
        });
      }
    }
    // Explicit size > aspect-ratio-derived size > 1024x1024 default.
    let targetSize = this.parseSize(size) || this.aspectRatioToSize(aspectRatio, 1024, warnings) || {
      width: 1024,
      height: 1024
    };
    if (sizeCapable) {
      targetSize = this.validateDimensionsMultipleOf32(targetSize, warnings, 256, 1440);
    }
    const ai302 = providerOptions.ai302;
    const resolvedHeaders = await (0, import_provider_utils7.resolve)(this.config.headers());
    const { value: apiResponse, responseHeaders } = await (0, import_provider_utils7.postJsonToApi)({
      url: this.config.url({
        modelId: this.modelId,
        path: `/302/submit/${this.modelId}`
      }),
      headers: (0, import_provider_utils7.combineHeaders)(resolvedHeaders, headers),
      body: {
        prompt,
        image_size: sizeCapable ? { width: targetSize.width, height: targetSize.height } : void 0,
        num_inference_steps: ai302 == null ? void 0 : ai302.num_inference_steps,
        guidance_scale: ai302 == null ? void 0 : ai302.guidance_scale,
        seed,
        // Caller-supplied ai302 options spread last, so they win.
        ...ai302 != null ? ai302 : {}
      },
      failedResponseHandler: statusCodeErrorResponseHandler,
      successfulResponseHandler: createJsonResponseHandler(),
      abortSignal,
      fetch: this.config.fetch
    });
    const imageUrls = apiResponse.images.map((img) => img.url).filter(Boolean);
    return {
      images: await this.downloadImages(imageUrls),
      warnings,
      response: {
        timestamp: new Date(),
        modelId: this.modelId,
        headers: responseHeaders
      }
    };
  }
};
// src/models/flux-kontext.ts
var import_provider_utils8 = require("@ai-sdk/provider-utils");
// Flux Kontext async-task polling: poll every 2 s, give up after 300 s.
var POLL_INTERVAL = 2e3;
var MAX_POLL_TIME = 3e5;
var FluxKontextHandler = class extends BaseModelHandler {
getModelName() {
switch (this.modelId) {
case "flux-kontext-max":
return "flux-kontext-max";
case "flux-kontext-pro":
return "flux-kontext-pro";
default:
return "flux-kontext-pro";
}
}
async pollTask(taskId, resolvedHeaders, abortSignal) {
const startTime = Date.now();
const fetchFn = this.config.fetch || fetch;
while (true) {
if (abortSignal == null ? void 0 : abortSignal.aborted) {
throw new Error("Task polling aborted");
}
if (Date.now() - startTime > MAX_POLL_TIME) {
throw new Error("Task polling timed out");
}
const response = await fetchFn(
`${this.config.url({ modelId: this.modelId, path: `/flux/v1/get_result?id=${taskId}` })}`,
{
method: "GET",
headers: resolvedHeaders,
signal: abortSignal
}
);
if (!response.ok) {
throw new Error(`HTTP error! status: ${response.status}`);
}
const data = await response.json();
if (data.status === "Ready" && data.result) {
return data;
}
if (data.status === "Failed" || data.status === "Error") {
throw new Error(`Task failed with status: ${data.status}`);
}
await new Promise((resolve44) => setTimeout(resolve44, POLL_INTERVAL));
}
}
async processRequest({
prompt,
n,
size,
aspectRatio,
seed,
providerOptions,
headers,
abortSignal
}) {
var _a;
const warnings = [];
if (n != null && n > 1) {
warnings.push({
type: "unsupported",
feature: "n",
details: "Flux Kontext generates one image per request"
});
}
let finalAspectRatio;
if (size) {
const supportedRatios = [
"21:9",
"16:9",
"3:2",
"4:3",
"1:1",
"3:4",
"2:3",
"9:16",
"9:21"
];
const sizeToAspectRatio = this.sizeToAspectRatio(
size,
supportedRatios,
warnings
);
if (sizeToAspectRatio) {
finalAspectRatio = sizeToAspectRatio;
}
} else if (aspectRatio) {
const supportedRatios = [
"21:9",
"16:9",
"3:2",
"4:3",
"1:1",
"3:4",
"2:3",
"9:16",
"9:21"
];
if (supportedRatios.includes(aspectRatio)) {
finalAspectRatio = aspectRatio;
} else {
warnings.push({
type: "unsupported",
feature: "aspectRatio",
details: `Aspect ratio ${aspectRatio} not supported. Supported ratios: ${supportedRatios.join(", ")}`
});
}
}
if (size != null && aspectRatio != null) {
warnings.push({
type: "unsupported",
feature: "aspectRatio",
details: "Both size and aspectRatio provided. Size will be converted to aspect ratio and aspectRatio parameter will be ignored."
});
}
const ai302Options = (providerOptions == null ? void 0 : providerOptions.ai302) || {};
const inputImage = ai302Options.input_image;
const promptUpsampling = ai302Options.prompt_upsampling;
const safetyTolerance = ai302Options.safety_tolerance;
const outputFormat = ai302Options.output_format;
const webhookUrl = ai302Options.webhook_url;
const webhookSecret = ai302Options.webhook_secret;
const resolvedHeaders = await (0, import_provider_utils8.resolve)(this.config.headers());
const { value: submitResponse, responseHeaders } = await (0, import_provider_utils8.postJsonToApi)({
url: this.config.url({
modelId: this.modelId,
path: `/flux/v1/${this.getModelName()}`
}),
headers: (0, import_provider_utils8.combineHeaders)(resolvedHeaders, headers),
body: {
prompt,
...inputImage !== void 0 && { input_image: inputImage },
...seed !== void 0 && { seed },
...finalAspectRatio !== void 0 && {
aspect_ratio: finalAspectRatio
},
...outputFormat !== void 0 && { output_format: outputFormat },
...webhookUrl !== void 0 && { webhook_url: webhookUrl },
...webhookSecret !== void 0 && { webhook_secret: webhookSecret },
...promptUpsampling !== void 0 && {
prompt_upsampling: promptUpsampling
},
...safetyTolerance !== void 0 && {
safety_tolerance: safetyTolerance
},
...ai302Options
},
failedResponseHandler: statusCodeErrorResponseHandler,
successfulResponseHandler: createJsonResponseHandler(),
abortSignal,
fetch: this.config.fetch
});
const taskResult = await this.pollTask(submitResponse.id, resolvedHeaders, abortSignal);
if (!((_a = taskResult.result) == null ? void 0 : _a.sample)) {
throw new Error("No image generated");
}
const images = await this.downloadImages([taskResult.result.sample]);
return {
images,
warnings,
response: {
timestamp: /* @__PURE__ */ new Date(),
modelId: this.modelId,
headers: responseHeaders
}
};
}
};
// src/models/flux-2-pro.ts
var import_provider_utils9 = require("@ai-sdk/provider-utils");
// Flux 2 async-task polling: poll every 2 s, give up after 300 s.
var POLL_INTERVAL2 = 2e3;
var MAX_POLL_TIME2 = 3e5;
var Flux2ProHandler = class extends BaseModelHandler {
getEndpointPath() {
switch (this.modelId) {
case "flux-2-flex":
return "/flux/v1/flux-2-flex";
case "flux-2-max":
return "/flux/v1/flux-2-max";
case "flux-2-pro":
default:
return "/flux/v1/flux-2-pro";
}
}
getModelDisplayName() {
switch (this.modelId) {
case "flux-2-flex":
return "Flux-2-Flex";
case "flux-2-max":
return "Flux-2-Max";
case "flux-2-pro":
default:
return "Flux-2-Pro";
}
}
async pollTask(taskId, resolvedHeaders, abortSignal) {
const startTime = Date.now();
const fetchFn = this.config.fetch || fetch;
while (true) {
if (abortSignal == null ? void 0 : abortSignal.aborted) {
throw new Error("Task polling aborted");
}
if (Date.now() - startTime > MAX_POLL_TIME2) {
throw new Error("Task polling timed out");
}
const response = await fetchFn(
`${this.config.url({ modelId: this.modelId, path: `/flux/v1/get_result?id=${taskId}` })}`,
{
method: "GET",
headers: resolvedHeaders,
signal: abortSignal
}
);
if (!response.ok) {
throw new Error(`HTTP error! status: ${response.status}`);
}
const data = await response.json();
if (data.status === "Ready" && data.result) {
return data;
}
if (data.status === "Failed" || data.status === "Error") {
throw new Error(`Task failed with status: ${data.status}`);
}
await new Promise((resolve44) => setTimeout(resolve44, POLL_INTERVAL2));
}
}
async processRequest({
prompt,
n,
size,
aspectRatio,
seed,
providerOptions,
headers,
abortSignal
}) {
var _a;
const warnings = [];
if (n != null && n > 1) {
warnings.push({
type: "unsupported",
feature: "n",
details: `${this.getModelDisplayName()} generates one image per request`
});
}
let width;
let height;
if (size) {
const parsedSize = this.parseSize(size);
if (parsedSize) {
const validatedSize = this.validateDimensionsMultipleOf32(
parsedSize,
warnings,
64,
4096
);
width = validatedSize.width;
height = validatedSize.height;
}
} else if (aspectRatio) {
const calculatedSize = this.aspectRatioToSize(aspectRatio, 1024, warnings);
if (calculatedSize) {
width = calculatedSize.width;
height = calculatedSize.height;
}
}
if (size != null && aspectRatio != null) {
warnings.push({
type: "unsupported",
feature: "aspectRatio",
details: "Both size and aspectRatio provided. Size will be used and aspectRatio will be ignored."
});
}
const ai302Options = (providerOptions == null ? void 0 : providerOptions.ai302) || {};
const inputImage = ai302Options.input_image;
const inputImage2 = ai302Options.input_image_2;
const inputImage3 = ai302Options.input_image_3;
const inputImage4 = ai302Options.input_image_4;
const inputImage5 = ai302Options.input_image_5;
const inputImage6 = ai302Options.input_image_6;
const inputImage7 = ai302Options.input_image_7;
const inputImage8 = ai302Options.input_image_8;
const safetyTolerance = ai302Options.safety_tolerance;
const outputFormat = ai302Options.output_format;
const webhookUrl = ai302Options.webhook_url;
const webhookSecret = ai302Options.webhook_secret;
const resolvedHeaders = await (0, import_provider_utils9.resolve)(this.config.headers());
const { value: submitResponse, responseHeaders } = await (0, import_provider_utils9.postJsonToApi)({
url: this.config.url({
modelId: this.modelId,
path: this.getEndpointPath()
}),
headers: (0, import_provider_utils9.combineHeaders)(resolvedHeaders, headers),
body: {
prompt,
...inputImage !== void 0 && { input_image: inputImage },
...inputImage2 !== void 0 && { input_image_2: inputImage2 },
...inputImage3 !== void 0 && { input_image_3: inputImage3 },
...inputImage4 !== void 0 && { input_image_4: inputImage4 },
...inputImage5 !== void 0 && { input_image_5: inputImage5 },
...inputImage6 !== void 0 && { input_image_6: inputImage6 },
...inputImage7 !== void 0 && { input_image_7: inputImage7 },
...inputImage8 !== void 0 && { input_image_8: inputImage8 },
...seed !== void 0 && { seed },
...width !== void 0 && { width },
...height !== void 0 && { height },
...outputFormat !== void 0 && { output_format: outputFormat },
...webhookUrl !== void 0 && { webhook_url: webhookUrl },
...webhookSecret !== void 0 && { webhook_secret: webhookSecret },
...safetyTolerance !== void 0 && {
safety_tolerance: safetyTolerance
}
},
failedResponseHandler: statusCodeErrorResponseHandler,
successfulResponseHandler: createJsonResponseHandler(),
abortSignal,
fetch: this.config.fetch
});
const taskResult = await this.pollTask(submitResponse.id, resolvedHeaders, abortSignal);
if (!((_a = taskResult.result) == null ? void 0 : _a.sample)) {
throw new Error("No image generated");
}
const images = await this.downloadImages([taskResult.result.sample]);
return {
images,
warnings,
response: {
timestamp: /* @__PURE__ */ new Date(),
modelId: this.modelId,
headers: responseHeaders
}
};
}
};
// src/models/gpt-image.ts
var import_provider_utils10 = require("@ai-sdk/provider-utils");
// Image sizes accepted by the GPT image generation endpoint
// (square, landscape, portrait).
var SUPPORTED_SIZES = ["1024x1024", "1536x1024", "1024x1536"];
// Handler for GPT image models via the OpenAI-compatible
// /v1/images/generations endpoint. response_format is sent as a query
// parameter (not in the JSON body) because the upstream endpoint reads it there.
var GPTImageHandler = class extends BaseModelHandler {
  async processRequest({
    prompt,
    n,
    size,
    aspectRatio,
    providerOptions,
    headers,
    abortSignal
  }) {
    const warnings = [];
    // Prefer an explicit size; otherwise derive one from the aspect ratio.
    let parsedSize = this.parseSize(size);
    if (!parsedSize && aspectRatio) {
      parsedSize = this.aspectRatioToSize(aspectRatio, 1024, warnings);
    }
    let sizeStr = "1024x1024";
    if (parsedSize) {
      parsedSize = this.validateSizeOption(parsedSize, SUPPORTED_SIZES, warnings);
      sizeStr = `${parsedSize.width}x${parsedSize.height}`;
    }
    // Fix: guard against a missing providerOptions bag (previously
    // `providerOptions.ai302` threw a TypeError when providerOptions was
    // undefined; the other handlers already guard this way).
    const ai302Options = (providerOptions == null ? void 0 : providerOptions.ai302) || {};
    const requestBody = {
      prompt,
      model: this.modelId,
      size: sizeStr,
      n: n || 1,
      ...ai302Options
    };
    // response_format goes on the query string; strip it from the body so it
    // is not sent twice.
    const responseFormat = ai302Options.response_format || "url";
    if (requestBody.response_format) {
      delete requestBody.response_format;
    }
    const baseUrl = this.config.url({ modelId: this.modelId, path: "/v1/images/generations" });
    const url = `${baseUrl}?response_format=${encodeURIComponent(responseFormat)}`;
    const resolvedHeaders = await (0, import_provider_utils10.resolve)(this.config.headers());
    const { value: response, responseHeaders } = await (0, import_provider_utils10.postJsonToApi)({
      url,
      headers: (0, import_provider_utils10.combineHeaders)(resolvedHeaders, headers),
      body: requestBody,
      failedResponseHandler: statusCodeErrorResponseHandler,
      successfulResponseHandler: createJsonResponseHandler(),
      abortSignal,
      fetch: this.config.fetch
    });
    // Download every returned URL; entries without a url are skipped.
    const urls = response.data.map((img) => img.url || "").filter(Boolean);
    const images = await this.downloadImages(urls);
    return {
      images,
      warnings,
      response: {
        timestamp: /* @__PURE__ */ new Date(),
        modelId: this.modelId,
        headers: responseHeaders
      }
    };
  }
};
// src/models/hidream.ts
var import_provider_utils11 = require("@ai-sdk/provider-utils");
// Handler for Hidream models, submitted via the /302/submit/<modelId>
// endpoint. Hidream generates a single image per request; n > 1 produces a
// warning instead of a batch.
var HidreamHandler = class extends BaseModelHandler {
  async processRequest({
    prompt,
    n,
    size,
    aspectRatio,
    seed,
    providerOptions,
    headers,
    abortSignal
  }) {
    const warnings = [];
    if (n != null && n > 1) {
      warnings.push({
        type: "unsupported",
        feature: "n",
        details: "Hidream does not support batch generation"
      });
    }
    // Prefer an explicit size; otherwise derive one from the aspect ratio,
    // then snap dimensions to the required multiple of 32.
    let parsedSize = this.parseSize(size);
    if (!parsedSize && aspectRatio) {
      parsedSize = this.aspectRatioToSize(aspectRatio, 1024, warnings);
    }
    if (parsedSize) {
      parsedSize = this.validateDimensionsMultipleOf32(parsedSize, warnings);
    }
    // Fix: guard against a missing providerOptions bag (previously
    // `providerOptions.ai302` threw a TypeError when providerOptions was
    // undefined; the other handlers already guard this way).
    const ai302Options = (providerOptions == null ? void 0 : providerOptions.ai302) || {};
    const resolvedHeaders = await (0, import_provider_utils11.resolve)(this.config.headers());
    const { value: response, responseHeaders } = await (0, import_provider_utils11.postJsonToApi)({
      url: this.config.url({ modelId: this.modelId, path: `/302/submit/${this.modelId}` }),
      headers: (0, import_provider_utils11.combineHeaders)(resolvedHeaders, headers),
      body: {
        // image_size/seed may be undefined; JSON serialization drops them.
        prompt,
        image_size: parsedSize,
        seed,
        ...ai302Options
      },
      failedResponseHandler: statusCodeErrorResponseHandler,
      successfulResponseHandler: createJsonResponseHandler(),
      abortSignal,
      fetch: this.config.fetch
    });
    const urls = response.images.map((img) => img.url).filter(Boolean);
    const images = await this.downloadImages(urls);
    return {
      images,
      warnings,
      response: {
        timestamp: /* @__PURE__ */ new Date(),
        modelId: this.modelId,
        headers: responseHeaders
      }
    };
  }
};
// src/models/ideogram.ts
var import_provider_utils12 = require("@ai-sdk/provider-utils");
// src/ai302-types.ts
var import_v4 = require("zod/v4");
// Zod schemas describing 302AI image API payloads. Object schemas are
// "loose" so unrecognized fields returned by the API pass through without
// validation errors.

// A single generated image; every field may be absent depending on endpoint.
var ImageResponseSchema = import_v4.z.looseObject({
  content_type: import_v4.z.string().optional(),
  height: import_v4.z.number().optional(),
  url: import_v4.z.string().optional(),
  width: import_v4.z.number().optional()
});
// Explicit width/height pair used by the Flux request schemas.
var ImageSizeSchema = import_v4.z.looseObject({
  height: import_v4.z.number(),
  width: import_v4.z.number()
});
// Aspect ratios accepted by the Ideogram API (format: ASPECT_<W>_<H>).
var IdeogramAspectRatioSchema = import_v4.z.enum([
  "ASPECT_1_1",
  "ASPECT_10_16",
  "ASPECT_16_10",
  "ASPECT_9_16",
  "ASPECT_16_9",
  "ASPECT_3_2",
  "ASPECT_2_3",
  "ASPECT_4_3",
  "ASPECT_3_4",
  "ASPECT_1_3",
  "ASPECT_3_1"
]);
// Request body for the Flux v1.1 Ultra endpoint.
var FluxV11UltraRequestSchema = import_v4.z.looseObject({
  aspect_ratio: import_v4.z.string(),
  prompt: import_v4.z.string(),
  raw: import_v4.z.boolean()
});
// Response body for the Flux v1.1 Ultra endpoint.
var FluxV11UltraResponseSchema = import_v4.z.looseObject({
  has_nsfw_concepts: import_v4.z.array(import_v4.z.boolean()),
  images: import_v4.z.array(ImageResponseSchema),
  prompt: import_v4.z.string(),
  seed: import_v4.z.number(),
  // timings keys/values are endpoint-specific; intentionally untyped.
  timings: import_v4.z.record(import_v4.z.string(), import_v4.z.any())
});
// Request body for the Flux Pro v1.1 endpoint; all tuning knobs optional.
var FluxProV11RequestSchema = import_v4.z.looseObject({
  guidance_scale: import_v4.z.number().optional(),
  image_size: ImageSizeSchema.optional(),
  num_inference_steps: import_v4.z.number().optional(),
  prompt: import_v4.z.string()
});
// Response body for the Flux Pro v1.1 endpoint.
var FluxProV11ResponseSchema = import_v4.z.looseObject({
  has_nsfw_concepts: import_v4.z.array(import_v4.z.boolean()),
  images: import_v4.z.array(ImageResponseSchema),
  prompt: import_v4.z.string(),
  seed: import_v4.z.number(),
  timings: import_v4.z.record(import_v4.z.string(), import_v4.z.any())
});
// Request body for the Flux Pro/Dev endpoint; image_size is required here,
// with schema-level defaults for guidance and step count.
var FluxProDevRequestSchema = import_v4.z.looseObject({
  guidance_scale: import_v4.z.number().optional().default(3.5),
  image_size: ImageSizeSchema,
  num_inference_steps: import_v4.z.number().optional().default(28),
  prompt: import_v4.z.string()
});
// Response body for the Flux Pro/Dev endpoint; image fields are required
// (strict inner object, unlike the loose ImageResponseSchema).
var FluxProDevResponseSchema = import_v4.z.looseObject({
  images: import_v4.z.array(
    import_v4.z.object({
      url: import_v4.z.string(),
      width: import_v4.z.number(),
      height: import_v4.z.number(),
      content_type: import_v4.z.string()
    })
  ),
  timings: import_v4.z.record(import_v4.z.string(), import_v4.z.number()),
  seed: import_v4.z.number(),
  has_nsfw_concepts: import_v4.z.array(import_v4.z.boolean()),
  prompt: import_v4.z.string()
});
// Request body for the Flux Schnell endpoint; everything but prompt optional.
var FluxSchnellRequestSchema = import_v4.z.looseObject({
  image_size: ImageSizeSchema.optional(),
  num_inference_steps: import_v4.z.number().optional(),
  prompt: import_v4.z.string()
});
// Response body for the Flux Schnell endpoint (same shape as Pro/Dev).
var FluxSchnellResponseSchema = import_v4.z.looseObject({
  images: import_v4.z.array(
    import_v4.z.object({
      url: import_v4.z.string(),
      width: import_v4.z.number(),
      height: import_v4.z.number(),
      content_type: import_v4.z.string()
    })
  ),
  timings: import_v4.z.record(import_v4.z.string(), import_v4.z.number()),
  seed: import_v4.z.number(),
  has_nsfw_concepts: import_v4.z.array(import_v4.z.boolean()),
  prompt: import_v4.z.string()
});
// Fixed output resolutions accepted by the Ideogram API
// (format: RESOLUTION_<WIDTH>_<HEIGHT>, in pixels).
var IdeogramResolutionSchema = import_v4.z.enum([
  "RESOLUTION_512_1536",
  "RESOLUTION_576_1408",
  "RESOLUTION_576_1472",
  "RESOLUTION_576_1536",
  "RESOLUTION_640_1024",
  "RESOLUTION_640_1344",
  "RESOLUTION_640_1408",
  "RESOLUTION_640_1472",
  "RESOLUTION_640_1536",
  "RESOLUTION_704_1152",
  "RESOLUTION_704_1216",
  "RESOLUTION_704_1280",
  "RESOLUTION_704_1344",
  "RESOLUTION_704_1408",
  "RESOLUTION_704_1472",
  "RESOLUTION_720_1280",
  "RESOLUTION_736_1312",
  "RESOLUTION_768_1024",
  "RESOLUTION_768_1088",
  "RESOLUTION_768_1152",
  "RESOLUTION_768_1216",
  "RESOLUTION_768_1232",
  "RESOLUTION_768_1280",
  "RESOLUTION_768_1344",
  "RESOLUTION_832_960",
  "RESOLUTION_832_1024",
  "RESOLUTION_832_1088",
  "RESOLUTION_832_1152",
  "RESOLUTION_832_1216",
  "RESOLUTION_832_1248",
  "RESOLUTION_864_1152",
  "RESOLUTION_896_960",
  "RESOLUTION_896_1024",
  "RESOLUTION_896_1088",
  "RESOLUTION_896_1120",
  "RESOLUTION_896_1152",
  "RESOLUTION_960_832",
  "RESOLUTION_960_896",
  "RESOLUTION_960_1024",
  "RESOLUTION_960_1088",
  "RESOLUTION_1024_640",
  "RESOLUTION_1024_768",
  "RESOLUTION_1024_832",
  "RESOLUTION_1024_896",
  "RESOLUTION_1024_960",
  "RESOLUTION_1024_1024",
  "RESOLUTION_1088_768",
  "RESOLUTION_1088_832",
  "RESOLUTION_1088_896",
  "RESOLUTION_1088_960",
  "RESOLUTION_1120_896",
  "RESOLUTION_1152_704",
  "RESOLUTION_1152_768",
  "RESOLUTION_1152_832",
  "RESOLUTION_1152_864",
  "RESOLUTION_1152_896",
  "RESOLUTION_1216_704",
  "RESOLUTION_1216_768",
  "RESOLUTION_1216_832",
  "RESOLUTION_1232_768",
  "RESOLUTION_1248_832",
  "RESOLUTION_1280_704",
  "RESOLUTION_1280_720",
  "RESOLUTION_1280_768",
  "RESOLUTION_1280_800",
  "RESOLUTION_1312_736",
  "RESOLUTION_1344_640",
  "RESOLUTION_1344_704",
  "RESOLUTION_1344_768",
  "RESOLUTION_1408_576",
  "RESOLUTION_1408_640",
  "RESOLUTION_1408_704",
  "RESOLUTION_1472_576",
  "RESOLUTION_1472_640",
  "RESOLUTION_1472_704",
  "RESOLUTION_1536_512",
  "RESOLUTION_1536_576",
  "RESOLUTION_1536_640"
]);
var IdeogramRequestSchema = import_v4.z.looseObject({
image_request: import_v4.z.object({
aspect_ratio: IdeogramAspectRatioSchema.optional().default("ASPECT_1_1"),
magic_prompt_option: import_v4.z.enum(["AUTO", "ON", "OFF"]).optional().default("AUTO"),
model: import_v4.z.enum(["V_1", "V_1_TURBO", "V_2", "V_2_TURBO"]).optional().default("V_2"),
negative_prompt: import_v4.z.string().optional(),
prompt: import_v4.z.string(),
resolution: IdeogramResolutionSchema.optional(),
seed: import_v4.z.number().optional(),
style_type: