@302ai/ai-sdk
Version:
The **[302AI provider](https://sdk.vercel.ai/providers/ai-sdk-providers/)** for the [AI SDK](https://sdk.vercel.ai/docs) contains image model support for the [302AI](https://302.ai) platform.
1,713 lines (1,697 loc) • 113 kB
JavaScript
// esbuild shim for dynamic require() in ESM output: if a real CommonJS
// `require` exists at runtime it is used directly (wrapped in a Proxy so
// property reads such as `require.resolve` also delegate to it); otherwise
// any call throws, because a dynamic require cannot be bundled statically.
var __require = /* @__PURE__ */ ((x) => typeof require !== "undefined" ? require : typeof Proxy !== "undefined" ? new Proxy(x, {
  get: (a, b) => (typeof require !== "undefined" ? require : a)[b]
}) : x)(function(x) {
  if (typeof require !== "undefined") return require.apply(this, arguments);
  throw Error('Dynamic require of "' + x + '" is not supported');
});
// src/ai302-provider.ts
import {
loadApiKey,
withoutTrailingSlash
} from "@ai-sdk/provider-utils";
// src/models/auraflow.ts
import { combineHeaders, postToApi } from "@ai-sdk/provider-utils";
// src/utils/api-handlers.ts
import { APICallError } from "@ai-sdk/provider";
import {
extractResponseHeaders
} from "@ai-sdk/provider-utils";
// Factory for a success handler that returns the parsed JSON body together
// with the captured response headers. `url` and `requestBodyValues` are part
// of the handler interface but are not needed for the success path.
var createJsonResponseHandler = () => {
  return async ({ response, url, requestBodyValues }) => {
    const responseHeaders = extractResponseHeaders(response);
    const value = await response.json();
    return { responseHeaders, value };
  };
};
// Failure handler: wraps a non-2xx HTTP response in an APICallError that
// carries the status line, the raw body text, and the response headers.
var statusCodeErrorResponseHandler = async ({ response, url, requestBodyValues }) => {
  const responseHeaders = extractResponseHeaders(response);
  const responseBody = await response.text();
  const value = new APICallError({
    message: response.statusText,
    url,
    requestBodyValues,
    statusCode: response.status,
    responseHeaders,
    responseBody
  });
  return { responseHeaders, value };
};
// src/models/base-model.ts
var BaseModelHandler = class {
  /**
   * Shared base for all 302AI image-model handlers.
   *
   * @param modelId  provider model identifier (used in request paths and
   *                 response metadata)
   * @param settings model-specific settings object
   * @param config   provider config: `url({modelId, path})`, `headers()`,
   *                 and an optional custom `fetch`
   */
  constructor(modelId, settings, config) {
    this.modelId = modelId;
    this.settings = settings;
    this.config = config;
  }
  // Entry point: drops headers whose value is `undefined`, then delegates to
  // the subclass's processRequest implementation.
  async handleRequest(params) {
    const { headers, ...rest } = params;
    const requestHeaders = headers ? Object.fromEntries(
      Object.entries(headers).filter(([_, v]) => v !== void 0)
    ) : void 0;
    return this.processRequest({
      ...rest,
      headers: requestHeaders
    });
  }
  // Parses "WIDTHxHEIGHT" into { width, height }. Falsy input -> undefined.
  // Malformed strings yield NaN dimensions (callers guard via supported-size
  // validation) — unchanged behavior.
  parseSize(size) {
    if (!size) return void 0;
    const [width, height] = size.split("x").map(Number);
    return { width, height };
  }
  // Validates a "W:H" aspect-ratio string. Without min/max bounds any parsable
  // ratio is returned as-is; with bounds, out-of-range ratios are clamped to
  // the nearest bound (expressed with a base of 9) and a warning is pushed.
  validateAspectRatio(aspectRatio, warnings, maxRatio, minRatio) {
    if (!aspectRatio) return void 0;
    const [width, height] = aspectRatio.split(":").map(Number);
    if (!width || !height) return void 0;
    if (maxRatio === void 0 || minRatio === void 0) {
      return aspectRatio;
    }
    const ratio = width / height;
    if (ratio >= minRatio && ratio <= maxRatio) {
      return aspectRatio;
    }
    let adjustedWidth;
    let adjustedHeight;
    if (ratio > maxRatio) {
      adjustedHeight = 9;
      adjustedWidth = Math.round(maxRatio * adjustedHeight);
    } else {
      adjustedWidth = 9;
      adjustedHeight = Math.round(adjustedWidth / minRatio);
    }
    warnings.push({
      type: "other",
      message: `Aspect ratio ${aspectRatio} is outside the allowed range (${adjustedWidth}:${adjustedHeight} to ${adjustedHeight}:${adjustedWidth}). Adjusted to ${adjustedWidth}:${adjustedHeight}`
    });
    return `${adjustedWidth}:${adjustedHeight}`;
  }
  // Converts "W:H" into concrete pixel dimensions where the longer side equals
  // baseSize (default 1024). Returns undefined when the ratio is unusable.
  aspectRatioToSize(aspectRatio, baseSize = 1024, warnings) {
    if (!aspectRatio) return void 0;
    const validatedAspectRatio = this.validateAspectRatio(
      aspectRatio,
      warnings
    );
    if (!validatedAspectRatio) return void 0;
    const [width, height] = validatedAspectRatio.split(":").map(Number);
    if (!width || !height) return void 0;
    const ratio = width / height;
    if (ratio > 1) {
      return { width: baseSize, height: Math.round(baseSize / ratio) };
    } else {
      return { width: Math.round(baseSize * ratio), height: baseSize };
    }
  }
  // Downloads one image URL and returns it base64-encoded.
  // Up to 5 attempts with exponential backoff + jitter (capped at 30 s); each
  // attempt is aborted after a 120 s timeout. Fix: the timeout timer is now
  // always cleared in `finally` — previously it was cleared only on success,
  // so a rejected fetch leaked a live 120 s timer per failed attempt.
  async downloadImage(url) {
    const maxRetries = 5;
    const timeout = 12e4;
    for (let attempt = 0; attempt < maxRetries; attempt++) {
      const controller = new AbortController();
      const timeoutId = setTimeout(() => controller.abort(), timeout);
      try {
        const imageResponse = await fetch(url, {
          signal: controller.signal,
          headers: {
            Accept: "image/*",
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36"
          }
        });
        if (!imageResponse.ok) {
          throw new Error(`HTTP error! status: ${imageResponse.status}`);
        }
        const arrayBuffer = await imageResponse.arrayBuffer();
        return Buffer.from(arrayBuffer).toString("base64");
      } catch (error) {
        const errorMessage = error instanceof Error ? error.message : String(error);
        if (attempt === maxRetries - 1) {
          throw new Error(
            `Failed to download image after ${maxRetries} attempts: ${errorMessage}`
          );
        }
        // Exponential backoff with jitter, capped at 30 s.
        const delay = Math.min(
          2 ** attempt * 2e3 + Math.random() * 1e3,
          3e4
        );
        await new Promise((resolve) => setTimeout(resolve, delay));
      } finally {
        clearTimeout(timeoutId);
      }
    }
    throw new Error("Failed to download image after retries");
  }
  // Downloads all URLs in parallel; individual failures are dropped. Throws
  // only when every download failed.
  async downloadImages(urls) {
    const imagePromises = urls.map(async (url) => {
      try {
        return await this.downloadImage(url);
      } catch (error) {
        return null;
      }
    });
    const base64Images = await Promise.all(imagePromises);
    const validImages = base64Images.filter(Boolean);
    if (validImages.length === 0) {
      throw new Error("All image downloads failed");
    }
    return validImages;
  }
  // Snaps a size to 32-pixel multiples, then to the closest entry of
  // supportedSizes (warning when a substitution happens).
  validateSizeOption(parsedSize, supportedSizes, warnings) {
    const validatedSize = this.validateDimensionsMultipleOf32(
      parsedSize,
      warnings
    );
    const sizeStr = `${validatedSize.width}x${validatedSize.height}`;
    if (!supportedSizes.includes(sizeStr)) {
      const closestSize = this.findClosestSize(validatedSize, supportedSizes);
      warnings.push({
        type: "other",
        message: `Size ${sizeStr} is not supported. Using closest supported size: ${closestSize}`
      });
      const [width, height] = closestSize.split("x").map(Number);
      return { width, height };
    }
    return validatedSize;
  }
  // Clamps each dimension into [minSize, maxSize] and rounds to the nearest
  // multiple of 32; pushes a warning when anything changed.
  validateDimensionsMultipleOf32(size, warnings, minSize = 32, maxSize = 4096) {
    const adjustDimension = (value) => {
      if (value < minSize) {
        return minSize;
      }
      if (value > maxSize) {
        return maxSize;
      }
      if (value % 32 !== 0) {
        const roundedValue = Math.round(value / 32) * 32;
        return Math.min(maxSize, Math.max(minSize, roundedValue));
      }
      return value;
    };
    const adjustedWidth = adjustDimension(size.width);
    const adjustedHeight = adjustDimension(size.height);
    if (adjustedWidth !== size.width || adjustedHeight !== size.height) {
      warnings.push({
        type: "other",
        message: `Image dimensions must be multiples of 32 and within the range ${minSize}-${maxSize}. Adjusted from ${size.width}x${size.height} to ${adjustedWidth}x${adjustedHeight}`
      });
      return { width: adjustedWidth, height: adjustedHeight };
    }
    return size;
  }
  // Picks the supported size whose aspect ratio is closest to the target, and
  // among the two best ratio matches prefers the one whose longer side is
  // closest to 1024.
  findClosestSize(size, supportedSizes) {
    const targetRatio = size.width / size.height;
    const sizesByRatio = supportedSizes.slice().sort((a, b) => {
      const [w1, h1] = a.split("x").map(Number);
      const [w2, h2] = b.split("x").map(Number);
      const ratio1 = w1 / h1;
      const ratio2 = w2 / h2;
      const diff1 = Math.abs(ratio1 - targetRatio);
      const diff2 = Math.abs(ratio2 - targetRatio);
      return diff1 - diff2;
    });
    const similarRatioSizes = sizesByRatio.slice(0, 2);
    return similarRatioSizes.reduce((closest, current) => {
      const [w1, h1] = current.split("x").map(Number);
      const [w2, h2] = closest.split("x").map(Number);
      const diff1 = Math.abs(Math.max(w1, h1) - 1024);
      const diff2 = Math.abs(Math.max(w2, h2) - 1024);
      return diff1 < diff2 ? current : closest;
    });
  }
  // Returns the supported ratio numerically closest to targetRatio; warns when
  // an exact match is not available. Unparsable targets fall back to the first
  // supported ratio.
  findClosestAspectRatio(targetRatio, supportedRatios, warnings) {
    if (!targetRatio) return supportedRatios[0];
    const [targetWidth, targetHeight] = targetRatio.split(":").map(Number);
    if (!targetWidth || !targetHeight) return supportedRatios[0];
    const targetValue = targetWidth / targetHeight;
    let closestRatio = supportedRatios[0];
    let minDiff = Infinity;
    for (const ratio of supportedRatios) {
      const [w, h] = ratio.split(":").map(Number);
      if (!w || !h) continue;
      const currentValue = w / h;
      const diff = Math.abs(currentValue - targetValue);
      if (diff < minDiff) {
        minDiff = diff;
        closestRatio = ratio;
      }
    }
    if (closestRatio !== targetRatio) {
      warnings.push({
        type: "other",
        message: `Aspect ratio ${targetRatio} is not supported. Using closest supported ratio: ${closestRatio}`
      });
    }
    return closestRatio;
  }
  // Converts a "WIDTHxHEIGHT" string to the closest supported "W:H" ratio;
  // warns when the numeric mismatch exceeds 0.05.
  sizeToAspectRatio(size, supportedRatios, warnings) {
    if (!size) return void 0;
    const parsedSize = this.parseSize(size);
    if (!parsedSize) {
      warnings.push({
        type: "other",
        message: `Invalid size format: ${size}. Expected format: WIDTHxHEIGHT`
      });
      return void 0;
    }
    const ratio = parsedSize.width / parsedSize.height;
    let closestRatio = supportedRatios[0];
    let minDiff = Infinity;
    for (const aspectRatio of supportedRatios) {
      const [w, h] = aspectRatio.split(":").map(Number);
      if (!w || !h) continue;
      const currentRatio = w / h;
      const diff = Math.abs(currentRatio - ratio);
      if (diff < minDiff) {
        minDiff = diff;
        closestRatio = aspectRatio;
      }
    }
    const [closestW, closestH] = closestRatio.split(":").map(Number);
    const closestRatioValue = closestW / closestH;
    if (Math.abs(closestRatioValue - ratio) > 0.05) {
      warnings.push({
        type: "other",
        message: `Size ${size} (ratio ${ratio.toFixed(2)}) converted to closest supported aspect ratio: ${closestRatio}`
      });
    }
    return closestRatio;
  }
};
// src/models/auraflow.ts
var AuraflowHandler = class extends BaseModelHandler {
  // AuraFlow accepts only a prompt (sent as multipart form data); every other
  // provided option is surfaced as an "unsupported-setting" warning.
  async processRequest({
    prompt,
    n,
    size,
    aspectRatio,
    seed,
    providerOptions,
    headers,
    abortSignal
  }) {
    const warnings = [];
    if (n != null && n > 1) {
      warnings.push({
        type: "unsupported-setting",
        setting: "n",
        details: "AuraFlow does not support batch generation"
      });
    }
    const ignoredSettings = [
      ["size", size],
      ["aspectRatio", aspectRatio],
      ["seed", seed],
      ["providerOptions", providerOptions.ai302]
    ];
    for (const [setting, value] of ignoredSettings) {
      if (value != null) {
        warnings.push({ type: "unsupported-setting", setting });
      }
    }
    const formData = new FormData();
    formData.append("prompt", prompt);
    const { value: response, responseHeaders } = await postToApi({
      url: this.config.url({
        modelId: this.modelId,
        path: "/302/submit/aura-flow"
      }),
      headers: combineHeaders(this.config.headers(), headers),
      body: { content: formData, values: { prompt } },
      failedResponseHandler: statusCodeErrorResponseHandler,
      successfulResponseHandler: createJsonResponseHandler(),
      abortSignal,
      fetch: this.config.fetch
    });
    const imageUrls = response.images.map((img) => img.url).filter(Boolean);
    const images = await this.downloadImages(imageUrls);
    return {
      images,
      warnings,
      response: {
        timestamp: new Date(),
        modelId: this.modelId,
        headers: responseHeaders
      }
    };
  }
};
// src/models/bagel.ts
import { combineHeaders as combineHeaders2, postJsonToApi } from "@ai-sdk/provider-utils";
var BagelHandler = class extends BaseModelHandler {
  // Bagel takes only a prompt plus ai302 provider options; n/size/aspectRatio/
  // seed are all unsupported and reported as warnings.
  async processRequest({
    prompt,
    n,
    size,
    aspectRatio,
    seed,
    providerOptions,
    headers,
    abortSignal
  }) {
    const warnings = [];
    const checks = [
      ["n", n != null && n > 1, "Bagel does not support batch generation"],
      ["size", size != null, "Bagel does not support custom size"],
      ["aspectRatio", aspectRatio != null, "Bagel does not support custom aspect ratio"],
      ["seed", seed != null, "Bagel does not support custom seed"]
    ];
    for (const [setting, triggered, details] of checks) {
      if (triggered) {
        warnings.push({ type: "unsupported-setting", setting, details });
      }
    }
    const ai302 = providerOptions.ai302 ?? {};
    const { value: response, responseHeaders } = await postJsonToApi({
      url: this.config.url({ modelId: this.modelId, path: "/302/submit/bagel" }),
      headers: combineHeaders2(this.config.headers(), headers),
      body: {
        prompt,
        // chain-of-thought generation is off unless explicitly enabled
        use_thought: ai302.use_thought ?? false,
        ...ai302
      },
      failedResponseHandler: statusCodeErrorResponseHandler,
      successfulResponseHandler: createJsonResponseHandler(),
      abortSignal,
      fetch: this.config.fetch
    });
    const imageUrls = response.images.map((img) => img.url).filter(Boolean);
    const images = await this.downloadImages(imageUrls);
    return {
      images,
      warnings,
      response: {
        timestamp: new Date(),
        modelId: this.modelId,
        headers: responseHeaders
      }
    };
  }
};
// src/models/cogview.ts
import { combineHeaders as combineHeaders3, postJsonToApi as postJsonToApi2 } from "@ai-sdk/provider-utils";
// Resolutions accepted by the CogView-4 endpoint.
var SUPPORTED_SIZE_OPTIONS = [
  "1024x1024",
  "768x1344",
  "864x1152",
  "1344x768",
  "1152x864",
  "1440x720",
  "720x1440"
];
var CogViewHandler = class extends BaseModelHandler {
  // CogView-4: one image per request, no seed; an explicit size (or one
  // derived from aspectRatio) is clamped to the supported resolution list.
  async processRequest({
    prompt,
    n,
    size,
    aspectRatio,
    seed,
    providerOptions,
    headers,
    abortSignal
  }) {
    const warnings = [];
    if (n != null && n > 1) {
      warnings.push({
        type: "unsupported-setting",
        setting: "n",
        details: "CogView-4 does not support batch generation"
      });
    }
    if (seed != null) {
      warnings.push({
        type: "unsupported-setting",
        setting: "seed",
        details: "CogView-4 does not support seed parameter"
      });
    }
    // Prefer the explicit size; otherwise derive dimensions from aspectRatio.
    const requested = size ? this.parseSize(size) : aspectRatio ? this.aspectRatioToSize(aspectRatio, 1024, warnings) : void 0;
    let sizeString;
    if (requested) {
      const validated = this.validateSizeOption(requested, SUPPORTED_SIZE_OPTIONS, warnings);
      sizeString = `${validated.width}x${validated.height}`;
    }
    const modelVariant = this.modelId === "cogview-4-250304" ? "cogview-4-250304" : "cogview-4";
    const { value: response, responseHeaders } = await postJsonToApi2({
      url: this.config.url({ modelId: this.modelId, path: "/bigmodel/api/paas/v4/images/generations" }),
      headers: combineHeaders3(this.config.headers(), headers),
      body: {
        model: modelVariant,
        prompt,
        size: sizeString,
        ...(providerOptions.ai302 ?? {})
      },
      failedResponseHandler: statusCodeErrorResponseHandler,
      successfulResponseHandler: createJsonResponseHandler(),
      abortSignal,
      fetch: this.config.fetch
    });
    const imageUrls = response.data.map((img) => img.url).filter(Boolean);
    const images = await this.downloadImages(imageUrls);
    return {
      images,
      warnings,
      response: {
        timestamp: new Date(),
        modelId: this.modelId,
        headers: responseHeaders
      }
    };
  }
};
// src/models/dalle.ts
import { combineHeaders as combineHeaders4, postJsonToApi as postJsonToApi3 } from "@ai-sdk/provider-utils";
// Resolutions accepted by the DALL-E endpoint.
var SUPPORTED_SIZE_OPTIONS2 = ["256x256", "512x512", "1024x1024"];
var DallEHandler = class extends BaseModelHandler {
  // DALL-E 3: resolves a size from `size` or `aspectRatio` (default
  // 1024x1024), snapped to the supported list; seed and n>1 are unsupported.
  async processRequest({
    prompt,
    n,
    size,
    aspectRatio,
    seed,
    providerOptions,
    headers,
    abortSignal
  }) {
    const warnings = [];
    if (n != null && n > 1) {
      warnings.push({ type: "unsupported-setting", setting: "n", details: "DALL-E 3 does not support batch generation" });
    }
    if (size != null && aspectRatio != null) {
      warnings.push({ type: "unsupported-setting", setting: "aspectRatio", details: "When size is provided, aspectRatio will be ignored" });
    } else if (size == null && aspectRatio != null) {
      warnings.push({ type: "other", message: "Using size calculated from aspect ratio with base size 1024" });
    }
    if (seed != null) {
      warnings.push({ type: "unsupported-setting", setting: "seed" });
    }
    const fallback = { width: 1024, height: 1024 };
    const requested = this.parseSize(size) || this.aspectRatioToSize(aspectRatio, 1024, warnings) || fallback;
    const finalSize = this.validateSizeOption(requested, SUPPORTED_SIZE_OPTIONS2, warnings);
    const { value: response, responseHeaders } = await postJsonToApi3({
      url: this.config.url({ modelId: this.modelId, path: "/v1/images/generations" }),
      headers: combineHeaders4(this.config.headers(), headers),
      body: {
        prompt,
        model: "dall-e-3",
        size: `${finalSize.width}x${finalSize.height}`,
        ...(providerOptions.ai302 ?? {})
      },
      failedResponseHandler: statusCodeErrorResponseHandler,
      successfulResponseHandler: createJsonResponseHandler(),
      abortSignal,
      fetch: this.config.fetch
    });
    const imageUrls = response.data.map((img) => img.url).filter(Boolean);
    const images = await this.downloadImages(imageUrls);
    return {
      images,
      warnings,
      response: {
        timestamp: new Date(),
        modelId: this.modelId,
        headers: responseHeaders
      }
    };
  }
};
// src/models/flux-pro-dev.ts
import { combineHeaders as combineHeaders5, postJsonToApi as postJsonToApi4 } from "@ai-sdk/provider-utils";
// src/ai302-image-settings.ts
// Capability map: whether each 302AI backend model accepts explicit pixel
// dimensions (supportsSize: true) or only an aspect ratio (false).
var modelToBackendConfig = {
  "flux-v1.1-ultra": { supportsSize: false },
  "flux-pro-v1.1": { supportsSize: true },
  "flux-pro": { supportsSize: true },
  "flux-dev": { supportsSize: true },
  "flux-schnell": { supportsSize: true },
  "flux-kontext-max": { supportsSize: true },
  "flux-kontext-pro": { supportsSize: true },
  "ideogram/V_1": { supportsSize: true },
  "ideogram/V_1_TURBO": { supportsSize: true },
  "ideogram/V_2": { supportsSize: true },
  "ideogram/V_2_TURBO": { supportsSize: true },
  "ideogram/V_2A": { supportsSize: true },
  "ideogram/V_2A_TURBO": { supportsSize: true },
  "dall-e-3": { supportsSize: true },
  recraftv3: { supportsSize: true },
  recraftv2: { supportsSize: true },
  "sdxl-lightning": { supportsSize: true },
  "sdxl-lightning-v2": { supportsSize: true },
  "sdxl-lightning-v3": { supportsSize: true },
  kolors: { supportsSize: true },
  "aura-flow": { supportsSize: true },
  "luma-photon": { supportsSize: true },
  sdxl: { supportsSize: true },
  "sd3-ultra": { supportsSize: false },
  sd3v2: { supportsSize: true },
  "sd3.5-large": { supportsSize: true },
  "sd3.5-large-turbo": { supportsSize: true },
  "sd3.5-medium": { supportsSize: true },
  "midjourney/6.0": { supportsSize: false },
  "midjourney/6.1": { supportsSize: false },
  "midjourney/7.0": { supportsSize: false },
  "nijijourney/6.0": { supportsSize: false },
  "google-imagen-3": { supportsSize: true },
  "google-imagen-3-fast": { supportsSize: true },
  "google-imagen-4-preview": { supportsSize: false },
  "doubao-general-v2.1-l": { supportsSize: true },
  "doubao-general-v2.0-l": { supportsSize: true },
  "doubao-general-v2.0": { supportsSize: true },
  "doubao-general-v3.0": { supportsSize: true },
  "lumina-image-v2": { supportsSize: true },
  "omnigen-v1": { supportsSize: true },
  "playground-v25": { supportsSize: true },
  "cogview-4": { supportsSize: true },
  "cogview-4-250304": { supportsSize: true },
  "minimaxi-image-01": { supportsSize: false },
  "irag-1.0": { supportsSize: false },
  "hidream-i1-full": { supportsSize: true },
  "hidream-i1-dev": { supportsSize: true },
  "hidream-i1-fast": { supportsSize: true },
  "gpt-image-1": { supportsSize: true },
  bagel: { supportsSize: false },
  soul: { supportsSize: true },
  "kling-v1": { supportsSize: true },
  "kling-v1-5": { supportsSize: true },
  "kling-v2": { supportsSize: true }
};
// src/models/flux-pro-dev.ts
var FluxProDevHandler = class extends BaseModelHandler {
  /**
   * Flux Pro/Dev family request. Size-capable models (per
   * modelToBackendConfig) receive explicit pixel dimensions clamped to
   * 256-1440 and rounded to multiples of 32; other models receive the raw
   * aspect ratio instead.
   */
  async processRequest({
    prompt,
    n,
    size,
    aspectRatio,
    seed,
    providerOptions,
    headers,
    abortSignal
  }) {
    var _a;
    const warnings = [];
    // Only one image per request is produced.
    if (n != null && n > 1) {
      warnings.push({
        type: "unsupported-setting",
        setting: "n",
        details: "Flux Pro does not support batch generation"
      });
    }
    const backendConfig = modelToBackendConfig[this.modelId];
    // For size-capable backends, an explicit size takes precedence over
    // aspectRatio; only warn about the interaction in that case.
    if (backendConfig == null ? void 0 : backendConfig.supportsSize) {
      if (size != null && aspectRatio != null) {
        warnings.push({
          type: "unsupported-setting",
          setting: "aspectRatio",
          details: "When size is provided, aspectRatio will be ignored"
        });
      } else if (size == null && aspectRatio != null) {
        warnings.push({
          type: "other",
          message: "Using size calculated from aspect ratio with base size 1024"
        });
      }
    }
    // Fall back to 1024x1024 when neither size nor aspectRatio resolves.
    let parsedSize = this.parseSize(size) || this.aspectRatioToSize(aspectRatio, 1024, warnings) || {
      width: 1024,
      height: 1024
    };
    if (backendConfig == null ? void 0 : backendConfig.supportsSize) {
      parsedSize = this.validateDimensionsMultipleOf32(
        parsedSize,
        warnings,
        256,
        1440
      );
    }
    const { value: response, responseHeaders } = await postJsonToApi4({
      url: this.config.url({
        modelId: this.modelId,
        path: `/302/submit/${this.modelId}`
      }),
      headers: combineHeaders5(this.config.headers(), headers),
      body: {
        prompt,
        // NOTE(review): both `image_size` and `size` carry identical
        // dimensions — presumably different backends read different keys;
        // TODO confirm against the 302AI API before consolidating.
        image_size: (backendConfig == null ? void 0 : backendConfig.supportsSize) ? { width: parsedSize.width, height: parsedSize.height } : void 0,
        size: (backendConfig == null ? void 0 : backendConfig.supportsSize) ? { width: parsedSize.width, height: parsedSize.height } : void 0,
        aspect_ratio: !(backendConfig == null ? void 0 : backendConfig.supportsSize) ? aspectRatio : void 0,
        seed,
        ...(_a = providerOptions.ai302) != null ? _a : {}
      },
      failedResponseHandler: statusCodeErrorResponseHandler,
      successfulResponseHandler: createJsonResponseHandler(),
      abortSignal,
      fetch: this.config.fetch
    });
    const urls = response.images.map((img) => img.url).filter(Boolean);
    const images = await this.downloadImages(urls);
    return {
      images,
      warnings,
      response: {
        timestamp: /* @__PURE__ */ new Date(),
        modelId: this.modelId,
        headers: responseHeaders
      }
    };
  }
};
// src/models/flux-kontext.ts
import { combineHeaders as combineHeaders6, postJsonToApi as postJsonToApi5 } from "@ai-sdk/provider-utils";
// Polling cadence (2 s) and overall deadline (300 s) for the asynchronous
// Flux Kontext task API.
var POLL_INTERVAL = 2e3;
var MAX_POLL_TIME = 3e5;
var FluxKontextHandler = class extends BaseModelHandler {
  // Maps this.modelId to the endpoint path segment; unknown ids fall back to
  // the "pro" variant.
  getModelName() {
    switch (this.modelId) {
      case "flux-kontext-max":
        return "flux-kontext-max";
      case "flux-kontext-pro":
        return "flux-kontext-pro";
      default:
        return "flux-kontext-pro";
    }
  }
  // Polls /flux/v1/get_result every POLL_INTERVAL ms until the task reports
  // "Ready" with a result. Throws on abort, on MAX_POLL_TIME expiry, on a
  // non-2xx poll response, or when the task status is Failed/Error.
  async pollTask(taskId, abortSignal) {
    const startTime = Date.now();
    const fetchFn = this.config.fetch || fetch;
    while (true) {
      if (abortSignal == null ? void 0 : abortSignal.aborted) {
        throw new Error("Task polling aborted");
      }
      if (Date.now() - startTime > MAX_POLL_TIME) {
        throw new Error("Task polling timed out");
      }
      const response = await fetchFn(
        `${this.config.url({ modelId: this.modelId, path: `/flux/v1/get_result?id=${taskId}` })}`,
        {
          method: "GET",
          headers: this.config.headers(),
          signal: abortSignal
        }
      );
      if (!response.ok) {
        throw new Error(`HTTP error! status: ${response.status}`);
      }
      const data = await response.json();
      if (data.status === "Ready" && data.result) {
        return data;
      }
      if (data.status === "Failed" || data.status === "Error") {
        throw new Error(`Task failed with status: ${data.status}`);
      }
      await new Promise((resolve) => setTimeout(resolve, POLL_INTERVAL));
    }
  }
  /**
   * Flux Kontext generation: submits an async task, then polls for the
   * result. `size` (if given) is converted to the nearest supported aspect
   * ratio; an explicit `aspectRatio` is used only when it is exactly
   * supported. Image-to-image and delivery options come in via
   * providerOptions.ai302.
   */
  async processRequest({
    prompt,
    n,
    size,
    aspectRatio,
    seed,
    providerOptions,
    headers,
    abortSignal
  }) {
    var _a;
    const warnings = [];
    if (n != null && n > 1) {
      warnings.push({
        type: "unsupported-setting",
        setting: "n",
        details: "Flux Kontext generates one image per request"
      });
    }
    let finalAspectRatio;
    if (size) {
      const supportedRatios = [
        "21:9",
        "16:9",
        "3:2",
        "4:3",
        "1:1",
        "3:4",
        "2:3",
        "9:16",
        "9:21"
      ];
      const sizeToAspectRatio = this.sizeToAspectRatio(
        size,
        supportedRatios,
        warnings
      );
      if (sizeToAspectRatio) {
        finalAspectRatio = sizeToAspectRatio;
      }
    } else if (aspectRatio) {
      const supportedRatios = [
        "21:9",
        "16:9",
        "3:2",
        "4:3",
        "1:1",
        "3:4",
        "2:3",
        "9:16",
        "9:21"
      ];
      if (supportedRatios.includes(aspectRatio)) {
        finalAspectRatio = aspectRatio;
      } else {
        warnings.push({
          type: "unsupported-setting",
          setting: "aspectRatio",
          details: `Aspect ratio ${aspectRatio} not supported. Supported ratios: ${supportedRatios.join(", ")}`
        });
      }
    }
    if (size != null && aspectRatio != null) {
      warnings.push({
        type: "other",
        message: "Both size and aspectRatio provided. Size will be converted to aspect ratio and aspectRatio parameter will be ignored."
      });
    }
    const ai302Options = (providerOptions == null ? void 0 : providerOptions.ai302) || {};
    const inputImage = ai302Options.input_image;
    const promptUpsampling = ai302Options.prompt_upsampling;
    const safetyTolerance = ai302Options.safety_tolerance;
    const outputFormat = ai302Options.output_format;
    const webhookUrl = ai302Options.webhook_url;
    const webhookSecret = ai302Options.webhook_secret;
    const { value: submitResponse, responseHeaders } = await postJsonToApi5({
      url: this.config.url({
        modelId: this.modelId,
        path: `/flux/v1/${this.getModelName()}`
      }),
      headers: combineHeaders6(this.config.headers(), headers),
      // NOTE(review): the trailing `...ai302Options` re-spreads every option
      // already placed conditionally above (and any extra keys the caller
      // passed), so the conditional spreads are effectively defaults —
      // preserved as-is to keep the exact request body shape.
      body: {
        prompt,
        ...inputImage !== void 0 && { input_image: inputImage },
        ...seed !== void 0 && { seed },
        ...finalAspectRatio !== void 0 && {
          aspect_ratio: finalAspectRatio
        },
        ...outputFormat !== void 0 && { output_format: outputFormat },
        ...webhookUrl !== void 0 && { webhook_url: webhookUrl },
        ...webhookSecret !== void 0 && { webhook_secret: webhookSecret },
        ...promptUpsampling !== void 0 && {
          prompt_upsampling: promptUpsampling
        },
        ...safetyTolerance !== void 0 && {
          safety_tolerance: safetyTolerance
        },
        ...ai302Options
      },
      failedResponseHandler: statusCodeErrorResponseHandler,
      successfulResponseHandler: createJsonResponseHandler(),
      abortSignal,
      fetch: this.config.fetch
    });
    const taskResult = await this.pollTask(submitResponse.id, abortSignal);
    if (!((_a = taskResult.result) == null ? void 0 : _a.sample)) {
      throw new Error("No image generated");
    }
    const images = await this.downloadImages([taskResult.result.sample]);
    return {
      images,
      warnings,
      response: {
        timestamp: /* @__PURE__ */ new Date(),
        modelId: this.modelId,
        headers: responseHeaders
      }
    };
  }
};
// src/models/gpt-image.ts
import { combineHeaders as combineHeaders7, postJsonToApi as postJsonToApi6 } from "@ai-sdk/provider-utils";
// Resolutions accepted by the gpt-image-1 endpoint.
var SUPPORTED_SIZES = ["1024x1024", "1536x1024", "1024x1536"];
var GPTImageHandler = class extends BaseModelHandler {
  // gpt-image-1: resolves a supported size from `size` or `aspectRatio`
  // (default 1024x1024) and passes response_format as a query parameter
  // instead of in the JSON body.
  async processRequest({
    prompt,
    n,
    size,
    aspectRatio,
    providerOptions,
    headers,
    abortSignal
  }) {
    const warnings = [];
    let requested = this.parseSize(size);
    if (!requested && aspectRatio) {
      requested = this.aspectRatioToSize(aspectRatio, 1024, warnings);
    }
    let sizeStr = "1024x1024";
    if (requested) {
      const validated = this.validateSizeOption(requested, SUPPORTED_SIZES, warnings);
      sizeStr = `${validated.width}x${validated.height}`;
    }
    const ai302 = providerOptions.ai302 ?? {};
    const requestBody = {
      prompt,
      model: "gpt-image-1",
      size: sizeStr,
      n: n || 1,
      ...ai302
    };
    // response_format travels only in the query string, never in the body.
    const responseFormat = ai302.response_format || "url";
    delete requestBody.response_format;
    const baseUrl = this.config.url({ modelId: this.modelId, path: "/v1/images/generations" });
    const { value: response, responseHeaders } = await postJsonToApi6({
      url: `${baseUrl}?response_format=${responseFormat}`,
      headers: combineHeaders7(this.config.headers(), headers),
      body: requestBody,
      failedResponseHandler: statusCodeErrorResponseHandler,
      successfulResponseHandler: createJsonResponseHandler(),
      abortSignal,
      fetch: this.config.fetch
    });
    const imageUrls = response.data.map((img) => img.url || "").filter(Boolean);
    const images = await this.downloadImages(imageUrls);
    return {
      images,
      warnings,
      response: {
        timestamp: new Date(),
        modelId: this.modelId,
        headers: responseHeaders
      }
    };
  }
};
// src/models/hidream.ts
import { combineHeaders as combineHeaders8, postJsonToApi as postJsonToApi7 } from "@ai-sdk/provider-utils";
var HidreamHandler = class extends BaseModelHandler {
  // Hidream: an optional size (or one derived from aspectRatio) is snapped to
  // 32-pixel multiples before submission; one image per request.
  async processRequest({
    prompt,
    n,
    size,
    aspectRatio,
    seed,
    providerOptions,
    headers,
    abortSignal
  }) {
    const warnings = [];
    if (n != null && n > 1) {
      warnings.push({
        type: "unsupported-setting",
        setting: "n",
        details: "Hidream does not support batch generation"
      });
    }
    let requested = this.parseSize(size);
    if (!requested && aspectRatio) {
      requested = this.aspectRatioToSize(aspectRatio, 1024, warnings);
    }
    if (requested) {
      requested = this.validateDimensionsMultipleOf32(requested, warnings);
    }
    const { value: response, responseHeaders } = await postJsonToApi7({
      url: this.config.url({ modelId: this.modelId, path: `/302/submit/${this.modelId}` }),
      headers: combineHeaders8(this.config.headers(), headers),
      body: {
        prompt,
        image_size: requested,
        seed,
        ...(providerOptions.ai302 ?? {})
      },
      failedResponseHandler: statusCodeErrorResponseHandler,
      successfulResponseHandler: createJsonResponseHandler(),
      abortSignal,
      fetch: this.config.fetch
    });
    const imageUrls = response.images.map((img) => img.url).filter(Boolean);
    const images = await this.downloadImages(imageUrls);
    return {
      images,
      warnings,
      response: {
        timestamp: new Date(),
        modelId: this.modelId,
        headers: responseHeaders
      }
    };
  }
};
// src/models/ideogram.ts
import { combineHeaders as combineHeaders9, postJsonToApi as postJsonToApi8 } from "@ai-sdk/provider-utils";
// src/ai302-types.ts
import { z } from "zod";
// Generic image descriptor returned by several 302AI endpoints; every field
// is optional because backends differ in what they report. .passthrough()
// keeps unrecognized backend fields.
var ImageResponseSchema = z.object({
  content_type: z.string().optional(),
  height: z.number().optional(),
  url: z.string().optional(),
  width: z.number().optional()
}).passthrough();
// Explicit pixel dimensions used by size-capable request schemas.
var ImageSizeSchema = z.object({
  height: z.number(),
  width: z.number()
}).passthrough();
// ASPECT_W_H enum values accepted by the Ideogram API's aspect_ratio field.
var IdeogramAspectRatioSchema = z.enum([
  "ASPECT_1_1",
  "ASPECT_10_16",
  "ASPECT_16_10",
  "ASPECT_9_16",
  "ASPECT_16_9",
  "ASPECT_3_2",
  "ASPECT_2_3",
  "ASPECT_4_3",
  "ASPECT_3_4",
  "ASPECT_1_3",
  "ASPECT_3_1"
]);
// Request/response contracts for the Flux family of endpoints. All schemas
// use .passthrough() so unrecognized backend fields survive validation.
var FluxV11UltraRequestSchema = z.object({
  aspect_ratio: z.string(),
  prompt: z.string(),
  raw: z.boolean()
}).passthrough();
var FluxV11UltraResponseSchema = z.object({
  has_nsfw_concepts: z.array(z.boolean()),
  images: z.array(ImageResponseSchema),
  prompt: z.string(),
  seed: z.number(),
  timings: z.record(z.any())
}).passthrough();
var FluxProV11RequestSchema = z.object({
  guidance_scale: z.number().optional(),
  image_size: ImageSizeSchema.optional(),
  num_inference_steps: z.number().optional(),
  prompt: z.string()
}).passthrough();
var FluxProV11ResponseSchema = z.object({
  has_nsfw_concepts: z.array(z.boolean()),
  images: z.array(ImageResponseSchema),
  prompt: z.string(),
  seed: z.number(),
  timings: z.record(z.any())
}).passthrough();
// Pro/Dev request requires image_size and documents backend defaults
// (guidance 3.5, 28 inference steps).
var FluxProDevRequestSchema = z.object({
  guidance_scale: z.number().optional().default(3.5),
  image_size: ImageSizeSchema,
  num_inference_steps: z.number().optional().default(28),
  prompt: z.string()
}).passthrough();
var FluxProDevResponseSchema = z.object({
  images: z.array(
    z.object({
      url: z.string(),
      width: z.number(),
      height: z.number(),
      content_type: z.string()
    })
  ),
  timings: z.record(z.number()),
  seed: z.number(),
  has_nsfw_concepts: z.array(z.boolean()),
  prompt: z.string()
}).passthrough();
var FluxSchnellRequestSchema = z.object({
  image_size: ImageSizeSchema.optional(),
  num_inference_steps: z.number().optional(),
  prompt: z.string()
}).passthrough();
var FluxSchnellResponseSchema = z.object({
  images: z.array(
    z.object({
      url: z.string(),
      width: z.number(),
      height: z.number(),
      content_type: z.string()
    })
  ),
  timings: z.record(z.number()),
  seed: z.number(),
  has_nsfw_concepts: z.array(z.boolean()),
  prompt: z.string()
}).passthrough();
// RESOLUTION_WIDTH_HEIGHT enum values accepted by the Ideogram API's
// resolution field (explicit pixel resolutions, an alternative to
// aspect_ratio).
var IdeogramResolutionSchema = z.enum([
  "RESOLUTION_512_1536",
  "RESOLUTION_576_1408",
  "RESOLUTION_576_1472",
  "RESOLUTION_576_1536",
  "RESOLUTION_640_1024",
  "RESOLUTION_640_1344",
  "RESOLUTION_640_1408",
  "RESOLUTION_640_1472",
  "RESOLUTION_640_1536",
  "RESOLUTION_704_1152",
  "RESOLUTION_704_1216",
  "RESOLUTION_704_1280",
  "RESOLUTION_704_1344",
  "RESOLUTION_704_1408",
  "RESOLUTION_704_1472",
  "RESOLUTION_720_1280",
  "RESOLUTION_736_1312",
  "RESOLUTION_768_1024",
  "RESOLUTION_768_1088",
  "RESOLUTION_768_1152",
  "RESOLUTION_768_1216",
  "RESOLUTION_768_1232",
  "RESOLUTION_768_1280",
  "RESOLUTION_768_1344",
  "RESOLUTION_832_960",
  "RESOLUTION_832_1024",
  "RESOLUTION_832_1088",
  "RESOLUTION_832_1152",
  "RESOLUTION_832_1216",
  "RESOLUTION_832_1248",
  "RESOLUTION_864_1152",
  "RESOLUTION_896_960",
  "RESOLUTION_896_1024",
  "RESOLUTION_896_1088",
  "RESOLUTION_896_1120",
  "RESOLUTION_896_1152",
  "RESOLUTION_960_832",
  "RESOLUTION_960_896",
  "RESOLUTION_960_1024",
  "RESOLUTION_960_1088",
  "RESOLUTION_1024_640",
  "RESOLUTION_1024_768",
  "RESOLUTION_1024_832",
  "RESOLUTION_1024_896",
  "RESOLUTION_1024_960",
  "RESOLUTION_1024_1024",
  "RESOLUTION_1088_768",
  "RESOLUTION_1088_832",
  "RESOLUTION_1088_896",
  "RESOLUTION_1088_960",
  "RESOLUTION_1120_896",
  "RESOLUTION_1152_704",
  "RESOLUTION_1152_768",
  "RESOLUTION_1152_832",
  "RESOLUTION_1152_864",
  "RESOLUTION_1152_896",
  "RESOLUTION_1216_704",
  "RESOLUTION_1216_768",
  "RESOLUTION_1216_832",
  "RESOLUTION_1232_768",
  "RESOLUTION_1248_832",
  "RESOLUTION_1280_704",
  "RESOLUTION_1280_720",
  "RESOLUTION_1280_768",
  "RESOLUTION_1280_800",
  "RESOLUTION_1312_736",
  "RESOLUTION_1344_640",
  "RESOLUTION_1344_704",
  "RESOLUTION_1344_768",
  "RESOLUTION_1408_576",
  "RESOLUTION_1408_640",
  "RESOLUTION_1408_704",
  "RESOLUTION_1472_576",
  "RESOLUTION_1472_640",
  "RESOLUTION_1472_704",
  "RESOLUTION_1536_512",
  "RESOLUTION_1536_576",
  "RESOLUTION_1536_640"
]);
var IdeogramRequestSchema = z.object({
image_request: z.object({
aspect_ratio: IdeogramAspectRatioSchema.optional().default("ASPECT_1_1"),
magic_prompt_option: z.enum(["AUTO", "ON", "OFF"]).optional().default("AUTO"),
model: z.enum(["V_1", "V_1_TURBO", "V_2", "V_2_TURBO"]).optional().default("V_2"),
negative_prompt: z.string().optional(),
prompt: z.string(),
resolution: IdeogramResolutionSchema.optional(),
seed: z.number().optional(),
style_type: z.enum(["GENERAL", "REALISTIC", "DESIGN", "RENDER_3D", "ANIME"]).optional().default("GENERAL")
}).passthrough()
}).passthrough();
var IdeogramImageDataSchema = z.object({
is_image_safe: z.boolean(),
prompt: z.string(),
resolution: z.string(),
seed: z.number(),
url: z.string()
}).passthrough();
var IdeogramResponseSchema = z.object({
created: z.string(),
data: z.array(IdeogramImageDataSchema)
}).passthrough();
// Request body for the DALL-E endpoint. `model` is pinned to "dall-e-3";
// `size` is accepted as an arbitrary string (not restricted to an enum here).
var DallERequestSchema = z
  .object({
    model: z.enum(["dall-e-3"]),
    prompt: z.string(),
    size: z.string()
  })
  .passthrough();
// One generated image; DALL-E may also echo back a rewritten prompt.
var DallEImageDataSchema = z
  .object({
    url: z.string(),
    revised_prompt: z.string().optional()
  })
  .passthrough();
// Top-level DALL-E response: numeric creation timestamp plus image list.
var DallEResponseSchema = z
  .object({
    data: z.array(DallEImageDataSchema),
    created: z.number()
  })
  .passthrough();
// A piece of text to render plus its bounding box: exactly four [x, y]
// corner points.
var RecraftTextLayoutSchema = z.object({
  text: z.string(),
  bbox: z.array(z.tuple([z.number(), z.number()])).length(4)
}).passthrough();
// Optional generation controls: a palette of RGB triples and a background
// color string.
var RecraftControlsSchema = z.object({
  colors: z.array(
    z.object({
      rgb: z.tuple([z.number(), z.number(), z.number()])
    })
  ).optional(),
  background_color: z.string().optional()
}).passthrough();
// The four Recraft style families accepted by the API.
var RecraftStyleSchema = z.enum([
  "realistic_image",
  "digital_illustration",
  "vector_illustration",
  "icon"
]);
// Recraft response: generated images with URL, MIME type, and byte size.
var RecraftResponseSchema = z.object({
  images: z.array(
    z.object({
      url: z.string(),
      content_type: z.string(),
      file_size: z.number()
    })
  )
}).passthrough();
// Explicit pixel dimensions for SDXL-Lightning output.
var SDXLLightningImageSizeSchema = z.object({
  width: z.number(),
  height: z.number()
}).passthrough();
// SDXL-Lightning request: prompt plus required image size; optional
// embeddings list and output format.
var SDXLLightningRequestSchema = z.object({
  prompt: z.string(),
  image_size: SDXLLightningImageSizeSchema,
  embeddings: z.array(z.any()).optional(),
  format: z.enum(["jpeg", "png"]).optional()
}).passthrough();
// One SDXL-Lightning output image with its dimensions and MIME type.
var SDXLLightningImageDataSchema = z.object({
  url: z.string(),
  width: z.number(),
  height: z.number(),
  content_type: z.string()
}).passthrough();
// SDXL-Lightning response: images, per-stage timing map, the seed used,
// per-image NSFW flags, and the prompt that was run.
var SDXLLightningResponseSchema = z.object({
  images: z.array(SDXLLightningImageDataSchema),
  timings: z.record(z.number()),
  seed: z.number(),
  has_nsfw_concepts: z.array(z.boolean()),
  prompt: z.string()
}).passthrough();
// Explicit pixel dimensions for Kolors output.
var KolorsImageSizeSchema = z.object({
  width: z.number(),
  height: z.number()
}).passthrough();
// Kolors request: prompt and required image size, with optional negative
// prompt and guidance scale.
var KolorsRequestSchema = z.object({
  prompt: z.string(),
  negative_prompt: z.string().optional(),
  guidance_scale: z.number().optional(),
  image_size: KolorsImageSizeSchema
}).passthrough();
// One Kolors output image with its dimensions and MIME type.
var KolorsImageDataSchema = z.object({
  url: z.string(),
  width: z.number(),
  height: z.number(),
  content_type: z.string()
}).passthrough();
// Kolors response — same shape as the SDXL-Lightning response: images,
// timing map, seed, NSFW flags, and the prompt.
var KolorsResponseSchema = z.object({
  images: z.array(KolorsImageDataSchema),
  timings: z.record(z.number()),
  seed: z.number(),
  has_nsfw_concepts: z.array(z.boolean()),
  prompt: z.string()
}).passthrough();
// Metadata for one Auraflow output image: location, MIME type, byte size,
// and pixel dimensions.
var AuraflowImageDataSchema = z
  .object({
    content_type: z.string(),
    file_size: z.number(),
    height: z.number(),
    url: z.string(),
    width: z.number()
  })
  .passthrough();
// Auraflow response: the generated images plus the seed and prompt used.
var AuraflowResponseSchema = z
  .object({
    images: z.array(AuraflowImageDataSchema),
    prompt: z.string(),
    seed: z.number()
  })
  .passthrough();
// Aspect ratios accepted by Luma Photon.
var LumaPhotonAspectRatioSchema = z.enum([
  "1:1",
  "3:4",
  "4:3",
  "9:16",
  "16:9",
  "9:21",
  "21:9"
]);
// Luma Photon request: prompt plus aspect ratio defaulting to 16:9.
var LumaPhotonRequestSchema = z.object({
  prompt: z.string(),
  aspect_ratio: LumaPhotonAspectRatioSchema.optional().default("16:9")
}).passthrough();
// One Luma Photon output image with MIME type and byte size.
var LumaPhotonImageDataSchema = z.object({
  url: z.string(),
  content_type: z.string(),
  file_size: z.number()
}).passthrough();
// Luma Photon response: just the list of generated images.
var LumaPhotonResponseSchema = z.object({
  images: z.array(LumaPhotonImageDataSchema)
}).passthrough();
// SDXL request.
// NOTE(review): width/height are validated as strings, not numbers —
// presumably the upstream API expects stringified dimensions; verify
// against the caller before changing.
var SDXLRequestSchema = z.object({
  prompt: z.string(),
  negative_prompt: z.string().optional(),
  width: z.string().optional(),
  height: z.string().optional()
}).passthrough();
// SDXL response, shaped like a Replicate-style prediction record with
// lifecycle timestamps and a terminal status.
var SDXLResponseSchema = z.object({
  completed_at: z.string(),
  created_at: z.string(),
  error: z.string(),
  id: z.string(),
  model: z.string(),
  output: z.string(),
  // JSON string of URLs array
  started_at: z.string(),
  status: z.enum(["succeeded", "failed"])
}).passthrough();
// Aspect ratios accepted by Stable Diffusion 3 Ultra.
var SD3UltraAspectRatioSchema = z.enum([
  "16:9",
  "1:1",
  "21:9",
  "2:3",
  "3:2",
  "4:5",
  "5:4",
  "9:16",
  "9:21"
]);
// SD3 Ultra request: prompt plus optional negative prompt, aspect ratio
// (default 1:1), output format, and seed.
var SD3UltraRequestSchema = z.object({
  prompt: z.string(),
  negative_prompt: z.string().optional(),
  aspect_ratio: SD3UltraAspectRatioSchema.optional().default("1:1"),
  output_format: z.enum(["jpeg", "png"]).optional(),
  seed: z.number().optional()
}).passthrough();
// Fixed "WxH" size strings accepted by Stable Diffusion 3.
var SD3ImageSizeSchema = z.enum([
  "1024x1024",
  "1024x2048",
  "1536x1024",
  "1536x2048",
  "2048x1152",
  "1152x2048"
]);
// SD3 request: prompt plus bounded tuning knobs, each with a default —
// batch size 1-4, 1-100 inference steps, guidance scale 0-100.
var SD3RequestSchema = z.object({
  prompt: z.string(),
  image_size: SD3ImageSizeSchema.optional().default("1024x1024"),
  batch_size: z.number().min(1).max(4).optional().default(1),
  num_inference_steps: z.number().min(1).max(100).optional().default(20),
  guidance_scale: z.number().min(0).max(100).optional().default(7.5)
}).passthrough();
// One SD3 output image with MIME type and byte size.
var SD3ImageDataSchema = z.object({
  url: z.string(),
  content_type: z.string(),
  file_size: z.number()
}).passthrough();
// SD3 response: just the list of generated images.
var SD3ResponseSchema = z.object({
  images: z.array(SD3ImageDataSchema)
}).passthrough();
// Aspect ratios accepted by Stable Diffusion 3.5 (same set as SD3 Ultra).
var SD35AspectRatioSchema = z.enum([
  "16:9",
  "1:1",
  "21:9",
  "2:3",
  "3:2",
  "4:5",
  "5:4",
  "9:16",
  "9:21"
]);
// The three SD3.5 model variants.
var SD35ModelSchema = z.enum([
  "sd3.5-large",
  "sd3.5-large-turbo",
  "sd3.5-medium"
]);
// SD3.5 request. Unlike most request schemas here, `model` is required
// (no default). `mode` defaults to text-to-image.
var SD35RequestSchema = z.object({
  prompt: z.string(),
  aspect_ratio: SD35AspectRatioSchema.optional().default("1:1"),
  mode: z.enum(["text-to-image", "image-to-image"]).optional().default("text-to-image"),
  model: SD35ModelSchema,
  negative_prompt: z.string().optional(),
  output_format: z.enum(["jpeg", "png"]).optional(),
  seed: z.number().optional()
}).passthrough();
// Which Discord bot handles the job: standard Midjourney or Niji.
var MidjourneyBotTypeSchema = z.enum(["MID_JOURNEY", "NIJI_JOURNEY"]);
// Payload to submit a new Midjourney task; `state` is an opaque
// client-supplied string echoed back on the task.
var MidjourneySubmitRequestSchema = z.object({
  prompt: z.string(),
  botType: MidjourneyBotTypeSchema.optional().default("MID_JOURNEY"),
  state: z.string().optional()
}).passthrough();
// Submission acknowledgement; `result` carries the task id used for
// subsequent polling/actions.
var MidjourneySubmitResponseSchema = z.object({
  code: z.number(),
  description: z.string(),
  result: z.string()
}).passthrough();
// One interactive button attached to a finished task (e.g. upscale or
// variation actions); `customId` is what gets sent back in an action request.
var MidjourneyButtonSchema = z.object({
  customId: z.string(),
  emoji: z.string(),
  label: z.string(),
  style: z.number(),
  type: z.number()
}).passthrough();
// Task lifecycle states surfaced by the polling endpoint.
var MidjourneyTaskStatusSchema = z.enum([
  "IN_PROGRESS",
  "SUCCESS",
  "FAILED"
]);
// Full task record returned while polling: prompt details, timing, progress
// string, image URL once available, and the action buttons.
var MidjourneyTaskResponseSchema = z.object({
  action: z.string(),
  botType: z.string(),
  buttons: z.array(MidjourneyButtonSchema),
  customId: z.string(),
  description: z.string(),
  failReason: z.string(),
  finishTime: z.number(),
  id: z.string(),
  imageUrl: z.string(),
  maskBase64: z.string(),
  mode: z.string(),
  progress: z.string(),
  prompt: z.string(),
  promptEn: z.string(),
  proxy: z.string(),
  startTime: z.number(),
  state: z.string(),
  status: MidjourneyTaskStatusSchema,
  submitTime: z.number()
}).passthrough();
// Payload to trigger a button action (`customId`) on an existing task.
var MidjourneyActionRequestSchema = z.object({
  customId: z.string(),
  taskId: z.string()
}).passthrough();
// Omnigen request: prompt plus optional tuning fields. `ImageSizeSchema`
// is the shared size schema declared earlier in this bundle.
var OmnigenRequestSchema = z.object({
  prompt: z.string(),
  negative_prompt: z.string().optional(),
  image_size: ImageSizeSchema.optional(),
  num_inference_steps: z.number().optional(),
  guidance_scale: z.number().optional(),
  output_format: z.string().optional(),
  seed: z.number().optional()
}).passthrough();
// One Omnigen output image: location, MIME type, byte size, dimensions.
var OmnigenImageDataSchema = z.object({
  url: z.string(),
  content_type: z.string(),
  file_size: z.number(),
  width: z.number(),
  height: z.number()
}).passthrough();
// Omnigen response: images, seed, NSFW flags, and nullable debug payloads.
var OmnigenResponseSchema = z.object({
  images: z.array(OmnigenImageDataSchema),
  seed: z.number(),
  has_nsfw_concepts: z.array(z.boolean()),
  debug_latents: z.any().nullable(),
  debug_per_pass_latents: z.any().nullable()
}).passthrough();
// CogView request: model name, prompt, and an optional free-form size
// string.
var CogViewRequestSchema = z
  .object({
    model: z.string(),
    prompt: z.string(),
    size: z.string().optional()
  })
  .passthrough();
// A single CogView image, identified only by its URL.
var CogViewImageDataSchema = z
  .object({ url: z.string() })
  .passthrough();
// CogView response: numeric creation timestamp plus the image list.
var CogViewResponseSchema = z
  .object({
    data: z.array(CogViewImageDataSchema),
    created: z.number()
  })
  .passthrough();
// Minimax request: model and prompt plus optional aspect ratio, prompt
// optimization flag, and response format.
var MinimaxRequestSchema = z.object({
  model: z.string(),
  prompt: z.string(),
  aspect_ratio: z.string().optional(),
  prompt_optimizer: z.boolean().optional(),
  response_format: z.string().optional()
}).passthrough();
// Minimax response: a base status envelope, the image URLs, an id, and
// per-batch counters. Note failed_count/success_count arrive as strings.
var MinimaxResponseSchema = z.object({
  base_resp: z.object({
    status_code: z.number(),
    status_msg: z.string()
  }),
  data: z.object({
    image_urls: z.array(z.string())
  }),
  id: z.string(),
  metadata: z.object({
    failed_count: z.string(),
    success_count: z.string()
  })
}).passthrough();
// Output sizes accepted by the GPT image endpoint.
var GPTImageSizeSchema = z.enum([
  "1024x1024",
  "1536x1024",
  "1024x1536"
]);
// Background handling for generated images.
var GPTImageBackgroundSchema = z.enum([
  "transparent",
  "opaque",
  "auto"
]);
// Quality tiers for generated images.
var GPTImageQualitySchema = z.enum(["auto", "high", "medium", "low"]);
// GPT image request: prompt plus optional model/size/background/quality,
// image count (1-10), and response encoding (URL or base64).
var GPTImageRequestSchema = z.object({
  prompt: z.string(),
  model: z.string().optional(),
  size: GPTImageSizeSchema.optional().default("1024x1024"),
  background: GPTImageBackgroundSchema.optional(),
  quality: GPTImageQualitySchema.optional(),
  n: z.number().min(1).max(10).optional(),
  response_format: z.enum(["url", "b64_json"]).optional()
}).passthrough();
// One GPT image result, delivered as a URL or inline base64 depending on
// the requested response_format.
var GPTImageDataSchema = z.object({
  url: z.string().optional(),
  b64_json: z.string().optional()
}).passthrough();
// GPT image response. The optional usage block, when present, requires BOTH
// the prompt_/completion_ and input_/output_ token field families.
// NOTE(review): confirm the API always returns all of these together —
// if it returns only one family, usage validation will fail.
var GPTImageResponseSchema = z.object({
  created: z.number(),
  data: z.array(GPTImageDataSchema),
  usage: z.object({
    prompt_tokens: z.number(),
    completion_tokens: z.number(),
    total_tokens: z.number(),
    input_tokens: z.number(),
    output_tokens: z.number(),
    input_tokens_details: z.object({
      text_tokens: z.number(),
      cached_tokens_details: z.record(z.any())
    }),
    prompt_tokens_details: z.object({
      cached_tokens_details: z.record(z.any())
    }),
    completion_tokens_details: z.record(z.any())
  }).optional()
}).passthrough();
// Bagel request: just a prompt and an optional reasoning ("thought") flag.
var BagelRequestSchema = z.object({
  prompt: z.string(),
  use_thought: z.boolean().optional()
}).passthrough();
// One Bagel output image: location, MIME type, byte size, dimensions.
var BagelImageDataSchema = z.object({
  url: z.string(),
  content_type: z.string(),
  file_size: z.number(),
  width: z.number(),
  height: z.number()
}).passthrough();
// Bagel response — same shape as the Omnigen response: images, seed,
// NSFW flags, and nullable debug payloads.
var BagelResponseSchema = z.object({
  images: z.array(BagelImageDataSchema),
  seed: z.number(),
  has_nsfw_concepts: z.array(z.boolean()),
  debug_latents: z.any().nullable(),
  debug_per_pass_latents: z.any().nullable()
}).passthrough();
// Flux Kontext request. Supports an optional input image (image-to-image),
// webhook delivery (URL length-capped at 2083 chars, the classic IE URL
// limit), prompt upsampling, and a 0-6 safety tolerance defaulting to 2.
var FluxKontextRequestSchema = z.object({
  prompt: z.string(),
  input_image: z.string().nullable().optional(),
  seed: z.number().nullable().optional(),
  aspect_ratio: z.string().nullable().optional(),
  output_format: z.enum(["jpeg", "png"]).optional().default("png"),
  webhook_url: z.string().url().min(1).max(2083).nullable().optional(),
  webhook_secret: z.string().nullable().optional(),
  prompt_upsampling: z.boolean().optional().default(false),
  safety_tolerance: z.number().min(0).max(6).optional().default(2)
}).passthrough();
// Acknowledgement for an async Flux Kontext submission: the task id and
// the URL to poll for the result.
var FluxKontextSubmitResponseSchema = z.object({
  id: z.string(),
  polling_url: z.string()
}).passthrough();
// Completed-task payload: seed, prompt, the generated sample (URL), and
// timing information.
var FluxKontextResultDataSchema = z.object({
  seed: z.number(),
  prompt: z.string(),
  sample: z.string(),
  duration: z.number(),
  end_time: z.number(),
  start_time: z.number()
}).passthrough();
// Polling response. `result` is optional — presumably only populated once
// `status` indicates completion; verify against the polling handler.
var FluxKontextResultResponseSchema = z.object({
  id: z.string(),
  result: FluxKontextResultDataSchema.optional(),
  status: z.string()
}).passthrough();
// Output quality tiers for the Soul model.
var SoulQualitySchema = z.enum(["720p", "1080p"]);
// Aspect ratios accepted by the Soul model.
var SoulAspectRatioSchema = z.enum([
  "9:16",
  "3:4",
  "2:3",
  "1:1",
  "4:3",
  "16:9",
  "3:2"
]);
// Soul request. Unlike most request schemas in this bundle, every field
// here is required — callers must supply all of them.
var SoulRequestSchema = z.object({
  quality: SoulQualitySchema,
  aspect_ratio: SoulAspectRatioSchema,
  prompt: z.string(),
  enhance_prompt: z.boolean(),
  seed: z.number(),
  style_id: z.string(),
  negative_prompt: z.string()
}).passthrough();
// Acknowledgement for an async Soul submission: just the job id to poll.
var SoulSubmitResponseSchema = z.object({
  id: z.string()
}).passthrough();
// Finished-job artifacts in two renditions: `min` (reduced) and `raw`
// (full size), each with a type tag and URL.
var SoulJobResultSchema = z.object({
  min: z.object({
    type: z.string(),
    url: z.string()
  }),
  raw: z.object({
    type: z.string(),
    url: z.string()
  })
}).passthrough();
var SoulJobSchema = z.object({
board_ids: z.array(z.string()),
created_at: z.number(),
id: z.string(),
meta: z.