UNPKG

@302ai/ai-sdk

Version:

The **[302AI provider](https://sdk.vercel.ai/providers/ai-sdk-providers/)** for the [AI SDK](https://sdk.vercel.ai/docs) provides image model support for the [302AI](https://302.ai) platform.

1,661 lines (1,643 loc) 223 kB
// NOTE(review): this is bundled distribution output (the `__require` shim and
// `/* @__PURE__ */` annotations are characteristic of esbuild). Hand-edits here
// will be lost on the next build — fix the `src/*.ts` sources instead. Line
// breaks in this copy appear to have been mangled by extraction; several
// `// src/...` section markers sit on the same line as the code that follows.
// Below: the dynamic-require shim, shared JSON/error response handlers, and
// BaseModelHandler — the base class every per-model handler extends, providing
// size/aspect-ratio validation and retrying image downloads.
var __require = /* @__PURE__ */ ((x) => typeof require !== "undefined" ? require : typeof Proxy !== "undefined" ? new Proxy(x, { get: (a, b) => (typeof require !== "undefined" ? require : a)[b] }) : x)(function(x) { if (typeof require !== "undefined") return require.apply(this, arguments); throw Error('Dynamic require of "' + x + '" is not supported'); }); // src/ai302-provider.ts import { loadApiKey, withoutTrailingSlash, withUserAgentSuffix } from "@ai-sdk/provider-utils"; // src/models/auraflow.ts import { combineHeaders, postToApi, resolve } from "@ai-sdk/provider-utils"; // src/utils/api-handlers.ts import { APICallError } from "@ai-sdk/provider"; import { extractResponseHeaders } from "@ai-sdk/provider-utils"; var createJsonResponseHandler = () => async ({ response, url, requestBodyValues }) => { const responseHeaders = extractResponseHeaders(response); const responseBody = await response.json(); return { responseHeaders, value: responseBody }; }; var statusCodeErrorResponseHandler = async ({ response, url, requestBodyValues }) => { const responseHeaders = extractResponseHeaders(response); const responseBody = await response.text(); return { responseHeaders, value: new APICallError({ message: response.statusText, url, requestBodyValues, statusCode: response.status, responseHeaders, responseBody }) }; }; // src/models/base-model.ts var BaseModelHandler = class { constructor(modelId, settings, config) { this.modelId = modelId; this.settings = settings; this.config = config; } async handleRequest(params) { const { headers, ...rest } = params; const requestHeaders = headers ? 
// handleRequest (continued): drops `undefined` header values before delegating
// to the subclass's processRequest. parseSize / validateAspectRatio below are
// shared helpers for "WIDTHxHEIGHT" and "W:H" inputs; out-of-range ratios are
// clamped and a warning is pushed.
Object.fromEntries( Object.entries(headers).filter(([_, v]) => v !== void 0).map(([k, v]) => [k, v]) ) : void 0; return this.processRequest({ ...rest, headers: requestHeaders }); } parseSize(size) { if (!size) return void 0; const [width, height] = size.split("x").map(Number); return { width, height }; } validateAspectRatio(aspectRatio, warnings, maxRatio, minRatio) { if (!aspectRatio) return void 0; const [width, height] = aspectRatio.split(":").map(Number); if (!width || !height) return void 0; if (maxRatio === void 0 || minRatio === void 0) { return aspectRatio; } const ratio = width / height; if (ratio >= minRatio && ratio <= maxRatio) { return aspectRatio; } let adjustedWidth; let adjustedHeight; if (ratio > maxRatio) { adjustedHeight = 9; adjustedWidth = Math.round(maxRatio * adjustedHeight); } else { adjustedWidth = 9; adjustedHeight = Math.round(adjustedWidth / minRatio); } warnings.push({ type: "unsupported", feature: "aspectRatio", details: `Aspect ratio ${aspectRatio} is outside the allowed range (${adjustedWidth}:${adjustedHeight} to ${adjustedHeight}:${adjustedWidth}). 
Adjusted to ${adjustedWidth}:${adjustedHeight}` }); return `${adjustedWidth}:${adjustedHeight}`; } aspectRatioToSize(aspectRatio, baseSize = 1024, warnings) { if (!aspectRatio) return void 0; const validatedAspectRatio = this.validateAspectRatio( aspectRatio, warnings ); if (!validatedAspectRatio) return void 0; const [width, height] = validatedAspectRatio.split(":").map(Number); if (!width || !height) return void 0; const ratio = width / height; if (ratio > 1) { return { width: baseSize, height: Math.round(baseSize / ratio) }; } else { return { width: Math.round(baseSize * ratio), height: baseSize }; } } async downloadImage(url) { const maxRetries = 5; const timeout = 12e4; for (let attempt = 0; attempt < maxRetries; attempt++) { const controller = new AbortController(); try { const timeoutId = setTimeout(() => controller.abort(), timeout); const imageResponse = await fetch(url, { signal: controller.signal, headers: { Accept: "image/*", "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36" } }); clearTimeout(timeoutId); if (!imageResponse.ok) { throw new Error(`HTTP error! status: ${imageResponse.status}`); } const arrayBuffer = await imageResponse.arrayBuffer(); const base64 = Buffer.from(arrayBuffer).toString("base64"); return base64; } catch (error) { const errorMessage = error instanceof Error ? 
// downloadImage (continued): retries up to 5 times with exponential backoff
// (2^attempt * 2s + jitter, capped at 30s) and a 120s per-attempt timeout.
// NOTE(review): when fetch throws, the pending setTimeout is never cleared,
// and controller.abort() is called only after the backoff delay — both are
// harmless here but worth cleaning up in the source. downloadImages below is
// best-effort: individual failures yield null and are filtered; it only
// throws when ALL downloads fail.
error.message : String(error); if (attempt === maxRetries - 1) { throw new Error( `Failed to download image after ${maxRetries} attempts: ${errorMessage}` ); } const delay = Math.min( 2 ** attempt * 2e3 + Math.random() * 1e3, 3e4 ); await new Promise((resolve44) => setTimeout(resolve44, delay)); controller.abort(); } } throw new Error("Failed to download image after retries"); } async downloadImages(urls) { const imagePromises = urls.map(async (url) => { try { return await this.downloadImage(url); } catch (error) { return null; } }); const base64Images = await Promise.all(imagePromises); const validImages = base64Images.filter(Boolean); if (validImages.length === 0) { throw new Error("All image downloads failed"); } return validImages; } validateSizeOption(parsedSize, supportedSizes, warnings) { const validatedSize = this.validateDimensionsMultipleOf32( parsedSize, warnings ); const sizeStr = `${validatedSize.width}x${validatedSize.height}`; if (!supportedSizes.includes(sizeStr)) { const closestSize = this.findClosestSize(validatedSize, supportedSizes); warnings.push({ type: "unsupported", feature: "size", details: `Size ${sizeStr} is not supported. 
Using closest supported size: ${closestSize}` }); const [width, height] = closestSize.split("x").map(Number); return { width, height }; } return validatedSize; } validateDimensionsMultipleOf32(size, warnings, minSize = 32, maxSize = 4096) { const adjustDimension = (value) => { if (value < minSize) { return minSize; } if (value > maxSize) { return maxSize; } if (value % 32 !== 0) { const roundedValue = Math.round(value / 32) * 32; return Math.min(maxSize, Math.max(minSize, roundedValue)); } return value; }; const adjustedWidth = adjustDimension(size.width); const adjustedHeight = adjustDimension(size.height); if (adjustedWidth !== size.width || adjustedHeight !== size.height) { warnings.push({ type: "unsupported", feature: "size", details: `Image dimensions must be multiples of 32 and within the range ${minSize}-${maxSize}. Adjusted from ${size.width}x${size.height} to ${adjustedWidth}x${adjustedHeight}` }); return { width: adjustedWidth, height: adjustedHeight }; } return size; } findClosestSize(size, supportedSizes) { const targetRatio = size.width / size.height; const sizesByRatio = supportedSizes.slice().sort((a, b) => { const [w1, h1] = a.split("x").map(Number); const [w2, h2] = b.split("x").map(Number); const ratio1 = w1 / h1; const ratio2 = w2 / h2; const diff1 = Math.abs(ratio1 - targetRatio); const diff2 = Math.abs(ratio2 - targetRatio); return diff1 - diff2; }); const similarRatioSizes = sizesByRatio.slice(0, 2); return similarRatioSizes.reduce((closest, current) => { const [w1, h1] = current.split("x").map(Number); const [w2, h2] = closest.split("x").map(Number); const diff1 = Math.abs(Math.max(w1, h1) - 1024); const diff2 = Math.abs(Math.max(w2, h2) - 1024); return diff1 < diff2 ? 
// findClosestSize (continued): of the two candidates whose aspect ratio is
// nearest the target, prefer the one whose larger dimension is closest to
// 1024. findClosestAspectRatio / sizeToAspectRatio below do the analogous
// mapping for backends that accept only "W:H" ratios, warning when the match
// drifts (ratio difference > 0.05 for sizeToAspectRatio).
current : closest; }); } findClosestAspectRatio(targetRatio, supportedRatios, warnings) { if (!targetRatio) return supportedRatios[0]; const [targetWidth, targetHeight] = targetRatio.split(":").map(Number); if (!targetWidth || !targetHeight) return supportedRatios[0]; const targetValue = targetWidth / targetHeight; let closestRatio = supportedRatios[0]; let minDiff = Infinity; for (const ratio of supportedRatios) { const [w, h] = ratio.split(":").map(Number); if (!w || !h) continue; const currentValue = w / h; const diff = Math.abs(currentValue - targetValue); if (diff < minDiff) { minDiff = diff; closestRatio = ratio; } } if (closestRatio !== targetRatio) { warnings.push({ type: "unsupported", feature: "aspectRatio", details: `Aspect ratio ${targetRatio} is not supported. Using closest supported ratio: ${closestRatio}` }); } return closestRatio; } sizeToAspectRatio(size, supportedRatios, warnings) { if (!size) return void 0; const parsedSize = this.parseSize(size); if (!parsedSize) { warnings.push({ type: "unsupported", feature: "size", details: `Invalid size format: ${size}. 
Expected format: WIDTHxHEIGHT` }); return void 0; } const ratio = parsedSize.width / parsedSize.height; let closestRatio = supportedRatios[0]; let minDiff = Infinity; for (const aspectRatio of supportedRatios) { const [w, h] = aspectRatio.split(":").map(Number); if (!w || !h) continue; const currentRatio = w / h; const diff = Math.abs(currentRatio - ratio); if (diff < minDiff) { minDiff = diff; closestRatio = aspectRatio; } } const [closestW, closestH] = closestRatio.split(":").map(Number); const closestRatioValue = closestW / closestH; if (Math.abs(closestRatioValue - ratio) > 0.05) { warnings.push({ type: "compatibility", feature: "size", details: `Size ${size} (ratio ${ratio.toFixed(2)}) converted to closest supported aspect ratio: ${closestRatio}` }); } return closestRatio; } }; // src/models/auraflow.ts var AuraflowHandler = class extends BaseModelHandler { async processRequest({ prompt, n, size, aspectRatio, seed, providerOptions, headers, abortSignal }) { const warnings = []; if (n != null && n > 1) { warnings.push({ type: "unsupported", feature: "n", details: "AuraFlow does not support batch generation" }); } if (size != null) { warnings.push({ type: "unsupported", feature: "size" }); } if (aspectRatio != null) { warnings.push({ type: "unsupported", feature: "aspectRatio" }); } if (seed != null) { warnings.push({ type: "unsupported", feature: "seed" }); } if (providerOptions.ai302 != null) { warnings.push({ type: "unsupported", feature: "providerOptions" }); } if (!prompt) { throw new Error("Prompt is required for AuraFlow"); } const formData = new FormData(); formData.append("prompt", prompt); const resolvedHeaders = await resolve(this.config.headers()); const { value: response, responseHeaders } = await postToApi({ url: this.config.url({ modelId: this.modelId, path: "/302/submit/aura-flow" }), headers: combineHeaders(resolvedHeaders, headers), body: { content: formData, values: { prompt } }, failedResponseHandler: statusCodeErrorResponseHandler, 
// AuraflowHandler.processRequest (continued): parse the JSON response, collect
// image URLs, and download them as base64. BagelHandler below follows the same
// pattern but posts JSON (with a `use_thought` flag defaulting to false and a
// passthrough spread of providerOptions.ai302); CogViewHandler maps
// size/aspectRatio onto a fixed list of supported sizes before calling the
// BigModel generations endpoint.
successfulResponseHandler: createJsonResponseHandler(), abortSignal, fetch: this.config.fetch }); const urls = response.images.map((img) => img.url).filter(Boolean); const images = await this.downloadImages(urls); return { images, warnings, response: { timestamp: /* @__PURE__ */ new Date(), modelId: this.modelId, headers: responseHeaders } }; } }; // src/models/bagel.ts import { combineHeaders as combineHeaders2, postJsonToApi, resolve as resolve2 } from "@ai-sdk/provider-utils"; var BagelHandler = class extends BaseModelHandler { async processRequest({ prompt, n, size, aspectRatio, seed, providerOptions, headers, abortSignal }) { var _a, _b, _c; const warnings = []; if (n != null && n > 1) { warnings.push({ type: "unsupported", feature: "n", details: "Bagel does not support batch generation" }); } if (size != null) { warnings.push({ type: "unsupported", feature: "size", details: "Bagel does not support custom size" }); } if (aspectRatio != null) { warnings.push({ type: "unsupported", feature: "aspectRatio", details: "Bagel does not support custom aspect ratio" }); } if (seed != null) { warnings.push({ type: "unsupported", feature: "seed", details: "Bagel does not support custom seed" }); } const resolvedHeaders = await resolve2(this.config.headers()); const { value: response, responseHeaders } = await postJsonToApi({ url: this.config.url({ modelId: this.modelId, path: "/302/submit/bagel" }), headers: combineHeaders2(resolvedHeaders, headers), body: { prompt, use_thought: (_b = (_a = providerOptions.ai302) == null ? void 0 : _a.use_thought) != null ? _b : false, ...(_c = providerOptions.ai302) != null ? 
_c : {} }, failedResponseHandler: statusCodeErrorResponseHandler, successfulResponseHandler: createJsonResponseHandler(), abortSignal, fetch: this.config.fetch }); const urls = response.images.map((img) => img.url).filter(Boolean); const images = await this.downloadImages(urls); return { images, warnings, response: { timestamp: /* @__PURE__ */ new Date(), modelId: this.modelId, headers: responseHeaders } }; } }; // src/models/cogview.ts import { combineHeaders as combineHeaders3, postJsonToApi as postJsonToApi2, resolve as resolve3 } from "@ai-sdk/provider-utils"; var SUPPORTED_SIZE_OPTIONS = [ "1024x1024", "768x1344", "864x1152", "1344x768", "1152x864", "1440x720", "720x1440" ]; var CogViewHandler = class extends BaseModelHandler { async processRequest({ prompt, n, size, aspectRatio, seed, providerOptions, headers, abortSignal }) { var _a; const warnings = []; if (n != null && n > 1) { warnings.push({ type: "unsupported", feature: "n", details: "CogView-4 does not support batch generation" }); } if (seed != null) { warnings.push({ type: "unsupported", feature: "seed", details: "CogView-4 does not support seed parameter" }); } let sizeString; if (size) { const parsedSize = this.parseSize(size); if (parsedSize) { const validatedSize = this.validateSizeOption(parsedSize, SUPPORTED_SIZE_OPTIONS, warnings); sizeString = `${validatedSize.width}x${validatedSize.height}`; } } else if (aspectRatio) { const parsedSize = this.aspectRatioToSize(aspectRatio, 1024, warnings); if (parsedSize) { const validatedSize = this.validateSizeOption(parsedSize, SUPPORTED_SIZE_OPTIONS, warnings); sizeString = `${validatedSize.width}x${validatedSize.height}`; } } const modelVariant = this.modelId === "cogview-4-250304" ? 
// CogViewHandler.processRequest (continued): only the "cogview-4-250304" model
// id keeps its exact name; any other id falls back to the "cogview-4" variant.
// DallEHandler below defaults to 1024x1024, derives a size from aspectRatio
// when no explicit size is given, and always sends model "dall-e-3".
"cogview-4-250304" : "cogview-4"; const resolvedHeaders = await resolve3(this.config.headers()); const { value: response, responseHeaders } = await postJsonToApi2({ url: this.config.url({ modelId: this.modelId, path: "/bigmodel/api/paas/v4/images/generations" }), headers: combineHeaders3(resolvedHeaders, headers), body: { model: modelVariant, prompt, size: sizeString, ...(_a = providerOptions.ai302) != null ? _a : {} }, failedResponseHandler: statusCodeErrorResponseHandler, successfulResponseHandler: createJsonResponseHandler(), abortSignal, fetch: this.config.fetch }); const urls = response.data.map((img) => img.url).filter(Boolean); const images = await this.downloadImages(urls); return { images, warnings, response: { timestamp: /* @__PURE__ */ new Date(), modelId: this.modelId, headers: responseHeaders } }; } }; // src/models/dalle.ts import { combineHeaders as combineHeaders4, postJsonToApi as postJsonToApi3, resolve as resolve4 } from "@ai-sdk/provider-utils"; var SUPPORTED_SIZE_OPTIONS2 = ["256x256", "512x512", "1024x1024"]; var DallEHandler = class extends BaseModelHandler { async processRequest({ prompt, n, size, aspectRatio, seed, providerOptions, headers, abortSignal }) { var _a; const warnings = []; if (n != null && n > 1) { warnings.push({ type: "unsupported", feature: "n", details: "DALL-E 3 does not support batch generation" }); } if (size != null && aspectRatio != null) { warnings.push({ type: "unsupported", feature: "aspectRatio", details: "When size is provided, aspectRatio will be ignored" }); } else if (size == null && aspectRatio != null) { warnings.push({ type: "compatibility", feature: "aspectRatio", details: "Using size calculated from aspect ratio with base size 1024" }); } if (seed != null) { warnings.push({ type: "unsupported", feature: "seed" }); } let parsedSize = this.parseSize(size) || this.aspectRatioToSize(aspectRatio, 1024, warnings) || { width: 1024, height: 1024 }; parsedSize = this.validateSizeOption(parsedSize, 
// clamp to the fixed DALL-E size list (256/512/1024 squares)
SUPPORTED_SIZE_OPTIONS2, warnings); const resolvedHeaders = await resolve4(this.config.headers()); const { value: response, responseHeaders } = await postJsonToApi3({ url: this.config.url({ modelId: this.modelId, path: "/v1/images/generations" }), headers: combineHeaders4(resolvedHeaders, headers), body: { prompt, model: "dall-e-3", size: `${parsedSize.width}x${parsedSize.height}`, ...(_a = providerOptions.ai302) != null ? _a : {} }, failedResponseHandler: statusCodeErrorResponseHandler, successfulResponseHandler: createJsonResponseHandler(), abortSignal, fetch: this.config.fetch }); const urls = response.data.map((img) => img.url).filter(Boolean); const images = await this.downloadImages(urls); return { images, warnings, response: { timestamp: /* @__PURE__ */ new Date(), modelId: this.modelId, headers: responseHeaders } }; } }; // src/models/flux-pro-dev.ts import { combineHeaders as combineHeaders5, postJsonToApi as postJsonToApi4, resolve as resolve5 } from "@ai-sdk/provider-utils"; // src/ai302-image-settings.ts var modelToBackendConfig = { "flux-v1.1-ultra": { supportsSize: false }, "flux-pro-v1.1": { supportsSize: true }, "flux-pro": { supportsSize: true }, "flux-dev": { supportsSize: true }, "flux-schnell": { supportsSize: true }, "flux-1-krea": { supportsSize: true }, "flux-kontext-max": { supportsSize: true }, "flux-kontext-pro": { supportsSize: true }, "flux-2-pro": { supportsSize: true }, "flux-2-flex": { supportsSize: true }, "flux-2-max": { supportsSize: true }, "ideogram/V_1": { supportsSize: true }, "ideogram/V_1_TURBO": { supportsSize: true }, "ideogram/V_2": { supportsSize: true }, "ideogram/V_2_TURBO": { supportsSize: true }, "ideogram/V_2A": { supportsSize: true }, "ideogram/V_2A_TURBO": { supportsSize: true }, "dall-e-3": { supportsSize: true }, recraftv3: { supportsSize: true }, recraftv2: { supportsSize: true }, "sdxl-lightning": { supportsSize: true }, "sdxl-lightning-v2": { supportsSize: true }, "sdxl-lightning-v3": { supportsSize: true }, 
// modelToBackendConfig (continued): per-model capability map. supportsSize
// tells the handlers whether the backend accepts explicit width/height (true)
// or only aspect-ratio-style input (false). FluxProDevHandler further below
// consults this map: when size is supported it sends both `image_size` and
// `size` objects (clamped to 256-1440, multiples of 32); otherwise it forwards
// `aspect_ratio` instead.
kolors: { supportsSize: true }, "aura-flow": { supportsSize: true }, "luma-photon": { supportsSize: true }, sdxl: { supportsSize: true }, "sd3-ultra": { supportsSize: false }, sd3v2: { supportsSize: true }, "sd3.5-large": { supportsSize: true }, "sd3.5-large-turbo": { supportsSize: true }, "sd3.5-medium": { supportsSize: true }, "midjourney/6.0": { supportsSize: false }, "midjourney/6.1": { supportsSize: false }, "midjourney/7.0": { supportsSize: false }, "nijijourney/6.0": { supportsSize: false }, "google-imagen-3": { supportsSize: true }, "google-imagen-3-fast": { supportsSize: true }, "google-imagen-4-preview": { supportsSize: false }, "doubao-general-v2.1-l": { supportsSize: true }, "doubao-general-v2.0-l": { supportsSize: true }, "doubao-general-v2.0": { supportsSize: true }, "doubao-general-v3.0": { supportsSize: true }, "doubao-seedream-3-0-t2i-250415": { supportsSize: true }, "doubao-seedream-4-0-250828": { supportsSize: true }, "doubao-seedream-4-5-251128": { supportsSize: true }, "lumina-image-v2": { supportsSize: true }, "omnigen-v1": { supportsSize: true }, "playground-v25": { supportsSize: true }, "cogview-4": { supportsSize: true }, "cogview-4-250304": { supportsSize: true }, "minimaxi-image-01": { supportsSize: false }, "irag-1.0": { supportsSize: false }, "hidream-i1-full": { supportsSize: true }, "hidream-i1-dev": { supportsSize: true }, "hidream-i1-fast": { supportsSize: true }, "gpt-image-1": { supportsSize: true }, "gpt-image-1.5": { supportsSize: true }, bagel: { supportsSize: false }, soul: { supportsSize: true }, "kling-v1": { supportsSize: true }, "kling-v1-5": { supportsSize: true }, "kling-v2": { supportsSize: true }, "kling-v2-1": { supportsSize: true }, "qwen-image": { supportsSize: false }, "gemini-2.5-flash-image-preview": { supportsSize: false }, "gemini-3-pro-image-preview": { supportsSize: false }, "z-image-turbo": { supportsSize: true }, "vidu-viduq1": { supportsSize: false }, "vidu-viduq2": { supportsSize: false }, "kling-o1": { 
supportsSize: false }, "wan2.6-image": { supportsSize: true } }; // src/models/flux-pro-dev.ts var FluxProDevHandler = class extends BaseModelHandler { async processRequest({ prompt, n, size, aspectRatio, seed, providerOptions, headers, abortSignal }) { var _a; const warnings = []; if (n != null && n > 1) { warnings.push({ type: "unsupported", feature: "n", details: "Flux Pro does not support batch generation" }); } const backendConfig = modelToBackendConfig[this.modelId]; if (backendConfig == null ? void 0 : backendConfig.supportsSize) { if (size != null && aspectRatio != null) { warnings.push({ type: "unsupported", feature: "aspectRatio", details: "When size is provided, aspectRatio will be ignored" }); } else if (size == null && aspectRatio != null) { warnings.push({ type: "compatibility", feature: "aspectRatio", details: "Using size calculated from aspect ratio with base size 1024" }); } } let parsedSize = this.parseSize(size) || this.aspectRatioToSize(aspectRatio, 1024, warnings) || { width: 1024, height: 1024 }; if (backendConfig == null ? void 0 : backendConfig.supportsSize) { parsedSize = this.validateDimensionsMultipleOf32( parsedSize, warnings, 256, 1440 ); } const resolvedHeaders = await resolve5(this.config.headers()); const { value: response, responseHeaders } = await postJsonToApi4({ url: this.config.url({ modelId: this.modelId, path: `/302/submit/${this.modelId}` }), headers: combineHeaders5(resolvedHeaders, headers), body: { prompt, image_size: (backendConfig == null ? void 0 : backendConfig.supportsSize) ? { width: parsedSize.width, height: parsedSize.height } : void 0, size: (backendConfig == null ? void 0 : backendConfig.supportsSize) ? { width: parsedSize.width, height: parsedSize.height } : void 0, aspect_ratio: !(backendConfig == null ? void 0 : backendConfig.supportsSize) ? aspectRatio : void 0, seed, ...(_a = providerOptions.ai302) != null ? 
_a : {} }, failedResponseHandler: statusCodeErrorResponseHandler, successfulResponseHandler: createJsonResponseHandler(), abortSignal, fetch: this.config.fetch }); const urls = response.images.map((img) => img.url).filter(Boolean); const images = await this.downloadImages(urls); return { images, warnings, response: { timestamp: /* @__PURE__ */ new Date(), modelId: this.modelId, headers: responseHeaders } }; } }; // src/models/flux-1-krea.ts import { combineHeaders as combineHeaders6, postJsonToApi as postJsonToApi5, resolve as resolve6 } from "@ai-sdk/provider-utils"; var FluxKreaHandler = class extends BaseModelHandler { async processRequest({ prompt, n, size, aspectRatio, seed, providerOptions, headers, abortSignal }) { var _a, _b, _c; const warnings = []; if (n != null && n > 1) { warnings.push({ type: "unsupported", feature: "n", details: "Flux-1-Krea does not support batch generation" }); } const backendConfig = modelToBackendConfig[this.modelId]; if (backendConfig == null ? void 0 : backendConfig.supportsSize) { if (size != null && aspectRatio != null) { warnings.push({ type: "unsupported", feature: "aspectRatio", details: "When size is provided, aspectRatio will be ignored" }); } else if (size == null && aspectRatio != null) { warnings.push({ type: "compatibility", feature: "aspectRatio", details: "Using size calculated from aspect ratio with base size 1024" }); } } let parsedSize = this.parseSize(size) || this.aspectRatioToSize(aspectRatio, 1024, warnings) || { width: 1024, height: 1024 }; if (backendConfig == null ? void 0 : backendConfig.supportsSize) { parsedSize = this.validateDimensionsMultipleOf32( parsedSize, warnings, 256, 1440 ); } const resolvedHeaders = await resolve6(this.config.headers()); const { value: response, responseHeaders } = await postJsonToApi5({ url: this.config.url({ modelId: this.modelId, path: `/302/submit/${this.modelId}` }), headers: combineHeaders6(resolvedHeaders, headers), body: { prompt, image_size: (backendConfig == null ? 
void 0 : backendConfig.supportsSize) ? { width: parsedSize.width, height: parsedSize.height } : void 0, num_inference_steps: (_a = providerOptions.ai302) == null ? void 0 : _a.num_inference_steps, guidance_scale: (_b = providerOptions.ai302) == null ? void 0 : _b.guidance_scale, seed, ...(_c = providerOptions.ai302) != null ? _c : {} }, failedResponseHandler: statusCodeErrorResponseHandler, successfulResponseHandler: createJsonResponseHandler(), abortSignal, fetch: this.config.fetch }); const urls = response.images.map((img) => img.url).filter(Boolean); const images = await this.downloadImages(urls); return { images, warnings, response: { timestamp: /* @__PURE__ */ new Date(), modelId: this.modelId, headers: responseHeaders } }; } }; // src/models/flux-kontext.ts import { combineHeaders as combineHeaders7, postJsonToApi as postJsonToApi6, resolve as resolve7 } from "@ai-sdk/provider-utils"; var POLL_INTERVAL = 2e3; var MAX_POLL_TIME = 3e5; var FluxKontextHandler = class extends BaseModelHandler { getModelName() { switch (this.modelId) { case "flux-kontext-max": return "flux-kontext-max"; case "flux-kontext-pro": return "flux-kontext-pro"; default: return "flux-kontext-pro"; } } async pollTask(taskId, resolvedHeaders, abortSignal) { const startTime = Date.now(); const fetchFn = this.config.fetch || fetch; while (true) { if (abortSignal == null ? void 0 : abortSignal.aborted) { throw new Error("Task polling aborted"); } if (Date.now() - startTime > MAX_POLL_TIME) { throw new Error("Task polling timed out"); } const response = await fetchFn( `${this.config.url({ modelId: this.modelId, path: `/flux/v1/get_result?id=${taskId}` })}`, { method: "GET", headers: resolvedHeaders, signal: abortSignal } ); if (!response.ok) { throw new Error(`HTTP error! 
status: ${response.status}`); } const data = await response.json(); if (data.status === "Ready" && data.result) { return data; } if (data.status === "Failed" || data.status === "Error") { throw new Error(`Task failed with status: ${data.status}`); } await new Promise((resolve44) => setTimeout(resolve44, POLL_INTERVAL)); } } async processRequest({ prompt, n, size, aspectRatio, seed, providerOptions, headers, abortSignal }) { var _a; const warnings = []; if (n != null && n > 1) { warnings.push({ type: "unsupported", feature: "n", details: "Flux Kontext generates one image per request" }); } let finalAspectRatio; if (size) { const supportedRatios = [ "21:9", "16:9", "3:2", "4:3", "1:1", "3:4", "2:3", "9:16", "9:21" ]; const sizeToAspectRatio = this.sizeToAspectRatio( size, supportedRatios, warnings ); if (sizeToAspectRatio) { finalAspectRatio = sizeToAspectRatio; } } else if (aspectRatio) { const supportedRatios = [ "21:9", "16:9", "3:2", "4:3", "1:1", "3:4", "2:3", "9:16", "9:21" ]; if (supportedRatios.includes(aspectRatio)) { finalAspectRatio = aspectRatio; } else { warnings.push({ type: "unsupported", feature: "aspectRatio", details: `Aspect ratio ${aspectRatio} not supported. Supported ratios: ${supportedRatios.join(", ")}` }); } } if (size != null && aspectRatio != null) { warnings.push({ type: "unsupported", feature: "aspectRatio", details: "Both size and aspectRatio provided. Size will be converted to aspect ratio and aspectRatio parameter will be ignored." }); } const ai302Options = (providerOptions == null ? 
void 0 : providerOptions.ai302) || {}; const inputImage = ai302Options.input_image; const promptUpsampling = ai302Options.prompt_upsampling; const safetyTolerance = ai302Options.safety_tolerance; const outputFormat = ai302Options.output_format; const webhookUrl = ai302Options.webhook_url; const webhookSecret = ai302Options.webhook_secret; const resolvedHeaders = await resolve7(this.config.headers()); const { value: submitResponse, responseHeaders } = await postJsonToApi6({ url: this.config.url({ modelId: this.modelId, path: `/flux/v1/${this.getModelName()}` }), headers: combineHeaders7(resolvedHeaders, headers), body: { prompt, ...inputImage !== void 0 && { input_image: inputImage }, ...seed !== void 0 && { seed }, ...finalAspectRatio !== void 0 && { aspect_ratio: finalAspectRatio }, ...outputFormat !== void 0 && { output_format: outputFormat }, ...webhookUrl !== void 0 && { webhook_url: webhookUrl }, ...webhookSecret !== void 0 && { webhook_secret: webhookSecret }, ...promptUpsampling !== void 0 && { prompt_upsampling: promptUpsampling }, ...safetyTolerance !== void 0 && { safety_tolerance: safetyTolerance }, ...ai302Options }, failedResponseHandler: statusCodeErrorResponseHandler, successfulResponseHandler: createJsonResponseHandler(), abortSignal, fetch: this.config.fetch }); const taskResult = await this.pollTask(submitResponse.id, resolvedHeaders, abortSignal); if (!((_a = taskResult.result) == null ? 
void 0 : _a.sample)) { throw new Error("No image generated"); }
/* Polling succeeded: hand the single result URL to the shared download helper. */
const images = await this.downloadImages([taskResult.result.sample]);
return { images, warnings, response: { timestamp: /* @__PURE__ */ new Date(), modelId: this.modelId, headers: responseHeaders } }; } };
// src/models/flux-2-pro.ts
import { combineHeaders as combineHeaders8, postJsonToApi as postJsonToApi7, resolve as resolve8 } from "@ai-sdk/provider-utils";
/* Task polling cadence: check every 2 s (2e3 ms), give up after 5 min (3e5 ms). */
var POLL_INTERVAL2 = 2e3;
var MAX_POLL_TIME2 = 3e5;
/*
 * Image handler for the Flux-2 model family (flux-2-pro / flux-2-flex / flux-2-max).
 * Generation is asynchronous: processRequest submits a task to the model's
 * /flux/v1/* endpoint, then pollTask polls /flux/v1/get_result until "Ready".
 */
var Flux2ProHandler = class extends BaseModelHandler {
  /* Submit endpoint path for the current model id; unknown ids fall back to flux-2-pro. */
  getEndpointPath() { switch (this.modelId) { case "flux-2-flex": return "/flux/v1/flux-2-flex"; case "flux-2-max": return "/flux/v1/flux-2-max"; case "flux-2-pro": default: return "/flux/v1/flux-2-pro"; } }
  /* Human-readable model name used in warning text. */
  getModelDisplayName() { switch (this.modelId) { case "flux-2-flex": return "Flux-2-Flex"; case "flux-2-max": return "Flux-2-Max"; case "flux-2-pro": default: return "Flux-2-Pro"; } }
  /*
   * Polls get_result for `taskId` until the task reports "Ready" (returns the payload).
   * Throws when the caller aborts, when MAX_POLL_TIME2 elapses, on a non-OK HTTP
   * response, or when the task ends in "Failed"/"Error".
   */
  async pollTask(taskId, resolvedHeaders, abortSignal) {
    const startTime = Date.now();
    /* Prefer the injected fetch (tests/proxies); fall back to the global one. */
    const fetchFn = this.config.fetch || fetch;
    while (true) {
      /* compiled form of `abortSignal?.aborted` */
      if (abortSignal == null ? void 0 : abortSignal.aborted) { throw new Error("Task polling aborted"); }
      if (Date.now() - startTime > MAX_POLL_TIME2) { throw new Error("Task polling timed out"); }
      const response = await fetchFn( `${this.config.url({ modelId: this.modelId, path: `/flux/v1/get_result?id=${taskId}` })}`, { method: "GET", headers: resolvedHeaders, signal: abortSignal } );
      if (!response.ok) { throw new Error(`HTTP error! status: ${response.status}`); }
      const data = await response.json();
      if (data.status === "Ready" && data.result) { return data; }
      if (data.status === "Failed" || data.status === "Error") { throw new Error(`Task failed with status: ${data.status}`); }
      /* resolve44 is the bundler-renamed Promise `resolve` callback; sleep one interval. */
      await new Promise((resolve44) => setTimeout(resolve44, POLL_INTERVAL2));
    }
  }
  /*
   * Builds and submits a Flux-2 generation request, then waits for the result.
   * Size handling: explicit `size` wins (validated to multiples of 32 within
   * [64, 4096] -- presumably by a BaseModelHandler helper, confirm there);
   * otherwise `aspectRatio` is converted to a concrete size around a 1024 base.
   */
  async processRequest({ prompt, n, size, aspectRatio, seed, providerOptions, headers, abortSignal }) {
    var _a;
    const warnings = [];
    /* The endpoint yields one image per task, so n > 1 only produces a warning. */
    if (n != null && n > 1) { warnings.push({ type: "unsupported", feature: "n", details: `${this.getModelDisplayName()} generates one image per request` }); }
    let width;
    let height;
    if (size) {
      const parsedSize = this.parseSize(size);
      if (parsedSize) {
        const validatedSize = this.validateDimensionsMultipleOf32( parsedSize, warnings, 64, 4096 );
        width = validatedSize.width;
        height = validatedSize.height;
      }
    } else if (aspectRatio) {
      const calculatedSize = this.aspectRatioToSize(aspectRatio, 1024, warnings);
      if (calculatedSize) { width = calculatedSize.width; height = calculatedSize.height; }
    }
    /* Warn (after the fact) that size took precedence over aspectRatio. */
    if (size != null && aspectRatio != null) { warnings.push({ type: "unsupported", feature: "aspectRatio", details: "Both size and aspectRatio provided. Size will be used and aspectRatio will be ignored." }); }
    /* Provider-specific pass-through options (up to 8 reference images, safety, webhooks, output format). */
    const ai302Options = (providerOptions == null ? void 0 : providerOptions.ai302) || {};
    const inputImage = ai302Options.input_image;
    const inputImage2 = ai302Options.input_image_2;
    const inputImage3 = ai302Options.input_image_3;
    const inputImage4 = ai302Options.input_image_4;
    const inputImage5 = ai302Options.input_image_5;
    const inputImage6 = ai302Options.input_image_6;
    const inputImage7 = ai302Options.input_image_7;
    const inputImage8 = ai302Options.input_image_8;
    const safetyTolerance = ai302Options.safety_tolerance;
    const outputFormat = ai302Options.output_format;
    const webhookUrl = ai302Options.webhook_url;
    const webhookSecret = ai302Options.webhook_secret;
    const resolvedHeaders = await resolve8(this.config.headers());
    /* Submit the task; the `...cond && { k: v }` spreads keep undefined options out of the JSON body. */
    const { value: submitResponse, responseHeaders } = await postJsonToApi7({ url: this.config.url({ modelId: this.modelId, path: this.getEndpointPath() }), headers: combineHeaders8(resolvedHeaders, headers), body: { prompt, ...inputImage !== void 0 && { input_image: inputImage }, ...inputImage2 !== void 0 && { input_image_2: inputImage2 }, ...inputImage3 !== void 0 && { input_image_3: inputImage3 }, ...inputImage4 !== void 0 && { input_image_4: inputImage4 }, ...inputImage5 !== void 0 && { input_image_5: inputImage5 }, ...inputImage6 !== void 0 && { input_image_6: inputImage6 }, ...inputImage7 !== void 0 && { input_image_7: inputImage7 }, ...inputImage8 !== void 0 && { input_image_8: inputImage8 }, ...seed !== void 0 && { seed }, ...width !== void 0 && { width }, ...height !== void 0 && { height }, ...outputFormat !== void 0 && { output_format: outputFormat }, ...webhookUrl !== void 0 && { webhook_url: webhookUrl }, ...webhookSecret !== void 0 && { webhook_secret: webhookSecret }, ...safetyTolerance !== void 0 && { safety_tolerance: safetyTolerance } }, failedResponseHandler: statusCodeErrorResponseHandler, successfulResponseHandler: createJsonResponseHandler(), abortSignal, fetch: this.config.fetch });
    const taskResult = await this.pollTask(submitResponse.id, resolvedHeaders, abortSignal);
    /* A "Ready" task without a result sample still counts as failure (checked next). */
    if
(!((_a = taskResult.result) == null ? void 0 : _a.sample)) { throw new Error("No image generated"); }
/* Polling succeeded: hand the single result URL to the shared download helper. */
const images = await this.downloadImages([taskResult.result.sample]);
return { images, warnings, response: { timestamp: /* @__PURE__ */ new Date(), modelId: this.modelId, headers: responseHeaders } }; } };
// src/models/gpt-image.ts
import { combineHeaders as combineHeaders9, postJsonToApi as postJsonToApi8, resolve as resolve9 } from "@ai-sdk/provider-utils";
/* The only sizes this endpoint accepts: square, landscape, portrait. */
var SUPPORTED_SIZES = [ "1024x1024", "1536x1024", "1024x1536" ];
/* Handler for gpt-image models served via the OpenAI-compatible /v1/images/generations route. */
var GPTImageHandler = class extends BaseModelHandler {
  /*
   * Builds an images/generations request. An explicit `size` is validated
   * against SUPPORTED_SIZES; without one, `aspectRatio` is first converted to
   * a concrete size around a 1024 base. Falls back to 1024x1024 otherwise.
   */
  async processRequest({ prompt, n, size, aspectRatio, providerOptions, headers, abortSignal }) {
    var _a, _b;
    const warnings = [];
    let parsedSize = this.parseSize(size);
    if (!parsedSize && aspectRatio) { parsedSize = this.aspectRatioToSize(aspectRatio, 1024, warnings); }
    let sizeStr = "1024x1024";
    if (parsedSize) { parsedSize = this.validateSizeOption(parsedSize, SUPPORTED_SIZES, warnings); sizeStr = `${parsedSize.width}x${parsedSize.height}`; }
    /* Any extra ai302 provider options are forwarded verbatim in the body. */
    const requestBody = { prompt, model: this.modelId, size: sizeStr, n: n || 1, ...(_a = providerOptions.ai302) != null ? _a : {} };
    /* response_format travels as a query parameter, not in the JSON body (it is stripped below); defaults to "url". */
    const responseFormat = ((_b = providerOptions.ai302) == null ?
void 0 : _b.response_format) || "url";
/* response_format was spread into the body above; remove it so it is only sent as a query parameter. */
if (requestBody.response_format) { delete requestBody.response_format; }
const baseUrl = this.config.url({ modelId: this.modelId, path: "/v1/images/generations" });
const url = `${baseUrl}?response_format=${responseFormat}`;
const resolvedHeaders = await resolve9(this.config.headers());
const { value: response, responseHeaders } = await postJsonToApi8({ url, headers: combineHeaders9(resolvedHeaders, headers), body: requestBody, failedResponseHandler: statusCodeErrorResponseHandler, successfulResponseHandler: createJsonResponseHandler(), abortSignal, fetch: this.config.fetch });
/* Keep only entries that actually carry a URL before downloading. */
const urls = response.data.map((img) => img.url || "").filter(Boolean);
const images = await this.downloadImages(urls);
return { images, warnings, response: { timestamp: /* @__PURE__ */ new Date(), modelId: this.modelId, headers: responseHeaders } }; } };
// src/models/hidream.ts
import { combineHeaders as combineHeaders10, postJsonToApi as postJsonToApi9, resolve as resolve10 } from "@ai-sdk/provider-utils";
/* Handler for Hidream models submitted through the 302.AI /302/submit/<modelId> endpoint. */
var HidreamHandler = class extends BaseModelHandler {
  /*
   * Single-image generation: normalizes size (explicit size, else one derived
   * from aspectRatio around a 1024 base; dimensions validated to multiples of
   * 32), posts the job, then downloads the returned image URLs.
   */
  async processRequest({ prompt, n, size, aspectRatio, seed, providerOptions, headers, abortSignal }) {
    var _a;
    const warnings = [];
    /* Batch generation is unsupported; warn instead of failing. */
    if (n != null && n > 1) { warnings.push({ type: "unsupported", feature: "n", details: "Hidream does not support batch generation" }); }
    let parsedSize = this.parseSize(size);
    if (!parsedSize && aspectRatio) { parsedSize = this.aspectRatioToSize(aspectRatio, 1024, warnings); }
    if (parsedSize) { parsedSize = this.validateDimensionsMultipleOf32(parsedSize, warnings); }
    const resolvedHeaders = await resolve10(this.config.headers());
    /* Extra ai302 provider options are spread into the body verbatim. */
    const { value: response, responseHeaders } = await postJsonToApi9({ url: this.config.url({ modelId: this.modelId, path: `/302/submit/${this.modelId}` }), headers: combineHeaders10(resolvedHeaders, headers), body: { prompt, image_size: parsedSize, seed, ...(_a = providerOptions.ai302) != null ?
_a : {} }, failedResponseHandler: statusCodeErrorResponseHandler, successfulResponseHandler: createJsonResponseHandler(), abortSignal, fetch: this.config.fetch });
/* Collect the returned image URLs (dropping empty ones) and download them. */
const urls = response.images.map((img) => img.url).filter(Boolean);
const images = await this.downloadImages(urls);
return { images, warnings, response: { timestamp: /* @__PURE__ */ new Date(), modelId: this.modelId, headers: responseHeaders } }; } };
// src/models/ideogram.ts
import { combineHeaders as combineHeaders11, postJsonToApi as postJsonToApi10, resolve as resolve11 } from "@ai-sdk/provider-utils";
// src/ai302-types.ts
/*
 * Zod schemas describing request/response payloads of the supported image
 * backends. looseObject tolerates unknown keys, so upstream API additions
 * do not break parsing.
 */
import { z } from "zod/v4";
/* A single image entry as returned by Flux-style endpoints; every field optional. */
var ImageResponseSchema = z.looseObject({ content_type: z.string().optional(), height: z.number().optional(), url: z.string().optional(), width: z.number().optional() });
var ImageSizeSchema = z.looseObject({ height: z.number(), width: z.number() });
/* Ideogram's enumerated aspect ratios (ASPECT_<w>_<h>). */
var IdeogramAspectRatioSchema = z.enum([ "ASPECT_1_1", "ASPECT_10_16", "ASPECT_16_10", "ASPECT_9_16", "ASPECT_16_9", "ASPECT_3_2", "ASPECT_2_3", "ASPECT_4_3", "ASPECT_3_4", "ASPECT_1_3", "ASPECT_3_1" ]);
var FluxV11UltraRequestSchema = z.looseObject({ aspect_ratio: z.string(), prompt: z.string(), raw: z.boolean() });
var FluxV11UltraResponseSchema = z.looseObject({ has_nsfw_concepts: z.array(z.boolean()), images: z.array(ImageResponseSchema), prompt: z.string(), seed: z.number(), timings: z.record(z.string(), z.any()) });
var FluxProV11RequestSchema = z.looseObject({ guidance_scale: z.number().optional(), image_size: ImageSizeSchema.optional(), num_inference_steps: z.number().optional(), prompt: z.string() });
var FluxProV11ResponseSchema = z.looseObject({ has_nsfw_concepts: z.array(z.boolean()), images: z.array(ImageResponseSchema), prompt: z.string(), seed: z.number(), timings: z.record(z.string(), z.any()) });
/* Defaults (guidance 3.5, 28 steps) are applied at parse time by zod. */
var FluxProDevRequestSchema = z.looseObject({ guidance_scale: z.number().optional().default(3.5), image_size: ImageSizeSchema, num_inference_steps: z.number().optional().default(28), prompt:
z.string() });
var FluxProDevResponseSchema = z.looseObject({ images: z.array( z.object({ url: z.string(), width: z.number(), height: z.number(), content_type: z.string() }) ), timings: z.record(z.string(), z.number()), seed: z.number(), has_nsfw_concepts: z.array(z.boolean()), prompt: z.string() });
var FluxSchnellRequestSchema = z.looseObject({ image_size: ImageSizeSchema.optional(), num_inference_steps: z.number().optional(), prompt: z.string() });
var FluxSchnellResponseSchema = z.looseObject({ images: z.array( z.object({ url: z.string(), width: z.number(), height: z.number(), content_type: z.string() }) ), timings: z.record(z.string(), z.number()), seed: z.number(), has_nsfw_concepts: z.array(z.boolean()), prompt: z.string() });
/* Fixed resolutions Ideogram accepts, named RESOLUTION_<width>_<height>. */
var IdeogramResolutionSchema = z.enum([ "RESOLUTION_512_1536", "RESOLUTION_576_1408", "RESOLUTION_576_1472", "RESOLUTION_576_1536", "RESOLUTION_640_1024", "RESOLUTION_640_1344", "RESOLUTION_640_1408", "RESOLUTION_640_1472", "RESOLUTION_640_1536", "RESOLUTION_704_1152", "RESOLUTION_704_1216", "RESOLUTION_704_1280", "RESOLUTION_704_1344", "RESOLUTION_704_1408", "RESOLUTION_704_1472", "RESOLUTION_720_1280", "RESOLUTION_736_1312", "RESOLUTION_768_1024", "RESOLUTION_768_1088", "RESOLUTION_768_1152", "RESOLUTION_768_1216", "RESOLUTION_768_1232", "RESOLUTION_768_1280", "RESOLUTION_768_1344", "RESOLUTION_832_960", "RESOLUTION_832_1024", "RESOLUTION_832_1088", "RESOLUTION_832_1152", "RESOLUTION_832_1216", "RESOLUTION_832_1248", "RESOLUTION_864_1152", "RESOLUTION_896_960", "RESOLUTION_896_1024", "RESOLUTION_896_1088", "RESOLUTION_896_1120", "RESOLUTION_896_1152", "RESOLUTION_960_832", "RESOLUTION_960_896", "RESOLUTION_960_1024", "RESOLUTION_960_1088", "RESOLUTION_1024_640", "RESOLUTION_1024_768", "RESOLUTION_1024_832", "RESOLUTION_1024_896", "RESOLUTION_1024_960", "RESOLUTION_1024_1024", "RESOLUTION_1088_768", "RESOLUTION_1088_832", "RESOLUTION_1088_896", "RESOLUTION_1088_960", "RESOLUTION_1120_896", "RESOLUTION_1152_704", "RESOLUTION_1152_768", "RESOLUTION_1152_832", "RESOLUTION_1152_864", "RESOLUTION_1152_896", "RESOLUTION_1216_704", "RESOLUTION_1216_768", "RESOLUTION_1216_832", "RESOLUTION_1232_768", "RESOLUTION_1248_832", "RESOLUTION_1280_704", "RESOLUTION_1280_720", "RESOLUTION_1280_768", "RESOLUTION_1280_800", "RESOLUTION_1312_736", "RESOLUTION_1344_640", "RESOLUTION_1344_704", "RESOLUTION_1344_768", "RESOLUTION_1408_576", "RESOLUTION_1408_640", "RESOLUTION_1408_704", "RESOLUTION_1472_576", "RESOLUTION_1472_640", "RESOLUTION_1472_704", "RESOLUTION_1536_512", "RESOLUTION_1536_576", "RESOLUTION_1536_640" ]);
/* Ideogram request envelope: all generation options nest under image_request; defaults applied at parse time. */
var IdeogramRequestSchema = z.looseObject({ image_request: z.object({ aspect_ratio: IdeogramAspectRatioSchema.optional().default("ASPECT_1_1"), magic_prompt_option: z.enum(["AUTO", "ON", "OFF"]).optional().default("AUTO"), model: z.enum(["V_1", "V_1_TURBO", "V_2", "V_2_TURBO"]).optional().default("V_2"), negative_prompt: z.string().optional(), prompt: z.string(), resolution: IdeogramResolutionSchema.optional(), seed: z.number().optional(), style_type: z.enum(["GENERAL", "REALISTIC", "DESIGN", "RENDER_3D", "ANIME"]).optional().default("GENERAL") }) });
var IdeogramImageDataSchema = z.looseObject({ is_image_safe: z.boolean(), prompt: z.string(), resolution: z.string(), seed: z.number(), url: z.string() });
var IdeogramResponseSchema = z.looseObject({ created: z.string(), data: z.array(IdeogramImageDataSchema) });
/* OpenAI-style DALL-E 3 request/response shapes. */
var DallERequestSchema = z.looseObject({ prompt: z.string(), model: z.enum(["dall-e-3"]), size: z.string() });
var DallEImageDataSchema = z.looseObject({ revised_prompt: z.string().optional(), url: z.string() });
var DallEResponseSchema = z.looseObject({ created: z.number(), data: z.array(DallEImageDataSchema) });
/* Recraft text layout: bbox is exactly four [x, y] corner points. */
var RecraftTextLayoutSchema = z.looseObject({ text: z.string(), bbox: z.array(z.tuple([z.number(), z.number()])).length(4) });
/* Optional color controls: RGB triples plus a background color string. */
var RecraftControlsSchema = z.looseObject({ colors: z.array( z.object({ rgb: z.tuple([z.number(), z.number(), z.number()]) }) ).optional(), background_color: z.string().optional() });
var RecraftStyleSchema = z.enum([ "realistic_image", "digital_illustration", "vector_illustration", "icon" ]);
var RecraftResponseSchema = z.looseObject({ images: z.array( z.object({ url: z.string(), content_type: z.string(), file_size: z.number() }) ) });
/* SDXL-Lightning request/response shapes. */
var SDXLLightningImageSizeSchema = z.looseObject({ width: z.number(), height: z.number() });
var SDXLLightningRequestSchema = z.looseObject({ prompt: z.string(), image_size: SDXLLightningImageSizeSchema, embeddings: z.array(z.any()).optional(), format: z.enum(["jpeg", "png"]).optional() });
var SDXLLightningImageDataSchema = z.looseObject({ url: z.string(), width: z.number(), height: z.number(), content_type: z.string() });
var SDXLLightningResponseSchema = z.looseObject({ images: z.array(S