n8n-nodes-piapi
Version:
Community n8n nodes for PiAPI - integrate generative AI capabilities (image, video, audio, 3D) into your workflows
359 lines • 14.5 kB
JavaScript
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.FluxImageToImage = void 0;
const GenericFunctions_1 = require("../shared/GenericFunctions");
const Constants_1 = require("../shared/Constants");
/**
 * n8n node: PiAPI Flux Image-to-Image.
 *
 * Submits an image-transformation task to the PiAPI `/api/v1/task` endpoint
 * using one of the Qubico Flux models, optionally with LoRA and/or ControlNet
 * settings (advanced model only), and optionally polls until the task
 * completes.
 */
class FluxImageToImage {
    constructor() {
        // Node description consumed by the n8n editor: UI fields, credentials,
        // and input/output wiring. Field values below are user-facing strings
        // and must stay stable.
        this.description = {
            displayName: 'PiAPI Flux Image to Image',
            name: 'fluxImageToImage',
            icon: 'file:../piapi.svg',
            group: ['transform'],
            version: 1,
            description: 'Transform images using PiAPI Flux Image-to-Image',
            defaults: {
                name: 'Flux Image to Image',
            },
            inputs: ["main"],
            outputs: ["main"],
            credentials: [
                {
                    name: 'piAPIApi',
                    required: true,
                },
            ],
            properties: [
                {
                    displayName: 'Model',
                    name: 'model',
                    type: 'options',
                    options: [
                        {
                            name: 'Flux 1 Dev',
                            value: 'Qubico/flux1-dev',
                        },
                        {
                            name: 'Flux 1 Schnell',
                            value: 'Qubico/flux1-schnell',
                        },
                        {
                            name: 'Flux 1 Dev Advanced',
                            value: 'Qubico/flux1-dev-advanced',
                        },
                    ],
                    default: 'Qubico/flux1-dev',
                    description: 'The model to use for image generation',
                },
                {
                    displayName: 'Image Source',
                    name: 'imageSource',
                    type: 'options',
                    options: [
                        {
                            name: 'URL',
                            value: 'url',
                        },
                        {
                            name: 'Binary Data',
                            value: 'binaryData',
                        },
                    ],
                    default: 'url',
                    description: 'The source of the input image',
                },
                {
                    displayName: 'Image URL',
                    name: 'imageUrl',
                    type: 'string',
                    default: '',
                    required: true,
                    displayOptions: {
                        show: {
                            imageSource: ['url'],
                        },
                    },
                    description: 'URL of the image to transform',
                },
                {
                    displayName: 'Binary Property',
                    name: 'binaryPropertyName',
                    type: 'string',
                    default: 'data',
                    required: true,
                    displayOptions: {
                        show: {
                            imageSource: ['binaryData'],
                        },
                    },
                    description: 'Name of the binary property containing the image data',
                },
                {
                    displayName: 'Prompt',
                    name: 'prompt',
                    type: 'string',
                    typeOptions: {
                        rows: 4,
                    },
                    default: '',
                    required: true,
                    description: 'Text prompt for image transformation',
                },
                {
                    displayName: 'Negative Prompt',
                    name: 'negativePrompt',
                    type: 'string',
                    typeOptions: {
                        rows: 2,
                    },
                    default: '',
                    description: 'Negative text prompt for image transformation',
                },
                {
                    displayName: 'Denoise Strength',
                    name: 'denoiseStrength',
                    type: 'number',
                    default: 0.7,
                    description: 'Determines how much to transform the image (0.0 to 1.0)',
                    typeOptions: {
                        minValue: 0.01,
                        maxValue: 1,
                        numberPrecision: 2,
                    },
                },
                {
                    displayName: 'Guidance Scale',
                    name: 'guidanceScale',
                    type: 'number',
                    default: 3,
                    description: 'Guidance scale for image generation. Higher values improve prompt adherence but may reduce image quality.',
                    typeOptions: {
                        minValue: 1.5,
                        maxValue: 5,
                        numberPrecision: 1,
                    },
                },
                {
                    displayName: 'Batch Size',
                    name: 'batchSize',
                    type: 'number',
                    default: 1,
                    description: 'Number of images to generate. Only works for Flux 1 Schnell model.',
                    typeOptions: {
                        minValue: 1,
                        maxValue: 4,
                    },
                },
                {
                    displayName: 'Use LoRA',
                    name: 'useLora',
                    type: 'boolean',
                    default: false,
                    description: 'Whether to use LoRA model for generation (only available with Flux 1 Dev Advanced model)',
                    displayOptions: {
                        show: {
                            model: ['Qubico/flux1-dev-advanced'],
                        },
                    },
                },
                {
                    displayName: 'LoRA Type',
                    name: 'loraType',
                    type: 'options',
                    displayOptions: {
                        show: {
                            useLora: [true],
                        },
                    },
                    options: Constants_1.LORA_OPTIONS,
                    default: 'none',
                    description: 'The LoRA model to use for image generation',
                },
                {
                    displayName: 'LoRA Strength',
                    name: 'loraStrength',
                    type: 'number',
                    displayOptions: {
                        show: {
                            useLora: [true],
                        },
                    },
                    default: 1,
                    description: 'Strength of the LoRA effect (0.0 to 1.0)',
                    typeOptions: {
                        minValue: 0.1,
                        maxValue: 1,
                        numberPrecision: 2,
                    },
                },
                {
                    displayName: 'Use ControlNet',
                    name: 'useControlNet',
                    type: 'boolean',
                    default: false,
                    description: 'Whether to use ControlNet for generation (only available with Flux 1 Dev Advanced model)',
                    displayOptions: {
                        show: {
                            model: ['Qubico/flux1-dev-advanced'],
                        },
                    },
                },
                {
                    displayName: 'ControlNet Type',
                    name: 'controlNetType',
                    type: 'options',
                    displayOptions: {
                        show: {
                            useControlNet: [true],
                        },
                    },
                    options: Constants_1.CONTROLNET_TYPES,
                    default: 'none',
                    description: 'The ControlNet type to use for image generation',
                },
                {
                    displayName: 'Control Strength',
                    name: 'controlStrength',
                    type: 'number',
                    displayOptions: {
                        show: {
                            useControlNet: [true],
                        },
                    },
                    default: 0.5,
                    description: 'Strength of the ControlNet effect (0.0 to 1.0)',
                    typeOptions: {
                        minValue: 0.1,
                        maxValue: 1,
                        numberPrecision: 2,
                    },
                },
                {
                    displayName: 'Return Preprocessed Image',
                    name: 'returnPreprocessed',
                    type: 'boolean',
                    displayOptions: {
                        show: {
                            useControlNet: [true],
                        },
                    },
                    default: false,
                    description: 'Whether to return the preprocessed control image',
                },
                {
                    displayName: 'Wait for Completion',
                    name: 'waitForCompletion',
                    type: 'boolean',
                    default: false,
                    description: 'Wait for task to complete and return results',
                },
            ],
        };
    }
    /**
     * Executes the node: builds one PiAPI task request per input item,
     * posts it, optionally polls for completion, and returns one output
     * item per input. Honors "Continue on Fail" by emitting `{ error }`
     * items instead of aborting the run.
     *
     * @returns {Promise<Array>} A single output branch of execution data.
     */
    async execute() {
        const items = this.getInputData();
        const returnData = [];
        for (let i = 0; i < items.length; i++) {
            // --- Collect parameters for this item ---------------------------
            const model = this.getNodeParameter('model', i);
            const prompt = this.getNodeParameter('prompt', i);
            const negativePrompt = this.getNodeParameter('negativePrompt', i, '');
            const denoiseStrength = this.getNodeParameter('denoiseStrength', i, 0.7);
            const guidanceScale = this.getNodeParameter('guidanceScale', i, 3);
            const batchSize = this.getNodeParameter('batchSize', i, 1);
            const useLora = this.getNodeParameter('useLora', i, false);
            const loraType = useLora ? this.getNodeParameter('loraType', i, 'none') : 'none';
            const loraStrength = useLora ? this.getNodeParameter('loraStrength', i, 1) : 1;
            const useControlNet = this.getNodeParameter('useControlNet', i, false);
            const controlNetType = useControlNet ? this.getNodeParameter('controlNetType', i, 'none') : 'none';
            const controlStrength = useControlNet ? this.getNodeParameter('controlStrength', i, 0.5) : 0.5;
            const returnPreprocessed = useControlNet ? this.getNodeParameter('returnPreprocessed', i, false) : false;
            // FIX: fallback was `true`, contradicting the property's declared
            // default of `false`; aligned so absent-parameter behavior matches
            // the advertised default. (If the intended default is actually
            // `true`, change the property default instead — TODO confirm.)
            const waitForCompletion = this.getNodeParameter('waitForCompletion', i, false);
            const imageSource = this.getNodeParameter('imageSource', i);
            // --- Resolve the input image to a URL or data URI ---------------
            let imageUrl = '';
            if (imageSource === 'url') {
                imageUrl = this.getNodeParameter('imageUrl', i);
            }
            else {
                const binaryPropertyName = this.getNodeParameter('binaryPropertyName', i);
                const binaryData = this.helpers.assertBinaryData(i, binaryPropertyName);
                // FIX: use startsWith — `includes` would also accept MIME types
                // that merely contain "image/" somewhere in the string.
                if (binaryData.mimeType && !binaryData.mimeType.startsWith('image/')) {
                    throw new Error('The provided binary data is not an image');
                }
                if (binaryData.data) {
                    // FIX: binaryData.data is already a base64 string in n8n;
                    // the original decoded it to a Buffer and re-encoded it,
                    // which was a redundant round-trip.
                    imageUrl = `data:${binaryData.mimeType};base64,${binaryData.data}`;
                }
                else if (binaryData.url) {
                    imageUrl = binaryData.url;
                }
                else {
                    throw new Error('No usable image data found in the provided binary property');
                }
            }
            // --- Pick the task type; LoRA/ControlNet variants are only
            // available on the advanced model (ControlNet takes precedence). --
            let taskType = 'img2img';
            if (model === 'Qubico/flux1-dev-advanced') {
                if (useControlNet && controlNetType !== 'none') {
                    taskType = 'controlnet-lora';
                }
                else if (useLora && loraType !== 'none') {
                    taskType = 'img2img-lora';
                }
            }
            // --- Build the PiAPI request body -------------------------------
            const body = {
                model,
                task_type: taskType,
                input: {
                    prompt,
                    negative_prompt: negativePrompt,
                    image: imageUrl,
                    denoise: denoiseStrength,
                    guidance_scale: guidanceScale,
                    batch_size: batchSize,
                },
            };
            if (useLora && loraType !== 'none') {
                body.input.lora_settings = [
                    {
                        lora_type: loraType,
                        lora_strength: loraStrength,
                    },
                ];
            }
            if (useControlNet && controlNetType !== 'none') {
                body.input.control_net_settings = [
                    {
                        control_type: controlNetType,
                        control_image: imageUrl,
                        control_strength: controlStrength,
                        return_preprocessed_image: returnPreprocessed,
                    },
                ];
            }
            // --- Submit and (optionally) poll for the result ----------------
            try {
                const response = await GenericFunctions_1.piApiRequest.call(this, 'POST', '/api/v1/task', body);
                let taskResult = response;
                // Optional chaining replaces the transpiled `_a` null-guard.
                if (waitForCompletion && response.data?.task_id) {
                    taskResult = await GenericFunctions_1.waitForTaskCompletion.call(this, response.data.task_id);
                }
                returnData.push({
                    json: taskResult,
                });
            }
            catch (error) {
                if (this.continueOnFail()) {
                    returnData.push({
                        json: {
                            error: error.message,
                        },
                    });
                    continue;
                }
                throw error;
            }
        }
        return [returnData];
    }
}
exports.FluxImageToImage = FluxImageToImage;
//# sourceMappingURL=FluxImageToImage.node.js.map