n8n Workflow Automation Tool
ai.service.js (compiled JavaScript)

Compiled output of n8n's AIService: it wires an OpenAI provider and an optional Pinecone index behind a typedi service, and uses LangChain prompt chains to turn a natural-language request into a curl command for the HTTP Request node.
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
    var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
    if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
    else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
    return c > 3 && r && Object.defineProperty(target, key, r), r;
};
var __metadata = (this && this.__metadata) || function (k, v) {
    if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
};
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.AIService = void 0;
const typedi_1 = require("typedi");
const config_1 = __importDefault(require("../config"));
const n8n_workflow_1 = require("n8n-workflow");
const openai_1 = require("../services/ai/providers/openai");
const pinecone_1 = require("@pinecone-database/pinecone");
const api_knowledgebase_json_1 = __importDefault(require("../services/ai/resources/api-knowledgebase.json"));
const output_parsers_1 = require("langchain/output_parsers");
const generateCurl_1 = require("../services/ai/prompts/generateCurl");
const generateCurl_2 = require("../services/ai/schemas/generateCurl");
const pinecone_2 = require("@langchain/pinecone");
const fuse_js_1 = __importDefault(require("fuse.js"));
function isN8nAIProviderType(value) {
    return ['openai'].includes(value);
}
let AIService = class AIService {
    constructor() {
        this.providerType = 'unknown';
        this.jsonOutputParser = new output_parsers_1.JsonOutputFunctionsParser();
        // Pick the AI provider from config; currently only 'openai' is recognized.
        const providerName = config_1.default.getEnv('ai.provider');
        if (isN8nAIProviderType(providerName)) {
            this.providerType = providerName;
        }
        if (this.providerType === 'openai') {
            const openAIApiKey = config_1.default.getEnv('ai.openAI.apiKey');
            const openAIModelName = config_1.default.getEnv('ai.openAI.model');
            if (openAIApiKey) {
                this.provider = new openai_1.AIProviderOpenAI({ openAIApiKey, modelName: openAIModelName });
            }
        }
        // Pinecone is optional; without it, curl generation falls back to the generic prompt.
        const pineconeApiKey = config_1.default.getEnv('ai.pinecone.apiKey');
        if (pineconeApiKey) {
            this.pinecone = new pinecone_1.Pinecone({
                apiKey: pineconeApiKey,
            });
        }
    }
    async prompt(messages, options) {
        if (!this.provider) {
            throw new n8n_workflow_1.ApplicationError('No AI provider has been configured.');
        }
        return await this.provider.invoke(messages, options);
    }
    validateCurl(result) {
        if (!result.curl.startsWith('curl')) {
            throw new n8n_workflow_1.ApplicationError('The generated HTTP Request Node parameters format is incorrect. Please adjust your request and try again.');
        }
        // Quote bare {{placeholder}} values and strip a stray trailing brace after the -d payload.
        result.curl = result.curl
            .replace(/": (\{\{[A-Za-z0-9_]+}}|\{[A-Za-z0-9_]+})/g, '": "$1"')
            .replace(/(-d '[^']+')}/, '$1');
        return result;
    }
    async generateCurl(serviceName, serviceRequest) {
        this.checkRequirements();
        if (!this.pinecone) {
            return await this.generateCurlGeneric(serviceName, serviceRequest);
        }
        // Fuzzy-match the requested service against the bundled API knowledge base.
        const fuse = new fuse_js_1.default(api_knowledgebase_json_1.default, {
            threshold: 0.25,
            useExtendedSearch: true,
            keys: ['id', 'title'],
        });
        const matchedServices = fuse
            .search(serviceName.replace(/ +/g, '|'))
            .map((result) => result.item);
        if (matchedServices.length === 0) {
            return await this.generateCurlGeneric(serviceName, serviceRequest);
        }
        // Retrieve the most relevant endpoint documents for the matched services from Pinecone.
        const pcIndex = this.pinecone.Index('api-knowledgebase');
        const vectorStore = await pinecone_2.PineconeStore.fromExistingIndex(this.provider.embeddings, {
            namespace: 'endpoints',
            pineconeIndex: pcIndex,
        });
        const matchedDocuments = await vectorStore.similaritySearch(`${serviceName} ${serviceRequest}`, 4, {
            id: {
                $in: matchedServices.map((service) => service.id),
            },
        });
        if (matchedDocuments.length === 0) {
            return await this.generateCurlGeneric(serviceName, serviceRequest);
        }
        const aggregatedDocuments = matchedDocuments.reduce((acc, document) => {
            const pageData = (0, n8n_workflow_1.jsonParse)(document.pageContent);
            acc.push(pageData);
            return acc;
        }, []);
        // Feed the matched endpoints into the prompt template and parse the structured output.
        const generateCurlChain = generateCurl_1.generateCurlCommandPromptTemplate
            .pipe(this.provider.modelWithOutputParser(generateCurl_2.generateCurlSchema))
            .pipe(this.jsonOutputParser);
        const result = (await generateCurlChain.invoke({
            endpoints: JSON.stringify(aggregatedDocuments),
            serviceName,
            serviceRequest,
        }));
        return this.validateCurl(result);
    }
    async generateCurlGeneric(serviceName, serviceRequest) {
        this.checkRequirements();
        // Fallback prompt used when no Pinecone index or knowledge-base match is available.
        const generateCurlFallbackChain = generateCurl_1.generateCurlCommandFallbackPromptTemplate
            .pipe(this.provider.modelWithOutputParser(generateCurl_2.generateCurlSchema))
            .pipe(this.jsonOutputParser);
        const result = (await generateCurlFallbackChain.invoke({
            serviceName,
            serviceRequest,
        }));
        return this.validateCurl(result);
    }
    checkRequirements() {
        if (!this.provider) {
            throw new n8n_workflow_1.ApplicationError('No AI provider has been configured.');
        }
    }
};
exports.AIService = AIService;
exports.AIService = AIService = __decorate([
    (0, typedi_1.Service)(),
    __metadata("design:paramtypes", [])
], AIService);
//# sourceMappingURL=ai.service.js.map
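
A minimal usage sketch, not part of the compiled file above: it assumes the relative require path, the chat-message shape passed to prompt(), and an empty options object; Container.get() is typedi's standard way of resolving a @Service()-decorated class.

// Usage sketch: resolve the service via typedi and generate a curl command.
// The require path and the message/options shapes are assumptions, not taken from this file.
const { Container } = require('typedi');
const { AIService } = require('./services/ai.service');

async function main() {
    const aiService = Container.get(AIService); // singleton created by the @Service() decorator
    // Free-form prompt through the configured provider (throws if ai.provider is not configured).
    const reply = await aiService.prompt([{ role: 'user', content: 'What does a webhook trigger do?' }], {});
    // Knowledge-base-backed curl generation for the HTTP Request node.
    const { curl } = await aiService.generateCurl('GitHub', 'List open issues in a repository');
    console.log(reply, curl);
}

main().catch(console.error);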