/*
 * n8n-nodes-databricks-api — Databricks node for n8n
 * (registry listing metadata: version unspecified, 297 lines, 16.5 kB, JavaScript)
 */
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.callMethodAsync = callMethodAsync;
exports.callMethodSync = callMethodSync;
exports.logWrapper = logWrapper;
const embeddings_1 = require("@langchain/core/embeddings");
const retrievers_1 = require("@langchain/core/retrievers");
const vectorstores_1 = require("@langchain/core/vectorstores");
const textsplitters_1 = require("@langchain/textsplitters");
const n8n_workflow_1 = require("n8n-workflow");
const helpers_1 = require("./helpers");
const N8nBinaryLoader_1 = require("./N8nBinaryLoader");
const N8nJsonLoader_1 = require("./N8nJsonLoader");
/**
 * Invokes `parameters.method` on `this` with `parameters.arguments`, awaiting the result.
 * On failure, wraps the thrown value in a NodeOperationError tagged as a
 * configuration-node error, records it on the node's output pane, and rethrows —
 * either the wrapped error itself (when it carries a message) or a generic
 * "connected via input" error when it does not.
 */
async function callMethodAsync(parameters) {
try {
return await parameters.method.call(this, ...parameters.arguments);
}
catch (e) {
const node = parameters.executeFunctions.getNode();
const wrapped = new n8n_workflow_1.NodeOperationError(node, e, {
functionality: 'configuration-node',
});
const meta = (0, n8n_workflow_1.parseErrorMetadata)(wrapped);
parameters.executeFunctions.addOutputData(parameters.connectionType, parameters.currentNodeRunIndex, wrapped, meta);
// No usable message on the wrapper: fall back to a generic connection error.
if (!wrapped.message) {
throw new n8n_workflow_1.NodeOperationError(node, `Error on node "${node.name}" which is connected via input "${parameters.connectionType}"`, { functionality: 'configuration-node' });
}
// Surface the message as the description too, unless one is already set.
if (!wrapped.description) {
wrapped.description = wrapped.message;
}
throw wrapped;
}
}
/**
 * Synchronous counterpart of callMethodAsync: invokes `parameters.method` on
 * `this` with `parameters.arguments` and returns its result.
 * On failure, wraps the thrown value in a NodeOperationError, records it on
 * the node's output pane, and rethrows.
 *
 * Fix: previously this discarded the wrapped error (losing the original
 * message/description) and omitted the `functionality` tag, unlike
 * callMethodAsync. It now mirrors the async variant: the wrapped error is
 * rethrown when it carries a message; the generic "connected via input"
 * error is only the fallback.
 */
function callMethodSync(parameters) {
try {
return parameters.method.call(this, ...parameters.arguments);
}
catch (e) {
const connectedNode = parameters.executeFunctions.getNode();
const error = new n8n_workflow_1.NodeOperationError(connectedNode, e, {
functionality: 'configuration-node',
});
parameters.executeFunctions.addOutputData(parameters.connectionType, parameters.currentNodeRunIndex, error);
if (error.message) {
if (!error.description) {
error.description = error.message;
}
throw error;
}
throw new n8n_workflow_1.NodeOperationError(connectedNode, `Error on node "${connectedNode.name}" which is connected via input "${parameters.connectionType}"`, { functionality: 'configuration-node' });
}
}
/**
 * Wraps a LangChain-style instance (chat memory, message history, retriever,
 * embeddings, n8n document loader, text splitter, tool, or vector store) in a
 * Proxy whose `get` trap intercepts well-known methods. Each intercepted call
 * logs its input to the n8n execution UI (addInputData), delegates to the real
 * method through callMethodAsync (so failures are attributed to the connected
 * sub-node), then logs the result (addOutputData) and, for most branches,
 * emits an AI telemetry event. Any property not matched below is returned
 * untouched from the target.
 */
function logWrapper(originalInstance, executeFunctions) {
return new Proxy(originalInstance, {
get: (target, prop) => {
let connectionType;
// --- BaseChatMemory: loadMemoryVariables / saveContext ---
if ((0, helpers_1.isBaseChatMemory)(originalInstance)) {
if (prop === 'loadMemoryVariables' && 'loadMemoryVariables' in target) {
return async (values) => {
var _a;
connectionType = n8n_workflow_1.NodeConnectionTypes.AiMemory;
const { index } = executeFunctions.addInputData(connectionType, [
[{ json: { action: 'loadMemoryVariables', values } }],
]);
const response = (await callMethodAsync.call(target, {
executeFunctions,
connectionType,
currentNodeRunIndex: index,
method: target[prop],
arguments: [values],
}));
// Log the `chat_history` key when present; otherwise log the whole response.
// (Compiled form of `response?.chat_history ?? response`.)
const chatHistory = (_a = response === null || response === void 0 ? void 0 : response.chat_history) !== null && _a !== void 0 ? _a : response;
executeFunctions.addOutputData(connectionType, index, [
[{ json: { action: 'loadMemoryVariables', chatHistory } }],
]);
return response;
};
}
else if (prop === 'saveContext' && 'saveContext' in target) {
return async (input, output) => {
connectionType = n8n_workflow_1.NodeConnectionTypes.AiMemory;
const { index } = executeFunctions.addInputData(connectionType, [
[{ json: { action: 'saveContext', input, output } }],
]);
const response = (await callMethodAsync.call(target, {
executeFunctions,
connectionType,
currentNodeRunIndex: index,
method: target[prop],
arguments: [input, output],
}));
// Re-read the full history after saving so the UI shows the updated state.
const chatHistory = await target.chatHistory.getMessages();
executeFunctions.addOutputData(connectionType, index, [
[{ json: { action: 'saveContext', chatHistory } }],
]);
return response;
};
}
}
// --- BaseChatMessageHistory: getMessages / addMessage ---
if ((0, helpers_1.isBaseChatMessageHistory)(originalInstance)) {
if (prop === 'getMessages' && 'getMessages' in target) {
return async () => {
connectionType = n8n_workflow_1.NodeConnectionTypes.AiMemory;
const { index } = executeFunctions.addInputData(connectionType, [
[{ json: { action: 'getMessages' } }],
]);
const response = (await callMethodAsync.call(target, {
executeFunctions,
connectionType,
currentNodeRunIndex: index,
method: target[prop],
arguments: [],
}));
const payload = { action: 'getMessages', response };
executeFunctions.addOutputData(connectionType, index, [[{ json: payload }]]);
(0, helpers_1.logAiEvent)(executeFunctions, 'ai-messages-retrieved-from-memory', { response });
return response;
};
}
else if (prop === 'addMessage' && 'addMessage' in target) {
return async (message) => {
connectionType = n8n_workflow_1.NodeConnectionTypes.AiMemory;
const payload = { action: 'addMessage', message };
const { index } = executeFunctions.addInputData(connectionType, [[{ json: payload }]]);
await callMethodAsync.call(target, {
executeFunctions,
connectionType,
currentNodeRunIndex: index,
method: target[prop],
arguments: [message],
});
(0, helpers_1.logAiEvent)(executeFunctions, 'ai-message-added-to-memory', { message });
// addMessage returns nothing; the input payload doubles as the output log.
executeFunctions.addOutputData(connectionType, index, [[{ json: payload }]]);
};
}
}
// --- BaseRetriever: getRelevantDocuments ---
if (originalInstance instanceof retrievers_1.BaseRetriever) {
if (prop === 'getRelevantDocuments' && 'getRelevantDocuments' in target) {
return async (query, config) => {
var _a, _b, _c, _d;
connectionType = n8n_workflow_1.NodeConnectionTypes.AiRetriever;
const { index } = executeFunctions.addInputData(connectionType, [
[{ json: { query, config } }],
]);
const response = (await callMethodAsync.call(target, {
executeFunctions,
connectionType,
currentNodeRunIndex: index,
method: target[prop],
arguments: [query, config],
}));
// If the first returned document carries sub-execution metadata, attach it
// so the UI can link to that execution. (Compiled optional chaining.)
const executionId = (_b = (_a = response[0]) === null || _a === void 0 ? void 0 : _a.metadata) === null || _b === void 0 ? void 0 : _b.executionId;
const workflowId = (_d = (_c = response[0]) === null || _c === void 0 ? void 0 : _c.metadata) === null || _d === void 0 ? void 0 : _d.workflowId;
const metadata = {};
if (executionId && workflowId) {
metadata.subExecution = {
executionId,
workflowId,
};
}
(0, helpers_1.logAiEvent)(executeFunctions, 'ai-documents-retrieved', { query });
executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]], metadata);
return response;
};
}
}
// --- Embeddings: embedDocuments / embedQuery ---
if (originalInstance instanceof embeddings_1.Embeddings) {
if (prop === 'embedDocuments' && 'embedDocuments' in target) {
return async (documents) => {
connectionType = n8n_workflow_1.NodeConnectionTypes.AiEmbedding;
const { index } = executeFunctions.addInputData(connectionType, [
[{ json: { documents } }],
]);
const response = (await callMethodAsync.call(target, {
executeFunctions,
connectionType,
currentNodeRunIndex: index,
method: target[prop],
arguments: [documents],
}));
(0, helpers_1.logAiEvent)(executeFunctions, 'ai-document-embedded');
executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
return response;
};
}
if (prop === 'embedQuery' && 'embedQuery' in target) {
return async (query) => {
connectionType = n8n_workflow_1.NodeConnectionTypes.AiEmbedding;
const { index } = executeFunctions.addInputData(connectionType, [
[{ json: { query } }],
]);
const response = (await callMethodAsync.call(target, {
executeFunctions,
connectionType,
currentNodeRunIndex: index,
method: target[prop],
arguments: [query],
}));
(0, helpers_1.logAiEvent)(executeFunctions, 'ai-query-embedded');
executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
return response;
};
}
}
// --- n8n document loaders: processAll / processItem ---
if (originalInstance instanceof N8nJsonLoader_1.N8nJsonLoader ||
originalInstance instanceof N8nBinaryLoader_1.N8nBinaryLoader) {
if (prop === 'processAll' && 'processAll' in target) {
return async (items) => {
connectionType = n8n_workflow_1.NodeConnectionTypes.AiDocument;
const { index } = executeFunctions.addInputData(connectionType, [items]);
const response = (await callMethodAsync.call(target, {
executeFunctions,
connectionType,
currentNodeRunIndex: index,
method: target[prop],
arguments: [items],
}));
executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
return response;
};
}
if (prop === 'processItem' && 'processItem' in target) {
return async (item, itemIndex) => {
connectionType = n8n_workflow_1.NodeConnectionTypes.AiDocument;
const { index } = executeFunctions.addInputData(connectionType, [[item]]);
const response = (await callMethodAsync.call(target, {
executeFunctions,
connectionType,
currentNodeRunIndex: index,
method: target[prop],
arguments: [item, itemIndex],
}));
(0, helpers_1.logAiEvent)(executeFunctions, 'ai-document-processed');
// pairedItem ties the logged output back to the source item index.
executeFunctions.addOutputData(connectionType, index, [
[{ json: { response }, pairedItem: { item: itemIndex } }],
]);
return response;
};
}
}
// --- TextSplitter: splitText ---
if (originalInstance instanceof textsplitters_1.TextSplitter) {
if (prop === 'splitText' && 'splitText' in target) {
return async (text) => {
connectionType = n8n_workflow_1.NodeConnectionTypes.AiTextSplitter;
const { index } = executeFunctions.addInputData(connectionType, [
[{ json: { textSplitter: text } }],
]);
const response = (await callMethodAsync.call(target, {
executeFunctions,
connectionType,
currentNodeRunIndex: index,
method: target[prop],
arguments: [text],
}));
(0, helpers_1.logAiEvent)(executeFunctions, 'ai-text-split');
executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
return response;
};
}
}
// --- Tools: _call (the internal LangChain tool entry point) ---
if ((0, helpers_1.isToolsInstance)(originalInstance)) {
if (prop === '_call' && '_call' in target) {
return async (query) => {
connectionType = n8n_workflow_1.NodeConnectionTypes.AiTool;
const { index } = executeFunctions.addInputData(connectionType, [
[{ json: { query } }],
]);
const response = (await callMethodAsync.call(target, {
executeFunctions,
connectionType,
currentNodeRunIndex: index,
method: target[prop],
arguments: [query],
}));
(0, helpers_1.logAiEvent)(executeFunctions, 'ai-tool-called', { query, response });
executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
return response;
};
}
}
// --- VectorStore: similaritySearch ---
if (originalInstance instanceof vectorstores_1.VectorStore) {
if (prop === 'similaritySearch' && 'similaritySearch' in target) {
return async (query, k, filter, _callbacks) => {
connectionType = n8n_workflow_1.NodeConnectionTypes.AiVectorStore;
const { index } = executeFunctions.addInputData(connectionType, [
[{ json: { query, k, filter } }],
]);
const response = (await callMethodAsync.call(target, {
executeFunctions,
connectionType,
currentNodeRunIndex: index,
method: target[prop],
arguments: [query, k, filter, _callbacks],
}));
(0, helpers_1.logAiEvent)(executeFunctions, 'ai-vector-store-searched', { query });
executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
return response;
};
}
}
// Anything not intercepted above passes through unchanged.
return target[prop];
},
});
}
//# sourceMappingURL=logWrapper.js.map