"use strict";
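// TypeScript compiler helpers emitted by tsc for CommonJS interop,
// decorators, and design-time metadata.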
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
return c > 3 && r && Object.defineProperty(target, key, r), r;
};
var __importStar = (this && this.__importStar) || (function () {
var ownKeys = function(o) {
ownKeys = Object.getOwnPropertyNames || function (o) {
var ar = [];
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
return ar;
};
return ownKeys(o);
};
return function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
__setModuleDefault(result, mod);
return result;
};
})();
var __metadata = (this && this.__metadata) || function (k, v) {
if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.ImportService = void 0;
const backend_common_1 = require("@n8n/backend-common");
const db_1 = require("@n8n/db");
const typeorm_1 = require("@n8n/typeorm");
const di_1 = require("@n8n/di");
const uuid_1 = require("uuid");
const promises_1 = require("fs/promises");
const workflow_helpers_1 = require("../workflow-helpers");
const validate_database_type_1 = require("../utils/validate-database-type");
const n8n_core_1 = require("n8n-core");
const compression_util_1 = require("../utils/compression.util");
const zod_1 = require("zod");
const active_workflow_manager_1 = require("../active-workflow-manager");
const workflow_index_service_1 = require("../modules/workflow-index/workflow-index.service");
const config_1 = require("@n8n/config");
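// ImportService restores workflows and full entity dumps (JSONL files,
// optionally packed into an entities.zip archive) into the n8n database.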
let ImportService = class ImportService {
constructor(logger, credentialsRepository, tagRepository, dataSource, cipher, activeWorkflowManager, workflowIndexService, databaseConfig, workflowPublishHistoryRepository) {
this.logger = logger;
this.credentialsRepository = credentialsRepository;
this.tagRepository = tagRepository;
this.dataSource = dataSource;
this.cipher = cipher;
this.activeWorkflowManager = activeWorkflowManager;
this.workflowIndexService = workflowIndexService;
this.databaseConfig = databaseConfig;
this.workflowPublishHistoryRepository = workflowPublishHistoryRepository;
this.dbCredentials = [];
this.dbTags = [];
this.foreignKeyCommands = {
disable: {
sqlite: 'PRAGMA defer_foreign_keys = ON;',
'sqlite-pooled': 'PRAGMA defer_foreign_keys = ON;',
'sqlite-memory': 'PRAGMA defer_foreign_keys = ON;',
postgres: 'SET session_replication_role = replica;',
postgresql: 'SET session_replication_role = replica;',
},
enable: {
sqlite: 'PRAGMA defer_foreign_keys = OFF;',
'sqlite-pooled': 'PRAGMA defer_foreign_keys = OFF;',
'sqlite-memory': 'PRAGMA defer_foreign_keys = OFF;',
postgres: 'SET session_replication_role = DEFAULT;',
postgresql: 'SET session_replication_role = DEFAULT;',
},
};
}
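    // Loads all existing credentials and tags up front so imported workflows
    // can be matched against them without per-node database lookups.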
async initRecords() {
this.dbCredentials = await this.credentialsRepository.find();
this.dbTags = await this.tagRepository.find();
}
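    // Imports workflows into the given project: normalizes credential
    // references, assigns missing node ids, deactivates any previously active
    // workflow (recording the deactivation), then upserts workflows, sharing
    // records, and tag mappings in a single transaction.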
async importWorkflows(workflows, projectId) {
await this.initRecords();
for (const workflow of workflows) {
workflow.nodes.forEach((node) => {
this.toNewCredentialFormat(node);
if (!node.id)
node.id = (0, uuid_1.v4)();
});
const hasInvalidCreds = workflow.nodes.some((node) => !node.credentials?.id);
if (hasInvalidCreds)
await this.replaceInvalidCreds(workflow);
if (workflow.id) {
await this.activeWorkflowManager.remove(workflow.id);
}
}
const insertedWorkflows = [];
const { manager: dbManager } = this.credentialsRepository;
await dbManager.transaction(async (tx) => {
for (const workflow of workflows) {
if (workflow.active || workflow.activeVersionId) {
await this.workflowPublishHistoryRepository.addRecord({
workflowId: workflow.id,
versionId: workflow.activeVersionId ?? workflow.versionId ?? 'no id found',
event: 'deactivated',
userId: null,
});
workflow.active = false;
workflow.activeVersionId = null;
this.logger.info(`Deactivating workflow "${workflow.name}". Remember to activate later.`);
}
const exists = workflow.id ? await tx.existsBy(db_1.WorkflowEntity, { id: workflow.id }) : false;
const upsertResult = await tx.upsert(db_1.WorkflowEntity, workflow, ['id']);
const workflowId = upsertResult.identifiers.at(0)?.id;
insertedWorkflows.push({ ...workflow, id: workflowId });
const personalProject = await tx.findOneByOrFail(db_1.Project, { id: projectId });
if (!exists) {
await tx.upsert(db_1.SharedWorkflow, { workflowId, projectId: personalProject.id, role: 'workflow:owner' }, ['workflowId', 'projectId']);
}
if (!workflow.tags?.length)
continue;
await this.tagRepository.setTags(tx, this.dbTags, workflow);
for (const tag of workflow.tags) {
await tx.upsert(db_1.WorkflowTagMapping, { tagId: tag.id, workflowId }, [
'tagId',
'workflowId',
]);
}
}
});
if (!this.databaseConfig.isLegacySqlite) {
for (const workflow of insertedWorkflows) {
await this.workflowIndexService.updateIndexFor(workflow);
}
}
}
async replaceInvalidCreds(workflow) {
try {
await (0, workflow_helpers_1.replaceInvalidCredentials)(workflow);
}
catch (e) {
this.logger.error('Failed to replace invalid credential', { error: e });
}
}
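    // Cheap emptiness check: fetches at most one row instead of running COUNT(*).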
async isTableEmpty(tableName) {
try {
const result = await this.dataSource
.createQueryBuilder()
.select('1')
.from(tableName, tableName)
.limit(1)
.getRawMany();
            this.logger.debug(`Table ${tableName} is ${result.length === 0 ? 'empty' : 'not empty'}`);
return result.length === 0;
}
catch (error) {
this.logger.error(`Failed to check if table ${tableName} is empty:`, { error });
throw new Error(`Unable to check table ${tableName}`);
}
}
async areAllEntityTablesEmpty(tableNames) {
if (tableNames.length === 0) {
this.logger.info('No table names provided, considering all tables empty');
return true;
}
this.logger.info(`Checking if ${tableNames.length} tables are empty...`);
const nonEmptyTables = [];
for (const tableName of tableNames) {
const isEmpty = await this.isTableEmpty(tableName);
if (!isEmpty) {
nonEmptyTables.push(tableName);
}
}
if (nonEmptyTables.length > 0) {
            this.logger.info(`Found ${nonEmptyTables.length} table(s) with existing data: ${nonEmptyTables.join(', ')}`);
return false;
}
        this.logger.info('✅ All tables are empty');
return true;
}
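    // Clears a table with DELETE FROM, which SQLite supports (it has no
    // TRUNCATE statement) and which runs inside the import transaction.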
async truncateEntityTable(tableName, transactionManager) {
        this.logger.info(`🗑️ Truncating table: ${tableName}`);
await transactionManager.createQueryBuilder().delete().from(tableName, tableName).execute();
        this.logger.info(`   ✅ Table ${tableName} truncated successfully`);
}
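    // Maps each <entity>.jsonl (or chunked <entity>.<n>.jsonl) file in the
    // input directory to its target table via TypeORM entity metadata;
    // migrations.jsonl is excluded and validated separately.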
async getImportMetadata(inputDir) {
const files = await (0, promises_1.readdir)(inputDir);
const entityTableNamesMap = {};
const entityFiles = {};
for (const file of files) {
if (file.endsWith('.jsonl')) {
const entityName = file.replace(/\.\d+\.jsonl$/, '.jsonl').replace('.jsonl', '');
if (entityName === 'migrations') {
continue;
}
if (!entityTableNamesMap[entityName]) {
const entityMetadata = this.dataSource.entityMetadatas.find((meta) => meta.name.toLowerCase() === entityName);
if (!entityMetadata) {
                        this.logger.warn(`⚠️ No entity metadata found for ${entityName}, skipping...`);
continue;
}
entityTableNamesMap[entityName] = entityMetadata.tableName;
}
if (!entityFiles[entityName]) {
entityFiles[entityName] = [];
}
entityFiles[entityName].push((0, backend_common_1.safeJoinPath)(inputDir, file));
}
}
return {
tableNames: Object.values(entityTableNamesMap),
entityFiles,
};
}
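    // A .jsonl dump file contains newline-separated encrypted blocks; each
    // decrypted block is itself JSONL, one JSON object (table row) per line.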
async readEntityFile(filePath, customEncryptionKey) {
const content = await (0, promises_1.readFile)(filePath, 'utf8');
const entities = [];
const entitySchema = zod_1.z.record(zod_1.z.string(), zod_1.z.unknown());
for (const block of content.split('\n')) {
const lines = this.cipher.decrypt(block, customEncryptionKey).split(/\r?\n/);
for (let i = 0; i < lines.length; i++) {
const line = lines[i].trim();
if (!line)
continue;
try {
entities.push(entitySchema.parse(JSON.parse(line)));
}
catch (error) {
this.logger.error(`Failed to parse JSON on line ${i + 1} in ${filePath}`, { error });
this.logger.error(`Line content (first 200 chars): ${line.substring(0, 200)}...`);
throw new Error(`Invalid JSON on line ${i + 1} in file ${filePath}. JSONL format requires one complete JSON object per line.`);
}
}
}
return entities;
}
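    // The lazy require() below is the compiled form of a dynamic `await import('fs')`.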
async decompressEntitiesZip(inputDir) {
const entitiesZipPath = (0, backend_common_1.safeJoinPath)(inputDir, 'entities.zip');
const { existsSync } = await Promise.resolve().then(() => __importStar(require('fs')));
if (!existsSync(entitiesZipPath)) {
throw new Error(`entities.zip file not found in ${inputDir}.`);
}
        this.logger.info(`\nFound entities.zip file, decompressing to ${inputDir}...`);
await (0, compression_util_1.decompressFolder)(entitiesZipPath, inputDir);
        this.logger.info('✅ Successfully decompressed entities.zip');
}
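    // Full entity import: optionally reads a custom decryption key, unzips
    // entities.zip, checks that the dump's migration state matches the target
    // database, then imports everything in one transaction with foreign key
    // enforcement relaxed. Decompressed .jsonl files are removed afterwards.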
async importEntities(inputDir, truncateTables, keyFilePath) {
(0, validate_database_type_1.validateDbTypeForImportEntities)(this.dataSource.options.type);
let customEncryptionKey;
if (keyFilePath) {
try {
const keyFileContent = await (0, promises_1.readFile)(keyFilePath, 'utf8');
customEncryptionKey = keyFileContent.trim();
                this.logger.info(`Using custom encryption key from: ${keyFilePath}`);
}
catch (error) {
throw new Error(`Failed to read encryption key file at ${keyFilePath}: ${error instanceof Error ? error.message : 'Unknown error'}`);
}
}
await this.decompressEntitiesZip(inputDir);
await this.validateMigrations(inputDir, customEncryptionKey);
await this.dataSource.transaction(async (transactionManager) => {
await this.disableForeignKeyConstraints(transactionManager);
const importMetadata = await this.getImportMetadata(inputDir);
const { tableNames, entityFiles } = importMetadata;
const entityNames = Object.keys(entityFiles);
if (truncateTables) {
                this.logger.info('\n🗑️ Truncating tables before import...');
this.logger.info(`Found ${tableNames.length} tables to truncate: ${tableNames.join(', ')}`);
await Promise.all(tableNames.map(async (tableName) => await this.truncateEntityTable(tableName, transactionManager)));
                this.logger.info('✅ All tables truncated successfully');
}
if (!truncateTables && !(await this.areAllEntityTablesEmpty(tableNames))) {
                this.logger.info('\nNot all tables are empty; skipping import. Use --truncateTables to truncate them before importing if needed.');
return;
}
await this.importEntitiesFromFiles(inputDir, transactionManager, entityNames, entityFiles, customEncryptionKey);
await this.enableForeignKeyConstraints(transactionManager);
});
const { readdir, rm } = await Promise.resolve().then(() => __importStar(require('fs/promises')));
const files = await readdir(inputDir);
for (const file of files) {
if (file.endsWith('.jsonl') && file !== 'entities.zip') {
await rm((0, backend_common_1.safeJoinPath)(inputDir, file));
this.logger.info(` Removed: ${file}`);
}
}
        this.logger.info(`\n🗑️ Cleaned up decompressed files in ${inputDir}`);
}
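    // Inserts rows using raw parameterized INSERT statements built from each
    // JSON object's keys, bypassing TypeORM entity hooks and defaults so rows
    // are restored exactly as exported.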
async importEntitiesFromFiles(inputDir, transactionManager, entityNames, entityFiles, customEncryptionKey) {
        this.logger.info(`\nStarting entity import from directory: ${inputDir}`);
if (entityNames.length === 0) {
this.logger.warn('No entity files found in input directory');
return;
}
        this.logger.info(`Found ${entityNames.length} entity types to import:`);
let totalEntitiesImported = 0;
await Promise.all(entityNames.map(async (entityName) => {
const files = entityFiles[entityName];
            this.logger.info(`  • ${entityName}: ${files.length} file(s)`);
            this.logger.info(`\nImporting ${entityName} entities...`);
const entityMetadata = this.dataSource.entityMetadatas.find((meta) => meta.name.toLowerCase() === entityName);
if (!entityMetadata) {
                this.logger.warn(`  ⚠️ No entity metadata found for ${entityName}, skipping...`);
return;
}
const tableName = this.dataSource.driver.escape(entityMetadata.tableName);
            this.logger.info(`  Target table: ${tableName}`);
let entityCount = 0;
await Promise.all(files.map(async (filePath) => {
                this.logger.info(`  Reading file: ${filePath}`);
const entities = await this.readEntityFile(filePath, customEncryptionKey);
this.logger.info(` Found ${entities.length} entities`);
await Promise.all(entities.map(async (entity) => {
const columns = Object.keys(entity);
                    const columnNames = columns.map((key) => this.dataSource.driver.escape(key)).join(', ');
const columnValues = columns.map((key) => `:${key}`).join(', ');
const [query, parameters] = this.dataSource.driver.escapeQueryWithParameters(`INSERT INTO ${tableName} (${columnNames}) VALUES (${columnValues})`, entity, {});
await transactionManager.query(query, parameters);
}));
entityCount += entities.length;
}));
            this.logger.info(`  ✅ Completed ${entityName}: ${entityCount} entities imported`);
totalEntitiesImported += entityCount;
}));
        this.logger.info('\nImport Summary:');
this.logger.info(` Total entities imported: ${totalEntitiesImported}`);
this.logger.info(` Entity types processed: ${entityNames.length}`);
        this.logger.info('✅ Import completed successfully!');
}
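    // Converts legacy string credential references ({ type: 'name' }) to the
    // { id, name } object format, resolving ids against the credentials cached
    // by initRecords().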
toNewCredentialFormat(node) {
if (!node.credentials)
return;
for (const [type, name] of Object.entries(node.credentials)) {
if (typeof name !== 'string')
continue;
const nodeCredential = { id: null, name };
const match = this.dbCredentials.find((c) => c.name === name && c.type === type);
if (match)
nodeCredential.id = match.id;
node.credentials[type] = nodeCredential;
}
}
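    // SQLite defers foreign key checks until commit; Postgres switches the
    // session to replica mode, which skips foreign key enforcement for the
    // duration of the transaction.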
async disableForeignKeyConstraints(transactionManager) {
const disableCommand = this.foreignKeyCommands.disable[this.dataSource.options.type];
if (!disableCommand) {
throw new Error(`Unsupported database type: ${this.dataSource.options.type}. Supported types: sqlite, postgres`);
}
this.logger.debug(`Executing: ${disableCommand}`);
await transactionManager.query(disableCommand);
        this.logger.info('✅ Foreign key constraints disabled');
}
async enableForeignKeyConstraints(transactionManager) {
const enableCommand = this.foreignKeyCommands.enable[this.dataSource.options.type];
if (!enableCommand) {
throw new Error(`Unsupported database type: ${this.dataSource.options.type}. Supported types: sqlite, postgres`);
}
this.logger.debug(`Executing: ${enableCommand}`);
await transactionManager.query(enableCommand);
        this.logger.info('✅ Foreign key constraints re-enabled');
}
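    // Refuses to import a dump whose newest migration differs from the target
    // database's newest migration, comparing both timestamp and name.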
async validateMigrations(inputDir, customEncryptionKey) {
const migrationsFilePath = (0, backend_common_1.safeJoinPath)(inputDir, 'migrations.jsonl');
        let migrationsFileContent;
        try {
            migrationsFileContent = await (0, promises_1.readFile)(migrationsFilePath, 'utf8');
        }
        catch (error) {
            throw new Error('Migrations file not found. Cannot proceed with import without migration validation.');
        }
const importMigrations = this.cipher
.decrypt(migrationsFileContent, customEncryptionKey)
.trim()
.split('\n')
.filter((line) => line.trim())
.map((line) => {
try {
return JSON.parse(line);
}
catch (error) {
throw new Error(`Invalid JSON in migrations file: ${error instanceof Error ? error.message : 'Unknown error'}`);
}
});
if (importMigrations.length === 0) {
this.logger.info('No migrations found in import data');
return;
}
const latestImportMigration = importMigrations.reduce((latest, current) => {
const currentTimestamp = parseInt(String(current.timestamp || current.id || '0'));
const latestTimestamp = parseInt(String(latest.timestamp || latest.id || '0'));
return currentTimestamp > latestTimestamp ? current : latest;
});
this.logger.info(`Latest migration in import data: ${String(latestImportMigration.name)} (timestamp: ${String(latestImportMigration.timestamp || latestImportMigration.id)}, id: ${String(latestImportMigration.id)})`);
const tablePrefix = this.dataSource.options.entityPrefix || '';
const migrationsTableName = `${tablePrefix}migrations`;
const dbMigrations = await this.dataSource.query(`SELECT * FROM ${this.dataSource.driver.escape(migrationsTableName)} ORDER BY timestamp DESC LIMIT 1`);
if (dbMigrations.length === 0) {
throw new Error('Target database has no migrations. Cannot import data from a different migration state.');
}
const latestDbMigration = dbMigrations[0];
this.logger.info(`Latest migration in target database: ${latestDbMigration.name} (timestamp: ${latestDbMigration.timestamp}, id: ${latestDbMigration.id})`);
const importTimestamp = parseInt(String(latestImportMigration.timestamp || latestImportMigration.id || '0'));
const dbTimestamp = parseInt(String(latestDbMigration.timestamp || '0'));
const importName = latestImportMigration.name;
const dbName = latestDbMigration.name;
if (importTimestamp !== dbTimestamp) {
throw new Error(`Migration timestamp mismatch. Import data: ${String(importName)} (${String(importTimestamp)}) does not match target database ${String(dbName)} (${String(dbTimestamp)}). Cannot import data from different migration states.`);
}
if (importName !== dbName) {
throw new Error(`Migration name mismatch. Import data: ${String(importName)} does not match target database ${String(dbName)}. Cannot import data from different migration states.`);
}
        this.logger.info('✅ Migration validation passed - import data matches target database migration state');
}
};
exports.ImportService = ImportService;
exports.ImportService = ImportService = __decorate([
(0, di_1.Service)(),
__metadata("design:paramtypes", [backend_common_1.Logger,
db_1.CredentialsRepository,
db_1.TagRepository,
typeorm_1.DataSource,
n8n_core_1.Cipher,
active_workflow_manager_1.ActiveWorkflowManager,
workflow_index_service_1.WorkflowIndexService,
config_1.DatabaseConfig,
db_1.WorkflowPublishHistoryRepository])
], ImportService);
//# sourceMappingURL=import.service.js.map