diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 4beac42b36..9b95d10d44 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -9,7 +9,7 @@ jobs: strategy: matrix: - node-version: [12.x, 14.x] + node-version: [14.x] steps: - uses: actions/checkout@v1 diff --git a/package.json b/package.json index 866d256baf..44145dbb5e 100644 --- a/package.json +++ b/package.json @@ -13,7 +13,9 @@ "start:windows": "cd packages/cli/bin && n8n", "test": "lerna run test", "tslint": "lerna exec npm run tslint", - "watch": "lerna run --parallel watch" + "watch": "lerna run --parallel watch", + "webhook": "./packages/cli/bin/n8n webhook", + "worker": "./packages/cli/bin/n8n worker" }, "devDependencies": { "lerna": "^3.13.1", diff --git a/packages/cli/BREAKING-CHANGES.md b/packages/cli/BREAKING-CHANGES.md index 55be53d7c7..2a762e0476 100644 --- a/packages/cli/BREAKING-CHANGES.md +++ b/packages/cli/BREAKING-CHANGES.md @@ -2,6 +2,26 @@ This list shows all the versions which include breaking changes and how to upgrade. +## 0.118.0 + +### What changed? +The minimum Node.js version required for n8n is now v14. + +### When is action necessary? +If you're using n8n via npm or PM2, or if you're contributing to n8n. + +### How to upgrade: +Update the Node.js version to v14 or above. + +---------------------------- + +### What changed? +In the Postgres, CrateDB, QuestDB and TimescaleDB nodes, the `Execute Query` operation returns the results from all executed queries instead of just one of the results. + +### When is action necessary? + +If you use any of the above-mentioned nodes with the `Execute Query` operation and the result is relevant to you, you are encouraged to revisit your logic. The node output may now contain more information than before. This change was made so that the behavior is more consistent across n8n, where input with multiple rows should yield results according to all input data instead of only one. Please note: n8n was already running multiple queries based on the input. Only the output was changed. + ## 0.117.0 ### What changed?
diff --git a/packages/cli/bin/n8n b/packages/cli/bin/n8n index 583dfa5bf4..b72c643fb3 100755 --- a/packages/cli/bin/n8n +++ b/packages/cli/bin/n8n @@ -24,8 +24,8 @@ if (process.argv.length === 2) { var nodeVersion = process.versions.node.split('.'); -if (parseInt(nodeVersion[0], 10) < 12 || parseInt(nodeVersion[0], 10) === 12 && parseInt(nodeVersion[1], 10) < 9) { - console.log(`\nYour Node.js version (${process.versions.node}) is too old to run n8n.\nPlease update to version 12.9 or later!\n`); +if (parseInt(nodeVersion[0], 10) < 14) { + console.log(`\nYour Node.js version (${process.versions.node}) is too old to run n8n.\nPlease update to version 14 or later!\n`); process.exit(0); } diff --git a/packages/cli/commands/execute.ts b/packages/cli/commands/execute.ts index 864748f8cb..9eafd28c9e 100644 --- a/packages/cli/commands/execute.ts +++ b/packages/cli/commands/execute.ts @@ -13,7 +13,6 @@ import { CredentialTypes, Db, ExternalHooks, - GenericHelpers, IWorkflowBase, IWorkflowExecutionDataProcess, LoadNodesAndCredentials, @@ -23,6 +22,13 @@ import { WorkflowRunner, } from '../src'; +import { + getLogger, +} from '../src/Logger'; + +import { + LoggerProxy, +} from 'n8n-workflow'; export class Execute extends Command { static description = '\nExecutes a given workflow'; @@ -44,6 +50,9 @@ export class Execute extends Command { async run() { + const logger = getLogger(); + LoggerProxy.init(logger); + const { flags } = this.parse(Execute); // Start directly with the init of the database to improve startup time @@ -54,12 +63,12 @@ export class Execute extends Command { const loadNodesAndCredentialsPromise = loadNodesAndCredentials.init(); if (!flags.id && !flags.file) { - GenericHelpers.logOutput(`Either option "--id" or "--file" have to be set!`); + console.info(`Either option "--id" or "--file" have to be set!`); return; } if (flags.id && flags.file) { - GenericHelpers.logOutput(`Either "id" or "file" can be set never both!`); + console.info(`Either "id" or "file" can be set never both!`); return; } @@ -71,7 +80,7 @@ export class Execute extends Command { workflowData = JSON.parse(await fs.readFile(flags.file, 'utf8')); } catch (error) { if (error.code === 'ENOENT') { - GenericHelpers.logOutput(`The file "${flags.file}" could not be found.`); + console.info(`The file "${flags.file}" could not be found.`); return; } @@ -81,7 +90,7 @@ export class Execute extends Command { // Do a basic check if the data in the file looks right // TODO: Later check with the help of TypeScript data if it is valid or not if (workflowData === undefined || workflowData.nodes === undefined || workflowData.connections === undefined) { - GenericHelpers.logOutput(`The file "${flags.file}" does not contain valid workflow data.`); + console.info(`The file "${flags.file}" does not contain valid workflow data.`); return; } workflowId = workflowData.id!.toString(); @@ -95,8 +104,8 @@ export class Execute extends Command { workflowId = flags.id; workflowData = await Db.collections!.Workflow!.findOne(workflowId); if (workflowData === undefined) { - GenericHelpers.logOutput(`The workflow with the id "${workflowId}" does not exist.`); - return; + console.info(`The workflow with the id "${workflowId}" does not exist.`); + process.exit(1); } } @@ -138,7 +147,7 @@ export class Execute extends Command { if (startNode === undefined) { // If the workflow does not contain a start-node we can not know what // should be executed and with which data to start. - GenericHelpers.logOutput(`The workflow does not contain a "Start" node. 
So it can not be executed.`); + console.info(`The workflow does not contain a "Start" node. So it can not be executed.`); return Promise.resolve(); } @@ -163,9 +172,10 @@ export class Execute extends Command { } if (data.data.resultData.error) { - this.log('Execution was NOT successfull:'); - this.log('===================================='); - this.log(JSON.stringify(data, null, 2)); + console.info('Execution was NOT successful. See log message for details.'); + logger.info('Execution error:'); + logger.info('===================================='); + logger.info(JSON.stringify(data, null, 2)); const { error } = data.data.resultData; throw { @@ -174,14 +184,15 @@ export class Execute extends Command { }; } - this.log('Execution was successfull:'); - this.log('===================================='); - this.log(JSON.stringify(data, null, 2)); + console.info('Execution was successful:'); + console.info('===================================='); + console.info(JSON.stringify(data, null, 2)); } catch (e) { - console.error('\nGOT ERROR'); - console.log('===================================='); - console.error(e.message); - console.error(e.stack); + console.error('Error executing workflow. See log messages for details.'); + logger.error('\nExecution error:'); + logger.info('===================================='); + logger.error(e.message); + logger.error(e.stack); this.exit(1); } diff --git a/packages/cli/commands/export/credentials.ts b/packages/cli/commands/export/credentials.ts index ec12033446..de1aafe6dd 100644 --- a/packages/cli/commands/export/credentials.ts +++ b/packages/cli/commands/export/credentials.ts @@ -14,10 +14,17 @@ import { import { Db, - GenericHelpers, ICredentialsDecryptedDb, } from '../../src'; +import { + getLogger, +} from '../../src/Logger'; + +import { + LoggerProxy, +} from 'n8n-workflow'; + import * as fs from 'fs'; import * as path from 'path'; @@ -59,8 +66,11 @@ export class ExportCredentialsCommand extends Command { }; async run() { - const { flags } = this.parse(ExportCredentialsCommand); + const logger = getLogger(); + LoggerProxy.init(logger); + const { flags } = this.parse(ExportCredentialsCommand); + if (flags.backup) { flags.all = true; flags.pretty = true; @@ -68,41 +78,42 @@ export class ExportCredentialsCommand extends Command { } if (!flags.all && !flags.id) { - GenericHelpers.logOutput(`Either option "--all" or "--id" have to be set!`); + console.info(`Either option "--all" or "--id" have to be set!`); return; } if (flags.all && flags.id) { - GenericHelpers.logOutput(`You should either use "--all" or "--id" but never both!`); + console.info(`You should either use "--all" or "--id" but never both!`); return; } if (flags.separate) { try { if (!flags.output) { - GenericHelpers.logOutput(`You must inform an output directory via --output when using --separate`); + console.info(`You must inform an output directory via --output when using --separate`); return; } if (fs.existsSync(flags.output)) { if (!fs.lstatSync(flags.output).isDirectory()) { - GenericHelpers.logOutput(`The paramenter --output must be a directory`); + console.info(`The paramenter --output must be a directory`); return; } } else { fs.mkdirSync(flags.output, { recursive: true }); } } catch (e) { - console.error('\nFILESYSTEM ERROR'); - console.log('===================================='); - console.error(e.message); - console.error(e.stack); + console.error('Aborting execution as a filesystem error has been encountered while creating the output directory. 
See log messages for details.'); + logger.error('\nFILESYSTEM ERROR'); + logger.info('===================================='); + logger.error(e.message); + logger.error(e.stack); this.exit(1); } } else if (flags.output) { if (fs.existsSync(flags.output)) { if (fs.lstatSync(flags.output).isDirectory()) { - GenericHelpers.logOutput(`The paramenter --output must be a writeble file`); + console.info(`The paramenter --output must be a writeble file`); return; } } @@ -143,18 +154,21 @@ export class ExportCredentialsCommand extends Command { const filename = (flags.output!.endsWith(path.sep) ? flags.output! : flags.output + path.sep) + credentials[i].id + '.json'; fs.writeFileSync(filename, fileContents); } - console.log('Successfully exported', i, 'credentials.'); + console.info(`Successfully exported ${i} credentials.`); } else { const fileContents = JSON.stringify(credentials, null, flags.pretty ? 2 : undefined); if (flags.output) { fs.writeFileSync(flags.output!, fileContents); - console.log('Successfully exported', credentials.length, 'credentials.'); + console.info(`Successfully exported ${credentials.length} credentials.`); } else { - console.log(fileContents); + console.info(fileContents); } } + // Force exit as process won't exit using MySQL or Postgres. + process.exit(0); } catch (error) { - this.error(error.message); + console.error('Error exporting credentials. See log messages for details.'); + logger.error(error.message); this.exit(1); } } diff --git a/packages/cli/commands/export/workflow.ts b/packages/cli/commands/export/workflow.ts index 0cf1d712ad..9d478dbaef 100644 --- a/packages/cli/commands/export/workflow.ts +++ b/packages/cli/commands/export/workflow.ts @@ -9,9 +9,16 @@ import { import { Db, - GenericHelpers, } from '../../src'; +import { + getLogger, +} from '../../src/Logger'; + +import { + LoggerProxy, +} from 'n8n-workflow'; + import * as fs from 'fs'; import * as path from 'path'; @@ -49,6 +56,9 @@ export class ExportWorkflowsCommand extends Command { }; async run() { + const logger = getLogger(); + LoggerProxy.init(logger); + const { flags } = this.parse(ExportWorkflowsCommand); if (flags.backup) { @@ -58,41 +68,42 @@ export class ExportWorkflowsCommand extends Command { } if (!flags.all && !flags.id) { - GenericHelpers.logOutput(`Either option "--all" or "--id" have to be set!`); + console.info(`Either option "--all" or "--id" have to be set!`); return; } if (flags.all && flags.id) { - GenericHelpers.logOutput(`You should either use "--all" or "--id" but never both!`); + console.info(`You should either use "--all" or "--id" but never both!`); return; } if (flags.separate) { try { if (!flags.output) { - GenericHelpers.logOutput(`You must inform an output directory via --output when using --separate`); + console.info(`You must inform an output directory via --output when using --separate`); return; } if (fs.existsSync(flags.output)) { if (!fs.lstatSync(flags.output).isDirectory()) { - GenericHelpers.logOutput(`The paramenter --output must be a directory`); + console.info(`The paramenter --output must be a directory`); return; } } else { fs.mkdirSync(flags.output, { recursive: true }); } } catch (e) { - console.error('\nFILESYSTEM ERROR'); - console.log('===================================='); - console.error(e.message); - console.error(e.stack); + console.error('Aborting execution as a filesystem error has been encountered while creating the output directory. 
See log messages for details.'); + logger.error('\nFILESYSTEM ERROR'); + logger.info('===================================='); + logger.error(e.message); + logger.error(e.stack); this.exit(1); } } else if (flags.output) { if (fs.existsSync(flags.output)) { if (fs.lstatSync(flags.output).isDirectory()) { - GenericHelpers.logOutput(`The paramenter --output must be a writeble file`); + console.info(`The paramenter --output must be a writeble file`); return; } } @@ -119,18 +130,21 @@ export class ExportWorkflowsCommand extends Command { const filename = (flags.output!.endsWith(path.sep) ? flags.output! : flags.output + path.sep) + workflows[i].id + '.json'; fs.writeFileSync(filename, fileContents); } - console.log('Successfully exported', i, 'workflows.'); + console.info(`Successfully exported ${i} workflows.`); } else { const fileContents = JSON.stringify(workflows, null, flags.pretty ? 2 : undefined); if (flags.output) { fs.writeFileSync(flags.output!, fileContents); - console.log('Successfully exported', workflows.length, workflows.length === 1 ? 'workflow.' : 'workflows.'); + console.info(`Successfully exported ${workflows.length} ${workflows.length === 1 ? 'workflow.' : 'workflows.'}`); } else { - console.log(fileContents); + console.info(fileContents); } } + // Force exit as process won't exit using MySQL or Postgres. + process.exit(0); } catch (error) { - this.error(error.message); + console.error('Error exporting workflows. See log messages for details.'); + logger.error(error.message); this.exit(1); } } diff --git a/packages/cli/commands/import/credentials.ts b/packages/cli/commands/import/credentials.ts index ed0da4e94b..b038f33693 100644 --- a/packages/cli/commands/import/credentials.ts +++ b/packages/cli/commands/import/credentials.ts @@ -10,9 +10,16 @@ import { import { Db, - GenericHelpers, } from '../../src'; +import { + getLogger, +} from '../../src/Logger'; + +import { + LoggerProxy, +} from 'n8n-workflow'; + import * as fs from 'fs'; import * as glob from 'glob-promise'; import * as path from 'path'; @@ -37,17 +44,20 @@ export class ImportCredentialsCommand extends Command { }; async run() { + const logger = getLogger(); + LoggerProxy.init(logger); + const { flags } = this.parse(ImportCredentialsCommand); if (!flags.input) { - GenericHelpers.logOutput(`An input file or directory with --input must be provided`); + console.info(`An input file or directory with --input must be provided`); return; } if (flags.separate) { if (fs.existsSync(flags.input)) { if (!fs.lstatSync(flags.input).isDirectory()) { - GenericHelpers.logOutput(`The paramenter --input must be a directory`); + console.info(`The paramenter --input must be a directory`); return; } } @@ -89,9 +99,11 @@ export class ImportCredentialsCommand extends Command { await Db.collections.Credentials!.save(fileContents[i]); } } - console.log('Successfully imported', i, 'credentials.'); + console.info(`Successfully imported ${i} ${i === 1 ? 'credential.' : 'credentials.'}`); + process.exit(0); } catch (error) { - this.error(error.message); + console.error('An error occurred while importing credentials.
See log messages for details.'); + logger.error(error.message); this.exit(1); } } diff --git a/packages/cli/commands/import/workflow.ts b/packages/cli/commands/import/workflow.ts index 7a0dc49e10..5b31041a44 100644 --- a/packages/cli/commands/import/workflow.ts +++ b/packages/cli/commands/import/workflow.ts @@ -5,9 +5,16 @@ import { import { Db, - GenericHelpers, } from '../../src'; +import { + getLogger, +} from '../../src/Logger'; + +import { + LoggerProxy, +} from 'n8n-workflow'; + import * as fs from 'fs'; import * as glob from 'glob-promise'; import * as path from 'path'; @@ -32,17 +39,20 @@ export class ImportWorkflowsCommand extends Command { }; async run() { + const logger = getLogger(); + LoggerProxy.init(logger); + const { flags } = this.parse(ImportWorkflowsCommand); if (!flags.input) { - GenericHelpers.logOutput(`An input file or directory with --input must be provided`); + console.info(`An input file or directory with --input must be provided`); return; } if (flags.separate) { if (fs.existsSync(flags.input)) { if (!fs.lstatSync(flags.input).isDirectory()) { - GenericHelpers.logOutput(`The paramenter --input must be a directory`); + console.info(`The paramenter --input must be a directory`); return; } } @@ -69,9 +79,11 @@ export class ImportWorkflowsCommand extends Command { } } - console.log('Successfully imported', i, i === 1 ? 'workflow.' : 'workflows.'); + console.info(`Successfully imported ${i} ${i === 1 ? 'workflow.' : 'workflows.'}`); + process.exit(0); } catch (error) { - this.error(error.message); + console.error('An error occurred while importing workflows. See log messages for details.'); + logger.error(error.message); this.exit(1); } } diff --git a/packages/cli/commands/start.ts b/packages/cli/commands/start.ts index c01ad0ed4a..023e3e827f 100644 --- a/packages/cli/commands/start.ts +++ b/packages/cli/commands/start.ts @@ -25,11 +25,17 @@ import { } from '../src'; import { IDataObject } from 'n8n-workflow'; +import { + getLogger, +} from '../src/Logger'; + +import { + LoggerProxy, +} from 'n8n-workflow'; let activeWorkflowRunner: ActiveWorkflowRunner.ActiveWorkflowRunner | undefined; let processExistCode = 0; - export class Start extends Command { static description = 'Starts n8n. Makes Web-UI available and starts active workflows'; @@ -71,7 +77,7 @@ export class Start extends Command { * get removed. */ static async stopProcess() { - console.log(`\nStopping n8n...`); + getLogger().info('\nStopping n8n...'); try { const externalHooks = ExternalHooks(); @@ -132,13 +138,18 @@ export class Start extends Command { // Wrap that the process does not close but we can still use async await (async () => { try { + const logger = getLogger(); + LoggerProxy.init(logger); + logger.info('Initializing n8n process'); + // Start directly with the init of the database to improve startup time const startDbInitPromise = Db.init().catch((error: Error) => { - console.error(`There was an error initializing DB: ${error.message}`); + logger.error(`There was an error initializing DB: "${error.message}"`); processExistCode = 1; // @ts-ignore process.emit('SIGINT'); + process.exit(1); }); // Make sure the settings exist @@ -184,7 +195,7 @@ export class Start extends Command { cumulativeTimeout += now - lastTimer; lastTimer = now; if (cumulativeTimeout > redisConnectionTimeoutLimit) { - console.error('Unable to connect to Redis after ' + redisConnectionTimeoutLimit + '. Exiting process.'); + logger.error('Unable to connect to Redis after ' + redisConnectionTimeoutLimit + ".
Exiting process."); process.exit(1); } } @@ -213,9 +224,9 @@ export class Start extends Command { redis.on('error', (error) => { if (error.toString().includes('ECONNREFUSED') === true) { - console.warn('Redis unavailable - trying to reconnect...'); + logger.warn('Redis unavailable - trying to reconnect...'); } else { - console.warn('Error with Redis: ', error); + logger.warn('Error with Redis: ', error); } }); } diff --git a/packages/cli/commands/update/workflow.ts b/packages/cli/commands/update/workflow.ts index 06be05759d..c7d81901aa 100644 --- a/packages/cli/commands/update/workflow.ts +++ b/packages/cli/commands/update/workflow.ts @@ -11,6 +11,13 @@ import { GenericHelpers, } from '../../src'; +import { + getLogger, +} from '../../src/Logger'; + +import { + LoggerProxy, +} from 'n8n-workflow'; export class UpdateWorkflowCommand extends Command { static description = '\Update workflows'; @@ -34,25 +41,28 @@ export class UpdateWorkflowCommand extends Command { }; async run() { + const logger = getLogger(); + LoggerProxy.init(logger); + const { flags } = this.parse(UpdateWorkflowCommand); if (!flags.all && !flags.id) { - GenericHelpers.logOutput(`Either option "--all" or "--id" have to be set!`); + console.info(`Either option "--all" or "--id" have to be set!`); return; } if (flags.all && flags.id) { - GenericHelpers.logOutput(`Either something else on top should be "--all" or "--id" can be set never both!`); + console.info(`Either something else on top should be "--all" or "--id" can be set never both!`); return; } const updateQuery: IDataObject = {}; if (flags.active === undefined) { - GenericHelpers.logOutput(`No update flag like "--active=true" has been set!`); + console.info(`No update flag like "--active=true" has been set!`); return; } else { if (!['false', 'true'].includes(flags.active)) { - GenericHelpers.logOutput(`Valid values for flag "--active" are only "false" or "true"!`); + console.info(`Valid values for flag "--active" are only "false" or "true"!`); return; } updateQuery.active = flags.active === 'true'; @@ -63,20 +73,21 @@ export class UpdateWorkflowCommand extends Command { const findQuery: IDataObject = {}; if (flags.id) { - console.log(`Deactivating workflow with ID: ${flags.id}`); + console.info(`Deactivating workflow with ID: ${flags.id}`); findQuery.id = flags.id; } else { - console.log('Deactivating all workflows'); + console.info('Deactivating all workflows'); findQuery.active = true; } await Db.collections.Workflow!.update(findQuery, updateQuery); - console.log('Done'); + console.info('Done'); } catch (e) { - console.error('\nGOT ERROR'); - console.log('===================================='); - console.error(e.message); - console.error(e.stack); + console.error('Error updating database. See log messages for details.'); + logger.error('\nGOT ERROR'); + logger.info('===================================='); + logger.error(e.message); + logger.error(e.stack); this.exit(1); } diff --git a/packages/cli/commands/webhook.ts b/packages/cli/commands/webhook.ts index 68c5876543..e1acc94874 100644 --- a/packages/cli/commands/webhook.ts +++ b/packages/cli/commands/webhook.ts @@ -20,6 +20,13 @@ import { } from '../src'; import { IDataObject } from 'n8n-workflow'; +import { + getLogger, +} from '../src/Logger'; + +import { + LoggerProxy, +} from 'n8n-workflow'; let activeWorkflowRunner: ActiveWorkflowRunner.ActiveWorkflowRunner | undefined; let processExistCode = 0; @@ -42,7 +49,7 @@ export class Webhook extends Command { * get removed. 
*/ static async stopProcess() { - console.log(`\nStopping n8n...`); + LoggerProxy.info(`\nStopping n8n...`); try { const externalHooks = ExternalHooks(); @@ -54,17 +61,6 @@ export class Webhook extends Command { process.exit(processExistCode); }, 30000); - const removePromises = []; - if (activeWorkflowRunner !== undefined) { - removePromises.push(activeWorkflowRunner.removeAll()); - } - - // Remove all test webhooks - const testWebhooks = TestWebhooks.getInstance(); - removePromises.push(testWebhooks.removeAll()); - - await Promise.all(removePromises); - // Wait for active workflow executions to finish const activeExecutionsInstance = ActiveExecutions.getInstance(); let executingWorkflows = activeExecutionsInstance.getActiveExecutions(); @@ -72,7 +68,7 @@ export class Webhook extends Command { let count = 0; while (executingWorkflows.length !== 0) { if (count++ % 4 === 0) { - console.log(`Waiting for ${executingWorkflows.length} active executions to finish...`); + LoggerProxy.info(`Waiting for ${executingWorkflows.length} active executions to finish...`); } await new Promise((resolve) => { setTimeout(resolve, 500); @@ -81,7 +77,7 @@ export class Webhook extends Command { } } catch (error) { - console.error('There was an error shutting down n8n.', error); + LoggerProxy.error('There was an error shutting down n8n.', error); } process.exit(processExistCode); @@ -89,6 +85,9 @@ export class Webhook extends Command { async run() { + const logger = getLogger(); + LoggerProxy.init(logger); + // Make sure that n8n shuts down gracefully if possible process.on('SIGTERM', Webhook.stopProcess); process.on('SIGINT', Webhook.stopProcess); @@ -116,11 +115,12 @@ export class Webhook extends Command { try { // Start directly with the init of the database to improve startup time const startDbInitPromise = Db.init().catch(error => { - console.error(`There was an error initializing DB: ${error.message}`); + logger.error(`There was an error initializing DB: "${error.message}"`); processExistCode = 1; // @ts-ignore process.emit('SIGINT'); + process.exit(1); }); // Make sure the settings exist @@ -166,7 +166,7 @@ export class Webhook extends Command { cumulativeTimeout += now - lastTimer; lastTimer = now; if (cumulativeTimeout > redisConnectionTimeoutLimit) { - console.error('Unable to connect to Redis after ' + redisConnectionTimeoutLimit + '. Exiting process.'); + logger.error('Unable to connect to Redis after ' + redisConnectionTimeoutLimit + ". Exiting process."); process.exit(1); } } @@ -195,9 +195,9 @@ export class Webhook extends Command { redis.on('error', (error) => { if (error.toString().includes('ECONNREFUSED') === true) { - console.warn('Redis unavailable - trying to reconnect...'); + logger.warn('Redis unavailable - trying to reconnect...'); } else { - console.warn('Error with Redis: ', error); + logger.warn('Error with Redis: ', error); } }); } @@ -209,14 +209,16 @@ export class Webhook extends Command { await activeWorkflowRunner.initWebhooks(); const editorUrl = GenericHelpers.getBaseUrl(); - this.log('Webhook listener waiting for requests.'); + console.info('Webhook listener waiting for requests.'); } catch (error) { - this.error(`There was an error: ${error.message}`); + console.error('Exiting due to error. See log message for details.'); + logger.error(`Webhook process cannot continue. 
"${error.message}"`); processExistCode = 1; // @ts-ignore process.emit('SIGINT'); + process.exit(1); } })(); } diff --git a/packages/cli/commands/worker.ts b/packages/cli/commands/worker.ts index d7af9ac794..56da5d9c98 100644 --- a/packages/cli/commands/worker.ts +++ b/packages/cli/commands/worker.ts @@ -37,6 +37,14 @@ import { WorkflowExecuteAdditionalData, } from '../src'; +import { + getLogger, +} from '../src/Logger'; + +import { + LoggerProxy, +} from 'n8n-workflow'; + import * as config from '../config'; import * as Bull from 'bull'; import * as Queue from '../src/Queue'; @@ -71,7 +79,7 @@ export class Worker extends Command { * get removed. */ static async stopProcess() { - console.log(`\nStopping n8n...`); + LoggerProxy.info(`Stopping n8n...`); // Stop accepting new jobs Worker.jobQueue.pause(true); @@ -95,7 +103,7 @@ export class Worker extends Command { while (Object.keys(Worker.runningJobs).length !== 0) { if (count++ % 4 === 0) { const waitLeft = Math.ceil((stopTime - new Date().getTime()) / 1000); - console.log(`Waiting for ${Object.keys(Worker.runningJobs).length} active executions to finish... (wait ${waitLeft} more seconds)`); + LoggerProxy.info(`Waiting for ${Object.keys(Worker.runningJobs).length} active executions to finish... (wait ${waitLeft} more seconds)`); } await new Promise((resolve) => { setTimeout(resolve, 500); @@ -103,7 +111,7 @@ export class Worker extends Command { } } catch (error) { - console.error('There was an error shutting down n8n.', error); + LoggerProxy.error('There was an error shutting down n8n.', error); } process.exit(Worker.processExistCode); @@ -113,7 +121,7 @@ export class Worker extends Command { const jobData = job.data as IBullJobData; const executionDb = await Db.collections.Execution!.findOne(jobData.executionId) as IExecutionFlattedDb; const currentExecutionDb = ResponseHelper.unflattenExecutionData(executionDb) as IExecutionResponse; - console.log(`Start job: ${job.id} (Workflow ID: ${currentExecutionDb.workflowData.id} | Execution: ${jobData.executionId})`); + LoggerProxy.info(`Start job: ${job.id} (Workflow ID: ${currentExecutionDb.workflowData.id} | Execution: ${jobData.executionId})`); let staticData = currentExecutionDb.workflowData!.staticData; if (jobData.loadStaticData === true) { @@ -170,7 +178,10 @@ export class Worker extends Command { } async run() { - console.log('Starting n8n worker...'); + const logger = getLogger(); + LoggerProxy.init(logger); + + console.info('Starting n8n worker...'); // Make sure that n8n shuts down gracefully if possible process.on('SIGTERM', Worker.stopProcess); @@ -183,11 +194,12 @@ export class Worker extends Command { // Start directly with the init of the database to improve startup time const startDbInitPromise = Db.init().catch(error => { - console.error(`There was an error initializing DB: ${error.message}`); + logger.error(`There was an error initializing DB: "${error.message}"`); Worker.processExistCode = 1; // @ts-ignore process.emit('SIGINT'); + process.exit(1); }); // Make sure the settings exist @@ -221,10 +233,10 @@ export class Worker extends Command { const versions = await GenericHelpers.getVersions(); - console.log('\nn8n worker is now ready'); - console.log(` * Version: ${versions.cli}`); - console.log(` * Concurrency: ${flags.concurrency}`); - console.log(''); + console.info('\nn8n worker is now ready'); + console.info(` * Version: ${versions.cli}`); + console.info(` * Concurrency: ${flags.concurrency}`); + console.info(''); Worker.jobQueue.on('global:progress', (jobId, 
progress) => { // Progress of a job got updated which does get used @@ -252,27 +264,28 @@ export class Worker extends Command { cumulativeTimeout += now - lastTimer; lastTimer = now; if (cumulativeTimeout > redisConnectionTimeoutLimit) { - console.error('Unable to connect to Redis after ' + redisConnectionTimeoutLimit + '. Exiting process.'); + logger.error('Unable to connect to Redis after ' + redisConnectionTimeoutLimit + ". Exiting process."); process.exit(1); } } - console.warn('Redis unavailable - trying to reconnect...'); + logger.warn('Redis unavailable - trying to reconnect...'); } else if (error.toString().includes('Error initializing Lua scripts') === true) { // This is a non-recoverable error // Happens when worker starts and Redis is unavailable // Even if Redis comes back online, worker will be zombie - console.error('Error initializing worker.'); + logger.error('Error initializing worker.'); process.exit(2); } else { - console.error('Error from queue: ', error); + logger.error('Error from queue: ', error); } }); } catch (error) { - this.error(`There was an error: ${error.message}`); + logger.error(`Worker process cannot continue. "${error.message}"`); Worker.processExistCode = 1; // @ts-ignore process.emit('SIGINT'); + process.exit(1); } })(); diff --git a/packages/cli/config/index.ts b/packages/cli/config/index.ts index 9b2c6ea614..c8d97684d9 100644 --- a/packages/cli/config/index.ts +++ b/packages/cli/config/index.ts @@ -1,5 +1,7 @@ import * as convict from 'convict'; import * as dotenv from 'dotenv'; +import * as path from 'path'; +import * as core from 'n8n-core'; dotenv.config(); @@ -572,6 +574,41 @@ const config = convict({ }, }, + logs: { + level: { + doc: 'Log output level. Options are error, warn, info, verbose and debug.', + format: String, + default: 'info', + env: 'N8N_LOG_LEVEL', + }, + output: { + doc: 'Where to output logs. Options are: console, file. 
Multiple can be separated by comma (",")', + format: String, + default: 'console', + env: 'N8N_LOG_OUTPUT', + }, + file: { + fileCountMax: { + doc: 'Maximum number of files to keep.', + format: Number, + default: 100, + env: 'N8N_LOG_FILE_COUNT_MAX', + }, + fileSizeMax: { + doc: 'Maximum size for each log file in MB.', + format: Number, + default: 16, + env: 'N8N_LOG_FILE_SIZE_MAX', + }, + location: { + doc: 'Log file location; only used if log output is set to file.', + format: String, + default: path.join(core.UserSettings.getUserN8nFolderPath(), 'logs/n8n.log'), + env: 'N8N_LOG_FILE_LOCATION', + }, + }, + }, + }); // Overwrite default configuration with settings which got defined in diff --git a/packages/cli/package.json b/packages/cli/package.json index daa98944e7..aa48f92da2 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -1,6 +1,6 @@ { "name": "n8n", - "version": "0.117.0", + "version": "0.118.0", "description": "n8n Workflow Automation Tool", "license": "SEE LICENSE IN LICENSE.md", "homepage": "https://n8n.io", @@ -44,7 +44,7 @@ "workflow" ], "engines": { - "node": ">=12.0.0" + "node": ">=14.0.0" }, "files": [ "bin", @@ -64,7 +64,7 @@ "@types/jest": "^26.0.13", "@types/localtunnel": "^1.9.0", "@types/lodash.get": "^4.4.6", - "@types/node": "14.0.27", + "@types/node": "^14.14.40", "@types/open": "^6.1.0", "@types/parseurl": "^1.3.1", "@types/request-promise-native": "~1.0.15", @@ -87,6 +87,7 @@ "body-parser": "^1.18.3", "body-parser-xml": "^1.1.0", "bull": "^3.19.0", + "callsites": "^3.1.0", "client-oauth2": "^4.2.5", "compression": "^1.7.4", "connect-history-api-fallback": "^1.6.0", @@ -103,10 +104,10 @@ "localtunnel": "^2.0.0", "lodash.get": "^4.4.2", "mysql2": "~2.2.0", - "n8n-core": "~0.68.0", - "n8n-editor-ui": "~0.87.0", - "n8n-nodes-base": "~0.114.0", - "n8n-workflow": "~0.56.0", + "n8n-core": "~0.69.0", + "n8n-editor-ui": "~0.88.0", + "n8n-nodes-base": "~0.115.0", + "n8n-workflow": "~0.57.0", "oauth-1.0a": "^2.2.6", "open": "^7.0.0", "pg": "^8.3.0", @@ -114,7 +115,7 @@ "request-promise-native": "^1.0.7", "sqlite3": "^5.0.1", "sse-channel": "^3.1.1", - "tslib": "1.13.0", + "tslib": "1.14.1", "typeorm": "^0.2.30" }, "jest": { diff --git a/packages/cli/src/ActiveWorkflowRunner.ts b/packages/cli/src/ActiveWorkflowRunner.ts index 07cd0a8b82..9f9b9f3580 100644 --- a/packages/cli/src/ActiveWorkflowRunner.ts +++ b/packages/cli/src/ActiveWorkflowRunner.ts @@ -35,6 +35,9 @@ import { } from 'n8n-workflow'; import * as express from 'express'; +import { + LoggerProxy as Logger, +} from 'n8n-workflow'; export class ActiveWorkflowRunner { private activeWorkflows: ActiveWorkflows | null = null; @@ -43,7 +46,6 @@ export class ActiveWorkflowRunner { [key: string]: IActivationError; } = {}; - async init() { // Get the active workflows from database @@ -59,20 +61,24 @@ export class ActiveWorkflowRunner { this.activeWorkflows = new ActiveWorkflows(); if (workflowsData.length !== 0) { - console.log('\n ================================'); - console.log(' Start Active Workflows:'); - console.log(' ================================'); + console.info(' ================================'); + console.info(' Start Active Workflows:'); + console.info(' ================================'); for (const workflowData of workflowsData) { console.log(` - ${workflowData.name}`); + Logger.debug(`Initializing active workflow "${workflowData.name}" (startup)`, {workflowName: workflowData.name, workflowId: workflowData.id}); try { await this.add(workflowData.id.toString(), 'init', 
workflowData); + Logger.verbose(`Successfully started workflow "${workflowData.name}"`, {workflowName: workflowData.name, workflowId: workflowData.id}); console.log(` => Started`); } catch (error) { console.log(` => ERROR: Workflow could not be activated:`); console.log(` ${error.message}`); + Logger.error(`Unable to initialize workflow "${workflowData.name}" (startup)`, {workflowName: workflowData.name, workflowId: workflowData.id}); } } + Logger.verbose('Finished initializing active workflows (startup)'); } } @@ -88,6 +94,7 @@ export class ActiveWorkflowRunner { */ async removeAll(): Promise { const activeWorkflowId: string[] = []; + Logger.verbose('Call to remove all active workflows received (removeAll)'); if (this.activeWorkflows !== null) { // TODO: This should be renamed! @@ -117,6 +124,7 @@ export class ActiveWorkflowRunner { * @memberof ActiveWorkflowRunner */ async executeWebhook(httpMethod: WebhookHttpMethod, path: string, req: express.Request, res: express.Response): Promise { + Logger.debug(`Received webhook "${httpMethod}" for path "${path}"`); if (this.activeWorkflows === null) { throw new ResponseHelper.ResponseError('The "activeWorkflows" instance did not get initialized yet.', 404, 404); } @@ -437,6 +445,7 @@ export class ActiveWorkflowRunner { return ((workflow: Workflow, node: INode) => { const returnFunctions = NodeExecuteFunctions.getExecutePollFunctions(workflow, node, additionalData, mode, activation); returnFunctions.__emit = (data: INodeExecutionData[][]): void => { + Logger.debug(`Received event to trigger execution for workflow "${workflow.name}"`); this.runWorkflow(workflowData, node, data, additionalData, mode); }; return returnFunctions; @@ -458,6 +467,7 @@ export class ActiveWorkflowRunner { return ((workflow: Workflow, node: INode) => { const returnFunctions = NodeExecuteFunctions.getExecuteTriggerFunctions(workflow, node, additionalData, mode, activation); returnFunctions.emit = (data: INodeExecutionData[][]): void => { + Logger.debug(`Received trigger for workflow "${workflow.name}"`); WorkflowHelpers.saveStaticData(workflow); this.runWorkflow(workflowData, node, data, additionalData, mode).catch((err) => console.error(err)); }; @@ -492,6 +502,7 @@ export class ActiveWorkflowRunner { const canBeActivated = workflowInstance.checkIfWorkflowCanBeActivated(['n8n-nodes-base.start']); if (canBeActivated === false) { + Logger.error(`Unable to activate workflow "${workflowData.name}"`); throw new Error(`The workflow can not be activated because it does not contain any nodes which could start the workflow.
Only workflows which have trigger or webhook nodes can be activated.`); } @@ -507,6 +518,7 @@ export class ActiveWorkflowRunner { if (workflowInstance.getTriggerNodes().length !== 0 || workflowInstance.getPollNodes().length !== 0) { await this.activeWorkflows.add(workflowId, workflowInstance, additionalData, mode, activation, getTriggerFunctions, getPollFunctions); + Logger.info(`Successfully activated workflow "${workflowData.name}"`); } if (this.activationErrors[workflowId] !== undefined) { diff --git a/packages/cli/src/GenericHelpers.ts b/packages/cli/src/GenericHelpers.ts index 74e9578fec..8dba633baa 100644 --- a/packages/cli/src/GenericHelpers.ts +++ b/packages/cli/src/GenericHelpers.ts @@ -3,14 +3,11 @@ import * as express from 'express'; import { join as pathJoin } from 'path'; import { readFile as fsReadFile, -} from 'fs'; -import { promisify } from 'util'; +} from 'fs/promises'; import { IDataObject } from 'n8n-workflow'; import { IPackageVersions } from './'; -const fsReadFileAsync = promisify(fsReadFile); - let versionCache: IPackageVersions | undefined; @@ -72,7 +69,7 @@ export async function getVersions(): Promise { return versionCache; } - const packageFile = await fsReadFileAsync(pathJoin(__dirname, '../../package.json'), 'utf8') as string; + const packageFile = await fsReadFile(pathJoin(__dirname, '../../package.json'), 'utf8') as string; const packageData = JSON.parse(packageFile); versionCache = { @@ -122,7 +119,7 @@ export async function getConfigValue(configKey: string): Promise => { const results: string[] = []; const nodeModulesPath = `${this.nodeModulesPath}/${relativePath}`; - for (const file of await fsReaddirAsync(nodeModulesPath)) { + for (const file of await fsReaddir(nodeModulesPath)) { const isN8nNodesPackage = file.indexOf('n8n-nodes-') === 0; const isNpmScopedPackage = file.indexOf('@') === 0; if (!isN8nNodesPackage && !isNpmScopedPackage) { continue; } - if (!(await fsStatAsync(nodeModulesPath)).isDirectory()) { + if (!(await fsStat(nodeModulesPath)).isDirectory()) { continue; } if (isN8nNodesPackage) { results.push(`${relativePath}${file}`); } @@ -234,7 +228,7 @@ class LoadNodesAndCredentialsClass { const packagePath = path.join(this.nodeModulesPath, packageName); // Read the data from the package.json file to see if any n8n data is defiend - const packageFileString = await fsReadFileAsync(path.join(packagePath, 'package.json'), 'utf8'); + const packageFileString = await fsReadFile(path.join(packagePath, 'package.json'), 'utf8'); const packageFile = JSON.parse(packageFileString); if (!packageFile.hasOwnProperty('n8n')) { return; diff --git a/packages/cli/src/Logger.ts b/packages/cli/src/Logger.ts new file mode 100644 index 0000000000..3bbc61e6a9 --- /dev/null +++ b/packages/cli/src/Logger.ts @@ -0,0 +1,114 @@ +import config = require('../config'); +import * as winston from 'winston'; + +import { + IDataObject, + ILogger, + LogTypes, +} from 'n8n-workflow'; + +import * as callsites from 'callsites'; +import { basename } from 'path'; + +class Logger implements ILogger { + private logger: winston.Logger; + + constructor() { + const level = config.get('logs.level'); + const output = (config.get('logs.output') as string).split(',').map(output => output.trim()); + + this.logger = winston.createLogger({ + level, + }); + + if (output.includes('console')) { + let format: winston.Logform.Format; + if (['debug', 'verbose'].includes(level)) { + format = winston.format.combine( + winston.format.metadata(), + winston.format.timestamp(), + winston.format.colorize({ all: 
true }), + winston.format.printf(({ level, message, timestamp, metadata }) => { + return `${timestamp} | ${level.padEnd(18)} | ${message}` + (Object.keys(metadata).length ? ` ${JSON.stringify(metadata)}` : ''); + }) as winston.Logform.Format + ); + } else { + format = winston.format.printf(({ message }) => message) as winston.Logform.Format; + } + + this.logger.add( + new winston.transports.Console({ + format, + }) + ); + } + + if (output.includes('file')) { + const fileLogFormat = winston.format.combine( + winston.format.timestamp(), + winston.format.metadata(), + winston.format.json() + ); + this.logger.add( + new winston.transports.File({ + filename: config.get('logs.file.location'), + format: fileLogFormat, + maxsize: config.get('logs.file.fileSizeMax') as number * 1048576, // config * 1mb + maxFiles: config.get('logs.file.fileCountMax'), + }) + ); + } + } + + log(type: LogTypes, message: string, meta: object = {}) { + const callsite = callsites(); + // We are using the third array element as the structure is as follows: + // [0]: this file + // [1]: Should be LoggerProxy + // [2]: Should point to the caller. + // Note: getting line number is useless because at this point + // We are in runtime, so it means we are looking at compiled js files + const logDetails = {} as IDataObject; + if (callsite[2] !== undefined) { + logDetails.file = basename(callsite[2].getFileName() || ''); + const functionName = callsite[2].getFunctionName(); + if (functionName) { + logDetails.function = functionName; + } + } + this.logger.log(type, message, {...meta, ...logDetails}); + } + + // Convenience methods below + + debug(message: string, meta: object = {}) { + this.log('debug', message, meta); + } + + info(message: string, meta: object = {}) { + this.log('info', message, meta); + } + + error(message: string, meta: object = {}) { + this.log('error', message, meta); + } + + verbose(message: string, meta: object = {}) { + this.log('verbose', message, meta); + } + + warn(message: string, meta: object = {}) { + this.log('warn', message, meta); + } + +} + +let activeLoggerInstance: Logger | undefined; + +export function getLogger() { + if (activeLoggerInstance === undefined) { + activeLoggerInstance = new Logger(); + } + + return activeLoggerInstance; +} diff --git a/packages/cli/src/Push.ts b/packages/cli/src/Push.ts index 94172ac35b..78a20fde57 100644 --- a/packages/cli/src/Push.ts +++ b/packages/cli/src/Push.ts @@ -7,6 +7,10 @@ import { IPushDataType, } from '.'; +import { + LoggerProxy as Logger, +} from 'n8n-workflow'; + export class Push { private channel: sseChannel; private connections: { @@ -24,6 +28,7 @@ export class Push { this.channel.on('disconnect', (channel: string, res: express.Response) => { if (res.req !== undefined) { + Logger.debug(`Remove editor-UI session`, { sessionId: res.req.query.sessionId }); delete this.connections[res.req.query.sessionId as string]; } }); @@ -39,6 +44,8 @@ export class Push { * @memberof Push */ add(sessionId: string, req: express.Request, res: express.Response) { + Logger.debug(`Add editor-UI session`, { sessionId }); + if (this.connections[sessionId] !== undefined) { // Make sure to remove existing connection with the same session // id if one exists already @@ -64,11 +71,12 @@ export class Push { send(type: IPushDataType, data: any, sessionId?: string) { // tslint:disable-line:no-any if (sessionId !== undefined && this.connections[sessionId] === undefined) { - // TODO: Log that properly! 
- console.error(`The session "${sessionId}" is not registred.`); + Logger.error(`The session "${sessionId}" is not registred.`, { sessionId }); return; } + Logger.debug(`Send data of type "${type}" to editor-UI`, { dataType: type, sessionId }); + const sendData: IPushData = { type, data, diff --git a/packages/cli/src/WebhookHelpers.ts b/packages/cli/src/WebhookHelpers.ts index 8913777329..f0d5f63818 100644 --- a/packages/cli/src/WebhookHelpers.ts +++ b/packages/cli/src/WebhookHelpers.ts @@ -30,13 +30,14 @@ import { IWebhookData, IWebhookResponseData, IWorkflowExecuteAdditionalData, + LoggerProxy as Logger, NodeHelpers, Workflow, WorkflowExecuteMode, } from 'n8n-workflow'; -const activeExecutions = ActiveExecutions.getInstance(); +const activeExecutions = ActiveExecutions.getInstance(); /** * Returns all the webhooks which should be created for the give workflow @@ -286,6 +287,8 @@ export function getWorkflowWebhooksBasic(workflow: Workflow): IWebhookData[] { const workflowRunner = new WorkflowRunner(); const executionId = await workflowRunner.run(runData, true, !didSendResponse); + Logger.verbose(`Started execution of workflow "${workflow.name}" from webhook with execution ID ${executionId}`, { executionId }); + // Get a promise which resolves when the workflow did execute and send then response const executePromise = activeExecutions.getPostExecutePromise(executionId) as Promise; executePromise.then((data) => { diff --git a/packages/cli/src/WorkflowExecuteAdditionalData.ts b/packages/cli/src/WorkflowExecuteAdditionalData.ts index 8e0793385a..809a8439f8 100644 --- a/packages/cli/src/WorkflowExecuteAdditionalData.ts +++ b/packages/cli/src/WorkflowExecuteAdditionalData.ts @@ -37,6 +37,7 @@ import { IWorkflowExecuteAdditionalData, IWorkflowExecuteHooks, IWorkflowHooksOptionalParameters, + LoggerProxy as Logger, Workflow, WorkflowExecuteMode, WorkflowHooks, @@ -44,11 +45,10 @@ import { import * as config from '../config'; -import { LessThanOrEqual } from "typeorm"; +import { LessThanOrEqual } from 'typeorm'; const ERROR_TRIGGER_TYPE = config.get('nodes.errorTriggerType') as string; - /** * Checks if there was an error and if errorWorkflow or a trigger is defined. If so it collects * all the data and executes it @@ -85,9 +85,11 @@ function executeErrorWorkflow(workflowData: IWorkflowBase, fullRunData: IRun, mo // Run the error workflow // To avoid an infinite loop do not run the error workflow again if the error-workflow itself failed and it is its own error-workflow. 
if (workflowData.settings !== undefined && workflowData.settings.errorWorkflow && !(mode === 'error' && workflowData.id && workflowData.settings.errorWorkflow.toString() === workflowData.id.toString())) { + Logger.verbose(`Start external error workflow`, { executionId, errorWorkflowId: workflowData.settings.errorWorkflow.toString(), workflowId: workflowData.id }); // If a specific error workflow is set run only that one WorkflowHelpers.executeErrorWorkflow(workflowData.settings.errorWorkflow as string, workflowErrorData); } else if (mode !== 'error' && workflowData.id !== undefined && workflowData.nodes.some((node) => node.type === ERROR_TRIGGER_TYPE)) { + Logger.verbose(`Start internal error workflow`, { executionId, workflowId: workflowData.id }); // If the workflow contains WorkflowHelpers.executeErrorWorkflow(workflowData.id.toString(), workflowErrorData); } @@ -102,6 +104,8 @@ function executeErrorWorkflow(workflowData: IWorkflowBase, fullRunData: IRun, mo let throttling = false; function pruneExecutionData(): void { if (!throttling) { + Logger.verbose('Pruning execution data from database'); + throttling = true; const timeout = config.get('executions.pruneDataTimeout') as number; // in seconds const maxAge = config.get('executions.pruneDataMaxAge') as number; // in h @@ -133,6 +137,7 @@ function hookFunctionsPush(): IWorkflowExecuteHooks { if (this.sessionId === undefined) { return; } + Logger.debug(`Executing hook on node "${nodeName}" (hookFunctionsPush)`, { executionId: this.executionId, sessionId: this.sessionId, workflowId: this.workflowData.id }); const pushInstance = Push.getInstance(); pushInstance.send('nodeExecuteBefore', { @@ -147,6 +152,7 @@ function hookFunctionsPush(): IWorkflowExecuteHooks { if (this.sessionId === undefined) { return; } + Logger.debug(`Executing hook on node "${nodeName}" (hookFunctionsPush)`, { executionId: this.executionId, sessionId: this.sessionId, workflowId: this.workflowData.id }); const pushInstance = Push.getInstance(); pushInstance.send('nodeExecuteAfter', { @@ -158,6 +164,7 @@ function hookFunctionsPush(): IWorkflowExecuteHooks { ], workflowExecuteBefore: [ async function (this: WorkflowHooks): Promise { + Logger.debug(`Executing hook (hookFunctionsPush)`, { executionId: this.executionId, sessionId: this.sessionId, workflowId: this.workflowData.id }); // Push data to session which started the workflow if (this.sessionId === undefined) { return; @@ -168,13 +175,14 @@ function hookFunctionsPush(): IWorkflowExecuteHooks { mode: this.mode, startedAt: new Date(), retryOf: this.retryOf, - workflowId: this.workflowData.id as string, + workflowId: this.workflowData.id, sessionId: this.sessionId as string, workflowName: this.workflowData.name, }, this.sessionId); }, ], workflowExecuteAfter: [ async function (this: WorkflowHooks, fullRunData: IRun, newStaticData: IDataObject): Promise { + Logger.debug(`Executing hook (hookFunctionsPush)`, { executionId: this.executionId, sessionId: this.sessionId, workflowId: this.workflowData.id }); // Push data to session which started the workflow if (this.sessionId === undefined) { return; @@ -195,6 +203,7 @@ function hookFunctionsPush(): IWorkflowExecuteHooks { }; // Push data to editor-ui once workflow finished + Logger.debug(`Save execution progress to database for execution ID ${this.executionId} `, { executionId: this.executionId, workflowId: this.workflowData.id }); // TODO: Look at this again const sendData: IPushDataExecutionFinished = { executionId: this.executionId, @@ -232,6 +241,8 @@ export function 
hookFunctionsPreExecute(parentProcessMode?: string): IWorkflowEx } try { + Logger.debug(`Save execution progress to database for execution ID ${this.executionId} `, { executionId: this.executionId, nodeName }); + const execution = await Db.collections.Execution!.findOne(this.executionId); if (execution === undefined) { @@ -286,7 +297,7 @@ export function hookFunctionsPreExecute(parentProcessMode?: string): IWorkflowEx // For busy machines, we may get "Database is locked" errors. // We do this to prevent crashes and executions ending in `unknown` state. - console.log(`Failed saving execution progress to database for execution ID ${this.executionId}`, err); + Logger.error(`Failed saving execution progress to database for execution ID ${this.executionId} (hookFunctionsPreExecute, nodeExecuteAfter)`, { ...err, executionId: this.executionId, sessionId: this.sessionId, workflowId: this.workflowData.id }); } }, @@ -307,6 +318,7 @@ function hookFunctionsSave(parentProcessMode?: string): IWorkflowExecuteHooks { workflowExecuteBefore: [], workflowExecuteAfter: [ async function (this: WorkflowHooks, fullRunData: IRun, newStaticData: IDataObject): Promise { + Logger.debug(`Executing hook (hookFunctionsSave)`, { executionId: this.executionId, workflowId: this.workflowData.id }); // Prune old execution data if (config.get('executions.pruneData')) { @@ -321,8 +333,7 @@ function hookFunctionsSave(parentProcessMode?: string): IWorkflowExecuteHooks { try { await WorkflowHelpers.saveStaticDataById(this.workflowData.id as string, newStaticData); } catch (e) { - // TODO: Add proper logging! - console.error(`There was a problem saving the workflow with id "${this.workflowData.id}" to save changed staticData: ${e.message}`); + Logger.error(`There was a problem saving the workflow with id "${this.workflowData.id}" to save changed staticData: "${e.message}" (hookFunctionsSave)`, { executionId: this.executionId, workflowId: this.workflowData.id }); } } @@ -375,6 +386,9 @@ function hookFunctionsSave(parentProcessMode?: string): IWorkflowExecuteHooks { fullExecutionData.workflowId = this.workflowData.id.toString(); } + // Leave log message before flatten as that operation increased memory usage a lot and the chance of a crash is highest here + Logger.debug(`Save execution data to database for execution ID ${this.executionId}`, { executionId: this.executionId, workflowId: this.workflowData.id }); + const executionData = ResponseHelper.flattenExecutionData(fullExecutionData); // Save the Execution in DB @@ -420,8 +434,7 @@ function hookFunctionsSaveWorker(): IWorkflowExecuteHooks { try { await WorkflowHelpers.saveStaticDataById(this.workflowData.id as string, newStaticData); } catch (e) { - // TODO: Add proper logging! 
- console.error(`There was a problem saving the workflow with id "${this.workflowData.id}" to save changed staticData: ${e.message}`); + Logger.error(`There was a problem saving the workflow with id "${this.workflowData.id}" to save changed staticData: "${e.message}" (workflowExecuteAfter)`, { sessionId: this.sessionId, workflowId: this.workflowData.id }); } } @@ -606,7 +619,16 @@ export async function executeWorkflow(workflowInfo: IExecuteWorkflowInfo, additi // This one already contains changes to talk to parent process // and get executionID from `activeExecutions` running on main process additionalDataIntegrated.executeWorkflow = additionalData.executeWorkflow; - additionalDataIntegrated.executionTimeoutTimestamp = additionalData.executionTimeoutTimestamp; + + let subworkflowTimeout = additionalData.executionTimeoutTimestamp; + if (workflowData.settings?.executionTimeout !== undefined && workflowData.settings.executionTimeout > 0) { + // We might have received a max timeout timestamp from the parent workflow + // If we did, then we get the minimum time between the two timeouts + // If no timeout was given from the parent, then we use our timeout. + subworkflowTimeout = Math.min(additionalData.executionTimeoutTimestamp || Number.MAX_SAFE_INTEGER, Date.now() + (workflowData.settings.executionTimeout as number * 1000)); + } + + additionalDataIntegrated.executionTimeoutTimestamp = subworkflowTimeout; // Execute the workflow diff --git a/packages/cli/src/WorkflowHelpers.ts b/packages/cli/src/WorkflowHelpers.ts index 7071c1b715..76c264d643 100644 --- a/packages/cli/src/WorkflowHelpers.ts +++ b/packages/cli/src/WorkflowHelpers.ts @@ -18,8 +18,8 @@ import { IRunExecutionData, ITaskData, IWorkflowCredentials, - Workflow, -} from 'n8n-workflow'; + LoggerProxy as Logger, + Workflow,} from 'n8n-workflow'; import * as config from '../config'; @@ -86,7 +86,7 @@ export async function executeErrorWorkflow(workflowId: string, workflowErrorData if (workflowData === undefined) { // The error workflow could not be found - console.error(`ERROR: Calling Error Workflow for "${workflowErrorData.workflow.id}". Could not find error workflow "${workflowId}"`); + Logger.error(`Calling Error Workflow for "${workflowErrorData.workflow.id}". Could not find error workflow "${workflowId}"`, { workflowId }); return; } @@ -105,7 +105,7 @@ export async function executeErrorWorkflow(workflowId: string, workflowErrorData } if (workflowStartNode === undefined) { - console.error(`ERROR: Calling Error Workflow for "${workflowErrorData.workflow.id}". Could not find "${ERROR_TRIGGER_TYPE}" in workflow "${workflowId}"`); + Logger.error(`Calling Error Workflow for "${workflowErrorData.workflow.id}". Could not find "${ERROR_TRIGGER_TYPE}" in workflow "${workflowId}"`); return; } @@ -153,7 +153,7 @@ export async function executeErrorWorkflow(workflowId: string, workflowErrorData const workflowRunner = new WorkflowRunner(); await workflowRunner.run(runData); } catch (error) { - console.error(`ERROR: Calling Error Workflow for "${workflowErrorData.workflow.id}": ${error.message}`); + Logger.error(`Calling Error Workflow for "${workflowErrorData.workflow.id}": "${error.message}"`, { workflowId: workflowErrorData.workflow.id }); } } @@ -315,8 +315,7 @@ export async function saveStaticData(workflow: Workflow): Promise { await saveStaticDataById(workflow.id!, workflow.staticData); workflow.staticData.__dataChanged = false; } catch (e) { - // TODO: Add proper logging! 
- console.error(`There was a problem saving the workflow with id "${workflow.id}" to save changed staticData: ${e.message}`); + Logger.error(`There was a problem saving the workflow with id "${workflow.id}" to save changed staticData: "${e.message}"`, { workflowId: workflow.id }); } } } diff --git a/packages/cli/src/WorkflowRunner.ts b/packages/cli/src/WorkflowRunner.ts index 7ded8d81b3..1cf4e43b35 100644 --- a/packages/cli/src/WorkflowRunner.ts +++ b/packages/cli/src/WorkflowRunner.ts @@ -29,6 +29,7 @@ import { import { ExecutionError, IRun, + LoggerProxy as Logger, Workflow, WorkflowExecuteMode, WorkflowHooks, @@ -177,20 +178,24 @@ export class WorkflowRunner { // Register the active execution const executionId = await this.activeExecutions.add(data, undefined); + Logger.verbose(`Execution for workflow ${data.workflowData.name} was assigned id ${executionId}`, {executionId}); additionalData.hooks = WorkflowExecuteAdditionalData.getWorkflowHooksMain(data, executionId, true); let workflowExecution: PCancelable; if (data.executionData !== undefined) { + Logger.debug(`Execution ID ${executionId} had Execution data. Running with payload.`, {executionId}); const workflowExecute = new WorkflowExecute(additionalData, data.executionMode, data.executionData); workflowExecution = workflowExecute.processRunExecutionData(workflow); } else if (data.runData === undefined || data.startNodes === undefined || data.startNodes.length === 0 || data.destinationNode === undefined) { + Logger.debug(`Execution ID ${executionId} will run executing all nodes.`, {executionId}); // Execute all nodes // Can execute without webhook so go on const workflowExecute = new WorkflowExecute(additionalData, data.executionMode); workflowExecution = workflowExecute.run(workflow, undefined, data.destinationNode); } else { + Logger.debug(`Execution ID ${executionId} is a partial execution.`, {executionId}); // Execute only the nodes between start and destination nodes const workflowExecute = new WorkflowExecute(additionalData, data.executionMode); workflowExecution = workflowExecute.runPartialWorkflow(workflow, data.runData, data.startNodes, data.destinationNode); @@ -450,6 +455,7 @@ export class WorkflowRunner { // Listen to data from the subprocess subprocess.on('message', async (message: IProcessMessage) => { + Logger.debug(`Received child process message of type ${message.type} for execution ID ${executionId}.`, {executionId}); if (message.type === 'start') { // Now that the execution actually started set the timeout again so that does not time out to early. startedAt = new Date(); @@ -491,11 +497,13 @@ export class WorkflowRunner { // Also get informed when the processes does exit especially when it did crash or timed out subprocess.on('exit', async (code, signal) => { if (signal === 'SIGTERM'){ + Logger.debug(`Subprocess for execution ID ${executionId} timed out.`, {executionId}); // Execution timed out and its process has been terminated const timeoutError = new WorkflowOperationError('Workflow execution timed out!'); this.processError(timeoutError, startedAt, data.executionMode, executionId); } else if (code !== 0) { + Logger.debug(`Subprocess for execution ID ${executionId} finished with error code ${code}.`, {executionId}); // Process did exit with error code, so something went wrong. 
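The new verbose/debug lines in WorkflowRunner above label the three ways an execution can start. Pulled out of runMainProcess() into a small classifier purely for illustration (the function, its parameter type, and its labels are assumptions, not code from the repository), the branch conditions read as follows:

type ExecutionKind = 'run-with-payload' | 'full-run' | 'partial-run';

function classifyExecution(data: {
	executionData?: object;
	runData?: object;
	startNodes?: string[];
	destinationNode?: string;
}): ExecutionKind {
	if (data.executionData !== undefined) {
		// Stored execution data exists, so the run continues with that payload.
		return 'run-with-payload';
	}
	if (data.runData === undefined || data.startNodes === undefined || data.startNodes.length === 0 || data.destinationNode === undefined) {
		// Nothing narrows the run down, so every node executes.
		return 'full-run';
	}
	// Only the nodes between the start nodes and the destination node execute.
	return 'partial-run';
}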
const executionError = new WorkflowOperationError('Workflow execution process did crash for an unknown reason!'); diff --git a/packages/cli/src/WorkflowRunnerProcess.ts b/packages/cli/src/WorkflowRunnerProcess.ts index f3c381fb7d..14ae4a1abf 100644 --- a/packages/cli/src/WorkflowRunnerProcess.ts +++ b/packages/cli/src/WorkflowRunnerProcess.ts @@ -20,23 +20,29 @@ import { ExecutionError, IDataObject, IExecuteWorkflowInfo, + ILogger, INodeExecutionData, INodeType, INodeTypeData, IRun, - IRunExecutionData, ITaskData, IWorkflowExecuteAdditionalData, IWorkflowExecuteHooks, + LoggerProxy, Workflow, WorkflowHooks, WorkflowOperationError, } from 'n8n-workflow'; +import { + getLogger, +} from '../src/Logger'; + import * as config from '../config'; export class WorkflowRunnerProcess { data: IWorkflowExecutionDataProcessWithExecution | undefined; + logger: ILogger; startedAt = new Date(); workflow: Workflow | undefined; workflowExecute: WorkflowExecute | undefined; @@ -57,7 +63,13 @@ export class WorkflowRunnerProcess { process.on('SIGTERM', WorkflowRunnerProcess.stopProcess); process.on('SIGINT', WorkflowRunnerProcess.stopProcess); + const logger = this.logger = getLogger(); + LoggerProxy.init(logger); + this.data = inputData; + + logger.verbose('Initializing n8n sub-process', { pid: process.pid, workflowId: this.data.workflowData.id }); + let className: string; let tempNode: INodeType; let filePath: string; @@ -152,6 +164,8 @@ export class WorkflowRunnerProcess { throw e; } + await sendToParentProcess('finishExecution', { executionId, result }); + const returnData = WorkflowHelpers.getDataLastExecutedNodeData(result); return returnData!.data!.main; }; @@ -187,12 +201,7 @@ export class WorkflowRunnerProcess { parameters, }); } catch (error) { - // TODO: Add proper logging - console.error(`There was a problem sending hook: "${hook}"`); - console.error('Parameters:'); - console.error(parameters); - console.error('Error:'); - console.error(error); + this.logger.error(`There was a problem sending hook: "${hook}"`, { parameters, error}); } } diff --git a/packages/core/package.json b/packages/core/package.json index 70e6874e66..69a76944f1 100644 --- a/packages/core/package.json +++ b/packages/core/package.json @@ -1,6 +1,6 @@ { "name": "n8n-core", - "version": "0.68.0", + "version": "0.69.0", "description": "Core functionality of n8n", "license": "SEE LICENSE IN LICENSE.md", "homepage": "https://n8n.io", @@ -47,7 +47,7 @@ "file-type": "^14.6.2", "lodash.get": "^4.4.2", "mime-types": "^2.1.27", - "n8n-workflow": "~0.56.0", + "n8n-workflow": "~0.57.0", "oauth-1.0a": "^2.2.6", "p-cancelable": "^2.0.0", "request": "^2.88.2", diff --git a/packages/core/src/ActiveWorkflows.ts b/packages/core/src/ActiveWorkflows.ts index 320e9fa73f..7b2342b1c6 100644 --- a/packages/core/src/ActiveWorkflows.ts +++ b/packages/core/src/ActiveWorkflows.ts @@ -7,6 +7,7 @@ import { IPollResponse, ITriggerResponse, IWorkflowExecuteAdditionalData, + LoggerProxy as Logger, Workflow, WorkflowActivateMode, WorkflowExecuteMode, @@ -17,6 +18,7 @@ import { IWorkflowData, } from './'; + export class ActiveWorkflows { private workflowData: { [key: string]: IWorkflowData; @@ -163,6 +165,7 @@ export class ActiveWorkflows { // The trigger function to execute when the cron-time got reached const executeTrigger = async () => { + Logger.info(`Polling trigger initiated for workflow "${workflow.name}"`, {workflowName: workflow.name, workflowId: workflow.id}); const pollResponse = await workflow.runPoll(node, pollFunctions); if (pollResponse !== null) { 
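WorkflowRunnerProcess above shows the logging bootstrap used by the sub-process: build the concrete logger once in the CLI package via getLogger() and register it with LoggerProxy, after which code in n8n-core and n8n-workflow logs through the proxy without knowing which backend the CLI configured. A condensed sketch of that wiring (import paths as in the hunks above; this is an illustration, not a drop-in file):

import { getLogger } from '../src/Logger';
import { LoggerProxy } from 'n8n-workflow';

// Runs once per process, before any workflow code executes.
const logger = getLogger();
LoggerProxy.init(logger);
logger.verbose('Logging initialized', { pid: process.pid });

// Library packages never import the concrete logger; they log through the proxy instead:
//   import { LoggerProxy as Logger } from 'n8n-workflow';
//   Logger.debug('Start processing node "Set"', { node: 'Set', workflowId });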
diff --git a/packages/core/src/NodeExecuteFunctions.ts b/packages/core/src/NodeExecuteFunctions.ts index e77e1a3d94..aeeb39c810 100644 --- a/packages/core/src/NodeExecuteFunctions.ts +++ b/packages/core/src/NodeExecuteFunctions.ts @@ -51,6 +51,9 @@ import * as requestPromise from 'request-promise-native'; import { createHmac } from 'crypto'; import { fromBuffer } from 'file-type'; import { lookup } from 'mime-types'; +import { + LoggerProxy as Logger, +} from 'n8n-workflow'; const requestPromiseWithDefaults = requestPromise.defaults({ timeout: 300000, // 5 minutes @@ -188,8 +191,12 @@ export function requestOAuth2(this: IAllExecuteFunctions, credentialsType: strin }; } + Logger.debug(`OAuth2 token for "${credentialsType}" used by node "${node.name}" expired. Should revalidate.`); + const newToken = await token.refresh(tokenRefreshOptions); + Logger.debug(`OAuth2 token for "${credentialsType}" used by node "${node.name}" has been renewed.`); + credentials.oauthTokenData = newToken.data; // Find the name of the credentials @@ -201,6 +208,8 @@ export function requestOAuth2(this: IAllExecuteFunctions, credentialsType: strin // Save the refreshed token await additionalData.credentialsHelper.updateCredentials(name, credentialsType, credentials); + Logger.debug(`OAuth2 token for "${credentialsType}" used by node "${node.name}" has been saved to database successfully.`); + // Make the request again with the new token const newRequestOptions = newToken.sign(requestOptions as clientOAuth2.RequestObject); diff --git a/packages/core/src/WorkflowExecute.ts b/packages/core/src/WorkflowExecute.ts index 40fe227d33..5c73d4c9ef 100644 --- a/packages/core/src/WorkflowExecute.ts +++ b/packages/core/src/WorkflowExecute.ts @@ -15,6 +15,7 @@ import { ITaskDataConnections, IWaitingForExecution, IWorkflowExecuteAdditionalData, + LoggerProxy as Logger, Workflow, WorkflowExecuteMode, WorkflowOperationError, @@ -400,6 +401,13 @@ export class WorkflowExecute { nodeToAdd = parentNode; } + const parentNodesNodeToAdd = workflow.getParentNodes(nodeToAdd as string); + if (parentNodesNodeToAdd.includes(parentNodeName) && nodeSuccessData[outputIndex].length === 0) { + // We do not add the node if there is no input data and the node that should be connected + // is a child of the parent node. Because else it would run a node even though it should be + // specifically not run, as it did not receive any data. 
+ nodeToAdd = undefined; + } if (nodeToAdd === undefined) { // No node has to get added so process @@ -482,6 +490,8 @@ export class WorkflowExecute { * @memberof WorkflowExecute */ processRunExecutionData(workflow: Workflow): PCancelable { + Logger.verbose('Workflow execution started', { workflowId: workflow.id }); + const startedAt = new Date(); const workflowIssues = workflow.checkReadyForExecution(); @@ -502,7 +512,6 @@ export class WorkflowExecute { this.runExecutionData.startData = {}; } - let currentExecutionTry = ''; let lastExecutionTry = ''; @@ -564,6 +573,7 @@ export class WorkflowExecute { executionData = this.runExecutionData.executionData!.nodeExecutionStack.shift() as IExecuteData; executionNode = executionData.node; + Logger.debug(`Start processing node "${executionNode.name}"`, { node: executionNode.name, workflowId: workflow.id }); await this.executeHook('nodeExecuteBefore', [executionNode.name]); // Get the index of the current run @@ -661,7 +671,9 @@ export class WorkflowExecute { } } + Logger.debug(`Running node "${executionNode.name}" started`, { node: executionNode.name, workflowId: workflow.id }); nodeSuccessData = await workflow.runNode(executionData.node, executionData.data, this.runExecutionData, runIndex, this.additionalData, NodeExecuteFunctions, this.mode); + Logger.debug(`Running node "${executionNode.name}" finished successfully`, { node: executionNode.name, workflowId: workflow.id }); if (nodeSuccessData === undefined) { // Node did not get executed @@ -698,6 +710,8 @@ export class WorkflowExecute { message: error.message, stack: error.stack, }; + + Logger.debug(`Running node "${executionNode.name}" finished with error`, { node: executionNode.name, workflowId: workflow.id }); } } @@ -829,8 +843,10 @@ export class WorkflowExecute { const fullRunData = this.getFullRunData(startedAt); if (executionError !== undefined) { + Logger.verbose(`Workflow execution finished with error`, { error: executionError, workflowId: workflow.id }); fullRunData.data.resultData.error = executionError; } else { + Logger.verbose(`Workflow execution finished successfully`, { workflowId: workflow.id }); fullRunData.finished = true; } diff --git a/packages/core/test/Helpers.ts b/packages/core/test/Helpers.ts index 4335e9e1e9..1a1fa9d12d 100644 --- a/packages/core/test/Helpers.ts +++ b/packages/core/test/Helpers.ts @@ -465,6 +465,30 @@ class NodeTypesClass implements INodeTypes { }, }, }, + 'n8n-nodes-base.noOp': { + sourcePath: '', + type: { + description: { + displayName: 'No Operation, do nothing', + name: 'noOp', + icon: 'fa:arrow-right', + group: ['organization'], + version: 1, + description: 'No Operation', + defaults: { + name: 'NoOp', + color: '#b0b0b0', + }, + inputs: ['main'], + outputs: ['main'], + properties: [], + }, + execute(this: IExecuteFunctions): Promise { + const items = this.getInputData(); + return this.prepareOutputData(items); + }, + }, + }, 'n8n-nodes-base.set': { sourcePath: '', type: { diff --git a/packages/core/test/WorkflowExecute.test.ts b/packages/core/test/WorkflowExecute.test.ts index c89cc07fd8..454dd521b5 100644 --- a/packages/core/test/WorkflowExecute.test.ts +++ b/packages/core/test/WorkflowExecute.test.ts @@ -1,8 +1,10 @@ import { IConnections, + ILogger, INode, IRun, + LoggerProxy, Workflow, } from 'n8n-workflow'; @@ -1152,11 +1154,188 @@ describe('WorkflowExecute', () => { }, }, }, + { + description: 'should not use empty data in sibling if parent did not send any data', + input: { + // Leave the workflowData in regular JSON to be able to easily + 
// copy it from/in the UI + workflowData: { + "nodes": [ + { + "parameters": {}, + "name": "Start", + "type": "n8n-nodes-base.start", + "typeVersion": 1, + "position": [ + 250, + 300, + ], + }, + { + "parameters": { + "values": { + "number": [ + { + "name": "value1", + }, + ], + }, + "options": {}, + }, + "name": "Set", + "type": "n8n-nodes-base.set", + "typeVersion": 1, + "position": [ + 450, + 300, + ], + }, + { + "parameters": {}, + "name": "Merge", + "type": "n8n-nodes-base.merge", + "typeVersion": 1, + "position": [ + 1050, + 250, + ], + }, + { + "parameters": { + "conditions": { + "number": [ + { + "value1": "={{$json[\"value1\"]}}", + "operation": "equal", + "value2": 1, + }, + ], + }, + }, + "name": "IF", + "type": "n8n-nodes-base.if", + "typeVersion": 1, + "position": [ + 650, + 300, + ], + }, + { + "parameters": {}, + "name": "NoOpTrue", + "type": "n8n-nodes-base.noOp", + "typeVersion": 1, + "position": [ + 850, + 150, + ], + }, + { + "parameters": {}, + "name": "NoOpFalse", + "type": "n8n-nodes-base.noOp", + "typeVersion": 1, + "position": [ + 850, + 400, + ], + }, + ], + "connections": { + "Start": { + "main": [ + [ + { + "node": "Set", + "type": "main", + "index": 0, + }, + ], + ], + }, + "Set": { + "main": [ + [ + { + "node": "IF", + "type": "main", + "index": 0, + }, + ], + ], + }, + "IF": { + "main": [ + [ + { + "node": "NoOpTrue", + "type": "main", + "index": 0, + }, + { + "node": "Merge", + "type": "main", + "index": 1, + }, + ], + [ + { + "node": "NoOpFalse", + "type": "main", + "index": 0, + }, + ], + ], + }, + "NoOpTrue": { + "main": [ + [ + { + "node": "Merge", + "type": "main", + "index": 0, + }, + ], + ], + }, + }, + }, + }, + output: { + nodeExecutionOrder: [ + 'Start', + 'Set', + 'IF', + 'NoOpFalse', + ], + nodeData: { + IF: [ + [], + ], + NoOpFalse: [ + [ + { + value1: 0, + }, + ], + ], + }, + }, + }, ]; + const fakeLogger = { + log: () => {}, + debug: () => {}, + verbose: () => {}, + info: () => {}, + warn: () => {}, + error: () => {}, + } as ILogger; const executionMode = 'manual'; const nodeTypes = Helpers.NodeTypes(); + LoggerProxy.init(fakeLogger); for (const testData of tests) { test(testData.description, async () => { @@ -1201,7 +1380,6 @@ describe('WorkflowExecute', () => { expect(result.finished).toEqual(true); expect(result.data.executionData!.contextData).toEqual({}); expect(result.data.executionData!.nodeExecutionStack).toEqual([]); - expect(result.data.executionData!.waitingExecution).toEqual({}); }); } diff --git a/packages/editor-ui/package.json b/packages/editor-ui/package.json index 1295acde1b..ccf591fe6b 100644 --- a/packages/editor-ui/package.json +++ b/packages/editor-ui/package.json @@ -1,6 +1,6 @@ { "name": "n8n-editor-ui", - "version": "0.87.0", + "version": "0.88.0", "description": "Workflow Editor UI for n8n", "license": "SEE LICENSE IN LICENSE.md", "homepage": "https://n8n.io", @@ -65,7 +65,7 @@ "lodash.debounce": "^4.0.8", "lodash.get": "^4.4.2", "lodash.set": "^4.3.2", - "n8n-workflow": "~0.56.0", + "n8n-workflow": "~0.57.0", "node-sass": "^4.12.0", "normalize-wheel": "^1.0.1", "prismjs": "^1.17.1", diff --git a/packages/editor-ui/src/Interface.ts b/packages/editor-ui/src/Interface.ts index 745e0f11fb..1ac72fd90a 100644 --- a/packages/editor-ui/src/Interface.ts +++ b/packages/editor-ui/src/Interface.ts @@ -435,7 +435,7 @@ export type MenuItemPosition = 'top' | 'bottom'; export interface IMenuItem { id: string; type: MenuItemType; - position: MenuItemPosition; + position?: MenuItemPosition; properties: ILinkMenuItemProperties; } diff 
--git a/packages/editor-ui/src/components/CredentialsEdit.vue b/packages/editor-ui/src/components/CredentialsEdit.vue index 77a64de7bd..874a4e3451 100644 --- a/packages/editor-ui/src/components/CredentialsEdit.vue +++ b/packages/editor-ui/src/components/CredentialsEdit.vue @@ -50,6 +50,7 @@ \ No newline at end of file diff --git a/packages/editor-ui/src/components/Node.vue b/packages/editor-ui/src/components/Node.vue index 4d8eb6e5ff..335c19e6f4 100644 --- a/packages/editor-ui/src/components/Node.vue +++ b/packages/editor-ui/src/components/Node.vue @@ -47,14 +47,11 @@ import Vue from 'vue'; import { nodeBase } from '@/components/mixins/nodeBase'; +import { nodeHelpers } from '@/components/mixins/nodeHelpers'; import { workflowHelpers } from '@/components/mixins/workflowHelpers'; import { - INode, - INodeIssueObjectProperty, - INodePropertyOptions, INodeTypeDescription, - ITaskData, NodeHelpers, } from 'n8n-workflow'; @@ -62,7 +59,7 @@ import NodeIcon from '@/components/NodeIcon.vue'; import mixins from 'vue-typed-mixins'; -export default mixins(nodeBase, workflowHelpers).extend({ +export default mixins(nodeBase, nodeHelpers, workflowHelpers).extend({ name: 'Node', components: { NodeIcon, @@ -133,41 +130,7 @@ export default mixins(nodeBase, workflowHelpers).extend({ } }, nodeSubtitle (): string | undefined { - if (this.data.notesInFlow) { - return this.data.notes; - } - - if (this.nodeType !== null && this.nodeType.subtitle !== undefined) { - return this.workflow.expression.getSimpleParameterValue(this.data as INode, this.nodeType.subtitle, 'internal') as string | undefined; - } - - if (this.data.parameters.operation !== undefined) { - const operation = this.data.parameters.operation as string; - if (this.nodeType === null) { - return operation; - } - - const operationData = this.nodeType.properties.find((property) => { - return property.name === 'operation'; - }); - if (operationData === undefined) { - return operation; - } - - if (operationData.options === undefined) { - return operation; - } - - const optionData = operationData.options.find((option) => { - return (option as INodePropertyOptions).value === this.data.parameters.operation; - }); - if (optionData === undefined) { - return operation; - } - - return optionData.name; - } - return undefined; + return this.getNodeSubtitle(this.data, this.nodeType, this.workflow); }, workflowRunning (): boolean { return this.$store.getters.isActionActive('workflowRunning'); @@ -186,7 +149,7 @@ export default mixins(nodeBase, workflowHelpers).extend({ this.disableNodes([this.data]); }, executeNode () { - this.$emit('runWorkflow', this.data.name); + this.$emit('runWorkflow', this.data.name, 'Node.executeNode'); }, deleteNode () { Vue.nextTick(() => { diff --git a/packages/editor-ui/src/components/NodeCreateList.vue b/packages/editor-ui/src/components/NodeCreateList.vue index 3d318c2c8d..a00b3ff2d9 100644 --- a/packages/editor-ui/src/components/NodeCreateList.vue +++ b/packages/editor-ui/src/components/NodeCreateList.vue @@ -24,10 +24,13 @@ diff --git a/packages/editor-ui/src/components/RunData.vue b/packages/editor-ui/src/components/RunData.vue index b64029e69b..da72d6ed1b 100644 --- a/packages/editor-ui/src/components/RunData.vue +++ b/packages/editor-ui/src/components/RunData.vue @@ -5,7 +5,7 @@ @@ -228,6 +228,7 @@ import BinaryDataDisplay from '@/components/BinaryDataDisplay.vue'; import NodeErrorView from '@/components/Error/NodeViewError.vue'; import { copyPaste } from '@/components/mixins/copyPaste'; +import { externalHooks } from 
"@/components/mixins/externalHooks"; import { genericHelpers } from '@/components/mixins/genericHelpers'; import { nodeHelpers } from '@/components/mixins/nodeHelpers'; import { workflowRun } from '@/components/mixins/workflowRun'; @@ -239,6 +240,7 @@ const deselectedPlaceholder = '_!^&*'; export default mixins( copyPaste, + externalHooks, genericHelpers, nodeHelpers, workflowRun, @@ -617,8 +619,9 @@ export default mixins( jsonData () { this.refreshDataSize(); }, - displayMode () { + displayMode (newValue, oldValue) { this.closeBinaryDataDisplay(); + this.$externalHooks().run('runData.displayModeChanged', { newValue, oldValue }); }, maxRunIndex () { this.runIndex = Math.min(this.runIndex, this.maxRunIndex); diff --git a/packages/editor-ui/src/components/WorkflowSettings.vue b/packages/editor-ui/src/components/WorkflowSettings.vue index ad1d7fd7e9..53ffc57fd0 100644 --- a/packages/editor-ui/src/components/WorkflowSettings.vue +++ b/packages/editor-ui/src/components/WorkflowSettings.vue @@ -167,6 +167,7 @@