diff --git a/.github/workflows/test-workflows.yml b/.github/workflows/test-workflows.yml new file mode 100644 index 0000000000..8250a5a9b4 --- /dev/null +++ b/.github/workflows/test-workflows.yml @@ -0,0 +1,85 @@ +name: Run test workflows + +on: + schedule: + - cron: "0 2 * * *" + workflow_dispatch: + + +jobs: + run-test-workflows: + + runs-on: ubuntu-latest + + strategy: + matrix: + node-version: [14.x] + steps: + - + name: Checkout + uses: actions/checkout@v2 + with: + path: n8n + - + name: Checkout workflows repo + uses: actions/checkout@v2 + with: + repository: n8n-io/test-workflows + path: test-workflows + - + name: Use Node.js ${{ matrix.node-version }} + uses: actions/setup-node@v1 + with: + node-version: ${{ matrix.node-version }} + - + name: npm install and build + run: | + cd n8n + npm install + npm run bootstrap + npm run build --if-present + env: + CI: true + shell: bash + - + name: Import credentials + run: n8n/packages/cli/bin/n8n import:credentials --input=test-workflows/credentials.json + shell: bash + env: + N8N_ENCRYPTION_KEY: ${{secrets.ENCRYPTION_KEY}} + - + name: Import workflows + run: n8n/packages/cli/bin/n8n import:workflow --separate --input=test-workflows/workflows + shell: bash + env: + N8N_ENCRYPTION_KEY: ${{secrets.ENCRYPTION_KEY}} + - + name: Copy static assets + run: | + cp n8n/assets/n8n-logo.png /tmp/n8n-logo.png + cp n8n/assets/n8n-screenshot.png /tmp/n8n-screenshot.png + cp n8n/node_modules/pdf-parse/test/data/05-versions-space.pdf /tmp/05-versions-space.pdf + cp n8n/node_modules/pdf-parse/test/data/04-valid.pdf /tmp/04-valid.pdf + shell: bash + - + name: Run tests + run: n8n/packages/cli/bin/n8n executeBatch --shallow --skipList=test-workflows/skipList.txt --shortOutput --concurrency=16 --compare=test-workflows/snapshots + shell: bash + env: + N8N_ENCRYPTION_KEY: ${{secrets.ENCRYPTION_KEY}} + - + name: Export credentials + if: always() + run: n8n/packages/cli/bin/n8n export:credentials --output=test-workflows/credentials.json --all --pretty + shell: bash + env: + N8N_ENCRYPTION_KEY: ${{secrets.ENCRYPTION_KEY}} + - + name: Commit and push credential changes + if: always() + run: | + cd test-workflows + git config --global user.name 'n8n test bot' + git config --global user.email 'n8n-test-bot@users.noreply.github.com' + git commit -am "Automated credential update" + git push --force --quiet "https://janober:${{ secrets.TOKEN }}@github.com/n8n-io/test-workflows.git" main:main diff --git a/package.json b/package.json index 44145dbb5e..8855d43d8f 100644 --- a/package.json +++ b/package.json @@ -7,7 +7,7 @@ "build": "lerna exec npm run build", "dev": "lerna exec npm run dev --parallel", "clean:dist": "lerna exec -- rimraf ./dist", - "optimize-svg": "find ./packages -name '*.svg' -print0 | xargs -0 -P16 -L20 npx svgo", + "optimize-svg": "find ./packages -name '*.svg' ! -name 'pipedrive.svg' -print0 | xargs -0 -P16 -L20 npx svgo", "start": "run-script-os", "start:default": "cd packages/cli/bin && ./n8n", "start:windows": "cd packages/cli/bin && n8n", diff --git a/packages/cli/BREAKING-CHANGES.md b/packages/cli/BREAKING-CHANGES.md index 2a762e0476..fb393e0f3c 100644 --- a/packages/cli/BREAKING-CHANGES.md +++ b/packages/cli/BREAKING-CHANGES.md @@ -2,6 +2,26 @@ This list shows all the versions which include breaking changes and how to upgrade. +## 0.130.0 + +### What changed? + +For the Taiga regular and trigger nodes, the server and cloud credentials types are now unified into a single credentials type and the `version` param has been removed. 
Also, the `issue:create` operation now automatically loads the tags as `multiOptions`. + +### When is action necessary? + +If you are using the Taiga nodes, reconnect the credentials. If you are using tags in the `issue:create` operation, reselect them. + +## 0.127.0 + +### What changed? + +For the Zoho node, the `lead:create` operation now requires a "Company" parameter, the parameter "Address" is now inside "Additional Options", and the parameters "Title" and "Is Duplicate Record" were removed. Also, the `lead:delete` operation now returns only the `id` of the deleted lead. + +### When is action necessary? + +If you are using `lead:create` with "Company" or "Address", reset the parameters; for the other two parameters, no action is needed. If you are using the response from `lead:delete`, reselect the `id` key. + ## 0.118.0 ### What changed? diff --git a/packages/cli/commands/Interfaces.d.ts b/packages/cli/commands/Interfaces.d.ts new file mode 100644 index 0000000000..aedd194539 --- /dev/null +++ b/packages/cli/commands/Interfaces.d.ts @@ -0,0 +1,54 @@ +interface IResult { + totalWorkflows: number; + summary: { + failedExecutions: number, + successfulExecutions: number, + warningExecutions: number, + errors: IExecutionError[], + warnings: IExecutionError[], + }; + coveredNodes: { + [nodeType: string]: number + }; + executions: IExecutionResult[]; +} +interface IExecutionResult { + workflowId: string | number; + workflowName: string; + executionTime: number; // Given in seconds with decimals for milliseconds + finished: boolean; + executionStatus: ExecutionStatus; + error?: string; + changes?: string; + coveredNodes: { + [nodeType: string]: number + }; +} + +interface IExecutionError { + workflowId: string | number; + error: string; +} + +interface IWorkflowExecutionProgress { + workflowId: string | number; + status: ExecutionStatus; +} + +interface INodeSpecialCases { + [nodeName: string]: INodeSpecialCase; +} + +interface INodeSpecialCase { + ignoredProperties?: string[]; + capResults?: number; +} + +type ExecutionStatus = 'success' | 'error' | 'warning' | 'running'; + +declare module 'json-diff' { + interface IDiffOptions { + keysOnly?: boolean; + } + export function diff(obj1: unknown, obj2: unknown, diffOptions: IDiffOptions): string; +} diff --git a/packages/cli/commands/execute.ts b/packages/cli/commands/execute.ts index 9eafd28c9e..48ae6a19f0 100644 --- a/packages/cli/commands/execute.ts +++ b/packages/cli/commands/execute.ts @@ -22,7 +22,7 @@ import { WorkflowRunner, } from '../src'; -import { +import { getLogger, } from '../src/Logger'; @@ -46,6 +46,9 @@ export class Execute extends Command { id: flags.string({ description: 'id of the workflow to execute', }), + rawOutput: flags.boolean({ + description: 'Outputs only JSON data, with no other text', + }), }; @@ -183,10 +186,11 @@ export class Execute extends Command { stack: error.stack, }; } - - console.info('Execution was successful:'); - console.info('===================================='); - console.info(JSON.stringify(data, null, 2)); + if (flags.rawOutput === undefined) { + this.log('Execution was successful:'); + this.log('===================================='); + } + this.log(JSON.stringify(data, null, 2)); } catch (e) { console.error('Error executing workflow. See log messages for details.');
logger.error('\nExecution error:'); diff --git a/packages/cli/commands/executeBatch.ts b/packages/cli/commands/executeBatch.ts new file mode 100644 index 0000000000..7bc6bb858d --- /dev/null +++ b/packages/cli/commands/executeBatch.ts @@ -0,0 +1,796 @@ +import * as fs from 'fs'; +import { + Command, + flags, +} from '@oclif/command'; + +import { + UserSettings, +} from 'n8n-core'; + +import { + INode, + INodeExecutionData, + ITaskData, +} from 'n8n-workflow'; + +import { + ActiveExecutions, + CredentialsOverwrites, + CredentialTypes, + Db, + ExternalHooks, + IExecutionsCurrentSummary, + IWorkflowDb, + IWorkflowExecutionDataProcess, + LoadNodesAndCredentials, + NodeTypes, + WorkflowCredentials, + WorkflowRunner, +} from '../src'; + +import { + sep, +} from 'path'; + +import { + diff, +} from 'json-diff'; + +import { + getLogger, +} from '../src/Logger'; + +import { + LoggerProxy, +} from 'n8n-workflow'; + +export class ExecuteBatch extends Command { + static description = '\nExecutes multiple workflows once'; + + static cancelled = false; + + static workflowExecutionsProgress: IWorkflowExecutionProgress[][]; + + static shallow = false; + + static compare: string; + + static snapshot: string; + + static concurrency = 1; + + static debug = false; + + static executionTimeout = 3 * 60 * 1000; + + static examples = [ + `$ n8n executeBatch`, + `$ n8n executeBatch --concurrency=10 --skipList=/data/skipList.txt`, + `$ n8n executeBatch --debug --output=/data/output.json`, + `$ n8n executeBatch --ids=10,13,15 --shortOutput`, + `$ n8n executeBatch --snapshot=/data/snapshots --shallow`, + `$ n8n executeBatch --compare=/data/previousExecutionData --retries=2`, + ]; + + static flags = { + help: flags.help({ char: 'h' }), + debug: flags.boolean({ + description: 'Displays all errors and debug messages.', + }), + ids: flags.string({ + description: 'Specifies the workflow IDs to execute, separated by commas.', + }), + concurrency: flags.integer({ + default: 1, + description: 'How many workflows can run in parallel. Defaults to 1 which means no concurrency.', + }), + output: flags.string({ + description: 'Enables execution saving. You must provide a writable file path to save executions via this param.', + }), + snapshot: flags.string({ + description: 'Enables snapshot saving. You must provide an existing folder to save snapshots via this param.', + }), + compare: flags.string({ + description: 'Compares the current execution with an existing snapshot. You must provide an existing folder where the snapshots are saved.', + }), + shallow: flags.boolean({ + description: 'Compares only whether the attributes output from a node are the same, disregarding nested JSON objects.', + }), + skipList: flags.string({ + description: 'File containing a comma separated list of workflow IDs to skip.', + }), + retries: flags.integer({ + description: 'Retries failed workflows up to N times. Default is 1. Set 0 to disable.', + default: 1, + }), + shortOutput: flags.boolean({ + description: 'Omits the full execution information from output, displaying only the summary.', + }), + }; + + /** + * Gracefully handles exit.
+ * @param {boolean | number} skipExit Whether to skip the exit call; when invoked as a signal handler, this receives the signal instead + */ + static async stopProcess(skipExit: boolean | number = false) { + + if (ExecuteBatch.cancelled === true) { + process.exit(0); + } + + ExecuteBatch.cancelled = true; + const activeExecutionsInstance = ActiveExecutions.getInstance(); + const stopPromises = activeExecutionsInstance.getActiveExecutions().map(async execution => { + activeExecutionsInstance.stopExecution(execution.id); + }); + + await Promise.allSettled(stopPromises); + + setTimeout(() => { + process.exit(0); + }, 30000); + + let executingWorkflows = activeExecutionsInstance.getActiveExecutions() as IExecutionsCurrentSummary[]; + + let count = 0; + while (executingWorkflows.length !== 0) { + if (count++ % 4 === 0) { + console.log(`Waiting for ${executingWorkflows.length} active executions to finish...`); + executingWorkflows.map(execution => { + console.log(` - Execution ID ${execution.id}, workflow ID: ${execution.workflowId}`); + }); + } + await new Promise((resolve) => { + setTimeout(resolve, 500); + }); + executingWorkflows = activeExecutionsInstance.getActiveExecutions(); + } + // We may receive `true` here, but when called from `process.on` + // we get the signal (SIGINT, etc.) instead + if (skipExit !== true) { + process.exit(0); + } + } + + formatJsonOutput(data: object) { + return JSON.stringify(data, null, 2); + } + + shouldBeConsideredAsWarning(errorMessage: string) { + + const warningStrings = [ + 'refresh token is invalid', + 'unable to connect to', + 'econnreset', + '429', + 'econnrefused', + 'missing a required parameter', + ]; + + errorMessage = errorMessage.toLowerCase(); + + for (let i = 0; i < warningStrings.length; i++) { + if (errorMessage.includes(warningStrings[i])) { + return true; + } + } + + return false; + } + + + async run() { + + process.on('SIGTERM', ExecuteBatch.stopProcess); + process.on('SIGINT', ExecuteBatch.stopProcess); + + const logger = getLogger(); + LoggerProxy.init(logger); + + const { flags } = this.parse(ExecuteBatch); + + ExecuteBatch.debug = flags.debug === true; + ExecuteBatch.concurrency = flags.concurrency || 1; + + const ids: number[] = []; + const skipIds: number[] = []; + + if (flags.snapshot !== undefined) { + if (fs.existsSync(flags.snapshot)) { + if (!fs.lstatSync(flags.snapshot).isDirectory()) { + console.log(`The parameter --snapshot must be an existing directory`); + return; + } + } else { + console.log(`The parameter --snapshot must be an existing directory`); + return; + } + + ExecuteBatch.snapshot = flags.snapshot; + } + if (flags.compare !== undefined) { + if (fs.existsSync(flags.compare)) { + if (!fs.lstatSync(flags.compare).isDirectory()) { + console.log(`The parameter --compare must be an existing directory`); + return; + } + } else { + console.log(`The parameter --compare must be an existing directory`); + return; + } + + ExecuteBatch.compare = flags.compare; + } + + if (flags.output !== undefined) { + if (fs.existsSync(flags.output)) { + if (fs.lstatSync(flags.output).isDirectory()) { + console.log(`The parameter --output must be a writable file`); + return; + } + } + } + + if (flags.ids !== undefined) { + const paramIds = flags.ids.split(','); + const re = /\d+/; + const matchedIds = paramIds.filter(id => id.match(re)).map(id => parseInt(id.trim(), 10)); + + if (matchedIds.length === 0) { + console.log(`The parameter --ids must be a list of numeric IDs separated by a comma.`); + return; + } + + ids.push(...matchedIds); + } + + if (flags.skipList !== undefined) { + if
(fs.existsSync(flags.skipList)) { + const contents = fs.readFileSync(flags.skipList, { encoding: 'utf-8' }); + skipIds.push(...contents.split(',').map(id => parseInt(id.trim(), 10))); + } else { + console.log('Skip list file not found. Exiting.'); + return; + } + } + + if (flags.shallow === true) { + ExecuteBatch.shallow = true; + } + + + // Start directly with the init of the database to improve startup time + const startDbInitPromise = Db.init(); + + // Load all node and credential types + const loadNodesAndCredentials = LoadNodesAndCredentials(); + const loadNodesAndCredentialsPromise = loadNodesAndCredentials.init(); + + // Make sure the settings exist + await UserSettings.prepareUserSettings(); + + // Wait till the database is ready + await startDbInitPromise; + + let allWorkflows; + + const query = Db.collections!.Workflow!.createQueryBuilder('workflows'); + + if (ids.length > 0) { + query.andWhere(`workflows.id in (:...ids)`, { ids }); + } + + if (skipIds.length > 0) { + query.andWhere(`workflows.id not in (:...skipIds)`, { skipIds }); + } + + allWorkflows = await query.getMany() as IWorkflowDb[]; + + if (ExecuteBatch.debug === true) { + process.stdout.write(`Found ${allWorkflows.length} workflows to execute.\n`); + } + + // Wait till the n8n-packages have been read + await loadNodesAndCredentialsPromise; + + // Load the credentials overwrites if any exist + await CredentialsOverwrites().init(); + + // Load all external hooks + const externalHooks = ExternalHooks(); + await externalHooks.init(); + + // Add the found types to an instance other parts of the application can use + const nodeTypes = NodeTypes(); + await nodeTypes.init(loadNodesAndCredentials.nodeTypes); + const credentialTypes = CredentialTypes(); + await credentialTypes.init(loadNodesAndCredentials.credentialTypes); + + // Send a shallow copy of allWorkflows so we still have all workflow data. + const results = await this.runTests([...allWorkflows]); + + let { retries } = flags; + + while (retries > 0 && (results.summary.warningExecutions + results.summary.failedExecutions > 0) && ExecuteBatch.cancelled === false) { + const failedWorkflowIds = results.summary.errors.map(execution => execution.workflowId); + failedWorkflowIds.push(...results.summary.warnings.map(execution => execution.workflowId)); + + const newWorkflowList = allWorkflows.filter(workflow => failedWorkflowIds.includes(workflow.id)); + + const retryResults = await this.runTests(newWorkflowList); + + this.mergeResults(results, retryResults); + // By now, `results` has been updated with the new successful executions. 
+ retries--; + } + + if (flags.output !== undefined) { + fs.writeFileSync(flags.output, this.formatJsonOutput(results)); + console.log('\nExecution finished.'); + console.log('Summary:'); + console.log(`\tSuccess: ${results.summary.successfulExecutions}`); + console.log(`\tFailures: ${results.summary.failedExecutions}`); + console.log(`\tWarnings: ${results.summary.warningExecutions}`); + console.log('\nNodes successfully tested:'); + Object.entries(results.coveredNodes).forEach(([nodeName, nodeCount]) => { + console.log(`\t${nodeName}: ${nodeCount}`); + }); + console.log('\nCheck the JSON file for more details.'); + } else { + if (flags.shortOutput === true) { + console.log(this.formatJsonOutput({ ...results, executions: results.executions.filter(execution => execution.executionStatus !== 'success') })); + } else { + console.log(this.formatJsonOutput(results)); + } + } + + await ExecuteBatch.stopProcess(true); + + if (results.summary.failedExecutions > 0) { + this.exit(1); + } + this.exit(0); + + } + + mergeResults(results: IResult, retryResults: IResult) { + + if (retryResults.summary.successfulExecutions === 0) { + // Nothing to replace. + return; + } + + // Find successful executions and replace them in the previous result. + retryResults.executions.forEach(newExecution => { + if (newExecution.executionStatus === 'success') { + // Remove previous execution from list. + results.executions = results.executions.filter(previousExecutions => previousExecutions.workflowId !== newExecution.workflowId); + + const errorIndex = results.summary.errors.findIndex(summaryInformation => summaryInformation.workflowId === newExecution.workflowId); + if (errorIndex !== -1) { + // This workflow errored previously. Decrement error count. + results.summary.failedExecutions--; + // Remove from the list of errors. + results.summary.errors.splice(errorIndex, 1); + } + + const warningIndex = results.summary.warnings.findIndex(summaryInformation => summaryInformation.workflowId === newExecution.workflowId); + if (warningIndex !== -1) { + // This workflow warned previously. Decrement warning count. + results.summary.warningExecutions--; + // Remove from the list of warnings. + results.summary.warnings.splice(warningIndex, 1); + } + // Increment successful executions count and push it to all executions array. + results.summary.successfulExecutions++; + results.executions.push(newExecution); + } + }); + } + + async runTests(allWorkflows: IWorkflowDb[]): Promise<IResult> { + const result: IResult = { + totalWorkflows: allWorkflows.length, + summary: { + failedExecutions: 0, + warningExecutions: 0, + successfulExecutions: 0, + errors: [], + warnings: [], + }, + coveredNodes: {}, + executions: [], + }; + + if (ExecuteBatch.debug) { + this.initializeLogs(); + } + + return new Promise(async (res) => { + const promisesArray = []; + for (let i = 0; i < ExecuteBatch.concurrency; i++) { + const promise = new Promise(async (resolve) => { + let workflow: IWorkflowDb | undefined; + while (allWorkflows.length > 0) { + workflow = allWorkflows.shift(); + if (ExecuteBatch.cancelled === true) { + process.stdout.write(`Thread ${i + 1} resolving and quitting.`); + resolve(true); + break; + } + // This check shouldn't really be needed, + // but it's a concurrency precaution.
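+ // (`Array.prototype.shift` is typed as possibly returning `undefined`, so this guard also keeps the type checker satisfied.)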
+ if (workflow === undefined) { + resolve(true); + return; + } + + if (ExecuteBatch.debug) { + ExecuteBatch.workflowExecutionsProgress[i].push({ + workflowId: workflow.id, + status: 'running', + }); + this.updateStatus(); + } + + await this.startThread(workflow).then((executionResult) => { + if (ExecuteBatch.debug) { + ExecuteBatch.workflowExecutionsProgress[i].pop(); + } + result.executions.push(executionResult); + if (executionResult.executionStatus === 'success') { + if (ExecuteBatch.debug) { + ExecuteBatch.workflowExecutionsProgress[i].push({ + workflowId: workflow!.id, + status: 'success', + }); + this.updateStatus(); + } + result.summary.successfulExecutions++; + const nodeNames = Object.keys(executionResult.coveredNodes); + + nodeNames.map(nodeName => { + if (result.coveredNodes[nodeName] === undefined) { + result.coveredNodes[nodeName] = 0; + } + result.coveredNodes[nodeName] += executionResult.coveredNodes[nodeName]; + }); + } else if (executionResult.executionStatus === 'warning') { + result.summary.warningExecutions++; + result.summary.warnings.push({ + workflowId: executionResult.workflowId, + error: executionResult.error!, + }); + if (ExecuteBatch.debug) { + ExecuteBatch.workflowExecutionsProgress[i].push({ + workflowId: workflow!.id, + status: 'warning', + }); + this.updateStatus(); + } + } else if (executionResult.executionStatus === 'error') { + result.summary.failedExecutions++; + result.summary.errors.push({ + workflowId: executionResult.workflowId, + error: executionResult.error!, + }); + if (ExecuteBatch.debug) { + ExecuteBatch.workflowExecutionsProgress[i].push({ + workflowId: workflow!.id, + status: 'error', + }); + this.updateStatus(); + } + } else { + throw new Error('Wrong execution status - cannot proceed'); + } + }); + } + + resolve(true); + }); + + promisesArray.push(promise); + } + + await Promise.allSettled(promisesArray); + + res(result); + }); + } + + updateStatus() { + + if (ExecuteBatch.cancelled === true) { + return; + } + + if (process.stdout.isTTY === true) { + process.stdout.moveCursor(0, - (ExecuteBatch.concurrency)); + process.stdout.cursorTo(0); + process.stdout.clearLine(0); + } + + + ExecuteBatch.workflowExecutionsProgress.map((concurrentThread, index) => { + let message = `${index + 1}: `; + concurrentThread.map((executionItem, workflowIndex) => { + let openColor = '\x1b[0m'; + const closeColor = '\x1b[0m'; + switch (executionItem.status) { + case 'success': + openColor = '\x1b[32m'; + break; + case 'error': + openColor = '\x1b[31m'; + break; + case 'warning': + openColor = '\x1b[33m'; + break; + default: + break; + } + message += (workflowIndex > 0 ? ', ' : '') + `${openColor}${executionItem.workflowId}${closeColor}`; + }); + if (process.stdout.isTTY === true) { + process.stdout.cursorTo(0); + process.stdout.clearLine(0); + } + process.stdout.write(message + '\n'); + }); + } + + initializeLogs() { + process.stdout.write('**********************************************\n'); + process.stdout.write(' n8n test workflows\n'); + process.stdout.write('**********************************************\n'); + process.stdout.write('\n'); + process.stdout.write('Batch number:\n'); + ExecuteBatch.workflowExecutionsProgress = []; + for (let i = 0; i < ExecuteBatch.concurrency; i++) { + ExecuteBatch.workflowExecutionsProgress.push([]); + process.stdout.write(`${i + 1}: \n`); + } + } + + startThread(workflowData: IWorkflowDb): Promise<IExecutionResult> { + // This will be the object returned by the promise. + // It will be updated according to execution progress below.
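+ // `coveredNodes` tallies how many nodes of each type the workflow contains; it is filled in while scanning the workflow's nodes below.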
+ const executionResult: IExecutionResult = { + workflowId: workflowData.id, + workflowName: workflowData.name, + executionTime: 0, + finished: false, + executionStatus: 'running', + coveredNodes: {}, + }; + + + + const requiredNodeTypes = ['n8n-nodes-base.start']; + let startNode: INode | undefined = undefined; + for (const node of workflowData.nodes) { + if (requiredNodeTypes.includes(node.type)) { + startNode = node; + break; + } + } + + // We have a cool feature here. + // On each node, on the Settings tab in the node editor, you can change + // the `Notes` field to add special cases for comparison and snapshots. + // You need to set one configuration per line with the following possible keys: + // CAP_RESULTS_LENGTH=x where x is a number. Cap the number of rows from this node to x. + // This means if you set CAP_RESULTS_LENGTH=1 we will have only 1 row in the output + // IGNORED_PROPERTIES=x,y,z where x, y and z are JSON property names. Removes these + // properties from the JSON object (useful for optional properties that can + // cause the comparison to report changes when there are none). + const nodeEdgeCases = {} as INodeSpecialCases; + workflowData.nodes.forEach(node => { + executionResult.coveredNodes[node.type] = (executionResult.coveredNodes[node.type] || 0) + 1; + if (node.notes !== undefined && node.notes !== '') { + node.notes.split('\n').forEach(note => { + const parts = note.split('='); + if (parts.length === 2) { + if (nodeEdgeCases[node.name] === undefined) { + nodeEdgeCases[node.name] = {} as INodeSpecialCase; + } + if (parts[0] === 'CAP_RESULTS_LENGTH') { + nodeEdgeCases[node.name].capResults = parseInt(parts[1], 10); + } else if (parts[0] === 'IGNORED_PROPERTIES') { + nodeEdgeCases[node.name].ignoredProperties = parts[1].split(',').map(property => property.trim()); + } + } + }); + } + }); + + return new Promise(async (resolve) => { + if (startNode === undefined) { + // If the workflow does not contain a start-node we cannot know what + // should be executed and with which data to start. + executionResult.error = 'Workflow cannot be started as it does not contain a "Start" node.'; + executionResult.executionStatus = 'warning'; + resolve(executionResult); + } + + let gotCancel = false; + + // Times out the execution after 3 minutes (see `executionTimeout` above). + const timeoutTimer = setTimeout(() => { + gotCancel = true; + executionResult.error = 'Workflow execution timed out.'; + executionResult.executionStatus = 'warning'; + resolve(executionResult); + }, ExecuteBatch.executionTimeout); + + + try { + const credentials = await WorkflowCredentials(workflowData!.nodes); + + const runData: IWorkflowExecutionDataProcess = { + credentials, + executionMode: 'cli', + startNodes: [startNode!.name], + workflowData: workflowData!, + }; + + const workflowRunner = new WorkflowRunner(); + const executionId = await workflowRunner.run(runData); + + const activeExecutions = ActiveExecutions.getInstance(); + const data = await activeExecutions.getPostExecutePromise(executionId); + if (gotCancel || ExecuteBatch.cancelled === true) { + clearTimeout(timeoutTimer); + // The promise was settled already so we simply ignore.
+ return; + } + + if (data === undefined) { + executionResult.error = 'Workflow did not return any data.'; + executionResult.executionStatus = 'error'; + } else { + executionResult.executionTime = (Date.parse(data.stoppedAt as unknown as string) - Date.parse(data.startedAt as unknown as string)) / 1000; + executionResult.finished = (data?.finished !== undefined) as boolean; + + if (data.data.resultData.error) { + executionResult.error = + data.data.resultData.error.hasOwnProperty('description') ? + // @ts-ignore + data.data.resultData.error.description : data.data.resultData.error.message; + if (data.data.resultData.lastNodeExecuted !== undefined) { + executionResult.error += ` on node ${data.data.resultData.lastNodeExecuted}`; + } + executionResult.executionStatus = 'error'; + + if (this.shouldBeConsideredAsWarning(executionResult.error || '')) { + executionResult.executionStatus = 'warning'; + } + } else { + if (ExecuteBatch.shallow === true) { + // What this does is guarantee that top-level attributes + // from the JSON are kept and are of the same type. + + // We convert nested JSON objects to a simple {object:true} + // and we convert nested arrays to ['json array'] + + // This reduces the chance of false positives but may + // result in not detecting deeper changes. + Object.keys(data.data.resultData.runData).map((nodeName: string) => { + data.data.resultData.runData[nodeName].map((taskData: ITaskData) => { + if (taskData.data === undefined) { + return; + } + Object.keys(taskData.data).map(connectionName => { + const connection = taskData.data![connectionName] as Array<INodeExecutionData[] | null>; + connection.map(executionDataArray => { + if (executionDataArray === null) { + return; + } + + if (nodeEdgeCases[nodeName] !== undefined && nodeEdgeCases[nodeName].capResults !== undefined) { + executionDataArray.splice(nodeEdgeCases[nodeName].capResults!); + } + + executionDataArray.map(executionData => { + if (executionData.json === undefined) { + return; + } + if (nodeEdgeCases[nodeName] !== undefined && nodeEdgeCases[nodeName].ignoredProperties !== undefined) { + nodeEdgeCases[nodeName].ignoredProperties!.forEach(ignoredProperty => delete executionData.json[ignoredProperty]); + } + + const jsonProperties = executionData.json; + + const nodeOutputAttributes = Object.keys(jsonProperties); + nodeOutputAttributes.map(attributeName => { + if (Array.isArray(jsonProperties[attributeName])) { + jsonProperties[attributeName] = ['json array']; + } else if (typeof jsonProperties[attributeName] === 'object') { + jsonProperties[attributeName] = { object: true }; + } + }); + }); + }); + + }); + }); + }); + } else { + // If not using shallow comparison, we only apply the nodeEdgeCases.
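+ // (i.e. apply CAP_RESULTS_LENGTH and IGNORED_PROPERTIES from the node notes without flattening nested data)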
+ const specialCases = Object.keys(nodeEdgeCases); + + specialCases.forEach(nodeName => { + data.data.resultData.runData[nodeName].map((taskData: ITaskData) => { + if (taskData.data === undefined) { + return; + } + Object.keys(taskData.data).map(connectionName => { + const connection = taskData.data![connectionName] as Array<INodeExecutionData[] | null>; + connection.map(executionDataArray => { + if (executionDataArray === null) { + return; + } + + if (nodeEdgeCases[nodeName].capResults !== undefined) { + executionDataArray.splice(nodeEdgeCases[nodeName].capResults!); + } + + if (nodeEdgeCases[nodeName].ignoredProperties !== undefined) { + executionDataArray.map(executionData => { + if (executionData.json === undefined) { + return; + } + nodeEdgeCases[nodeName].ignoredProperties!.forEach(ignoredProperty => delete executionData.json[ignoredProperty]); + }); + } + }); + + }); + }); + }); + } + + const serializedData = this.formatJsonOutput(data); + if (ExecuteBatch.compare === undefined) { + executionResult.executionStatus = 'success'; + } else { + const fileName = (ExecuteBatch.compare.endsWith(sep) ? ExecuteBatch.compare : ExecuteBatch.compare + sep) + `${workflowData.id}-snapshot.json`; + if (fs.existsSync(fileName) === true) { + + const contents = fs.readFileSync(fileName, { encoding: 'utf-8' }); + + const changes = diff(JSON.parse(contents), data, { keysOnly: true }); + + if (changes !== undefined) { + // We have structural changes. Report them. + executionResult.error = `Workflow may contain breaking changes`; + executionResult.changes = changes; + executionResult.executionStatus = 'error'; + } else { + executionResult.executionStatus = 'success'; + } + } else { + executionResult.error = 'Snapshot not found.'; + executionResult.executionStatus = 'warning'; + } + } + // Save snapshots only after comparing - this is to make sure we're updating + // only after comparing to the existing version. + if (ExecuteBatch.snapshot !== undefined) { + const fileName = (ExecuteBatch.snapshot.endsWith(sep) ?
ExecuteBatch.snapshot : ExecuteBatch.snapshot + sep) + `${workflowData.id}-snapshot.json`; + fs.writeFileSync(fileName, serializedData); + } + } + } + } catch (e) { + executionResult.error = 'Workflow failed to execute.'; + executionResult.executionStatus = 'error'; + } + clearTimeout(timeoutTimer); + resolve(executionResult); + }); + } + +} diff --git a/packages/cli/commands/import/credentials.ts b/packages/cli/commands/import/credentials.ts index b038f33693..af2d7e0d46 100644 --- a/packages/cli/commands/import/credentials.ts +++ b/packages/cli/commands/import/credentials.ts @@ -65,6 +65,9 @@ export class ImportCredentialsCommand extends Command { try { await Db.init(); + + // Make sure the settings exist + await UserSettings.prepareUserSettings(); let i; const encryptionKey = await UserSettings.getEncryptionKey(); diff --git a/packages/cli/commands/import/workflow.ts b/packages/cli/commands/import/workflow.ts index 5b31041a44..65ddb77000 100644 --- a/packages/cli/commands/import/workflow.ts +++ b/packages/cli/commands/import/workflow.ts @@ -18,6 +18,9 @@ import { import * as fs from 'fs'; import * as glob from 'glob-promise'; import * as path from 'path'; +import { + UserSettings, +} from 'n8n-core'; export class ImportWorkflowsCommand extends Command { static description = 'Import workflows'; @@ -60,6 +63,9 @@ export class ImportWorkflowsCommand extends Command { try { await Db.init(); + + // Make sure the settings exist + await UserSettings.prepareUserSettings(); let i; if (flags.separate) { const files = await glob((flags.input.endsWith(path.sep) ? flags.input : flags.input + path.sep) + '*.json'); diff --git a/packages/cli/commands/list/workflow.ts b/packages/cli/commands/list/workflow.ts new file mode 100644 index 0000000000..6fdca2e253 --- /dev/null +++ b/packages/cli/commands/list/workflow.ts @@ -0,0 +1,67 @@ +import { + Command, + flags, +} from '@oclif/command'; + +import { + IDataObject +} from 'n8n-workflow'; + +import { + Db, +} from "../../src"; + + +export class ListWorkflowCommand extends Command { + static description = '\nList workflows'; + + static examples = [ + '$ n8n list:workflow', + '$ n8n list:workflow --active=true --onlyId', + '$ n8n list:workflow --active=false', + ]; + + static flags = { + help: flags.help({ char: 'h' }), + active: flags.string({ + description: 'Filters workflows by active status. 
Can be true or false', + }), + onlyId: flags.boolean({ + description: 'Outputs workflow IDs only, one per line.', + }), + }; + + async run() { + const { flags } = this.parse(ListWorkflowCommand); + + if (flags.active !== undefined && !['true', 'false'].includes(flags.active)) { + this.error('The --active flag has to be passed using true or false'); + } + + try { + await Db.init(); + + const findQuery: IDataObject = {}; + if (flags.active !== undefined) { + findQuery.active = flags.active === 'true'; + } + + const workflows = await Db.collections.Workflow!.find(findQuery); + if (flags.onlyId) { + workflows.forEach(workflow => console.log(workflow.id)); + } else { + workflows.forEach(workflow => console.log(workflow.id + "|" + workflow.name)); + } + + + } catch (e) { + console.error('\nGOT ERROR'); + console.log('===================================='); + console.error(e.message); + console.error(e.stack); + this.exit(1); + } + + this.exit(); + } +} diff --git a/packages/cli/package.json b/packages/cli/package.json index 4c095c3960..19c8102235 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -1,6 +1,6 @@ { "name": "n8n", - "version": "0.126.0", + "version": "0.129.0", "description": "n8n Workflow Automation Tool", "license": "SEE LICENSE IN LICENSE.md", "homepage": "https://n8n.io", @@ -82,6 +82,7 @@ "dependencies": { "@oclif/command": "^1.5.18", "@oclif/errors": "^1.2.2", + "@types/json-diff": "^0.5.1", "@types/jsonwebtoken": "^8.5.2", "basic-auth": "^2.0.1", "bcryptjs": "^2.4.3", @@ -101,15 +102,16 @@ "glob-promise": "^3.4.0", "google-timezones-json": "^1.0.2", "inquirer": "^7.0.1", + "json-diff": "^0.5.4", "jsonwebtoken": "^8.5.1", "jwks-rsa": "~1.12.1", "localtunnel": "^2.0.0", "lodash.get": "^4.4.2", "mysql2": "~2.2.0", - "n8n-core": "~0.75.0", - "n8n-editor-ui": "~0.96.0", - "n8n-nodes-base": "~0.123.0", - "n8n-workflow": "~0.62.0", + "n8n-core": "~0.77.0", + "n8n-editor-ui": "~0.98.0", + "n8n-nodes-base": "~0.126.0", + "n8n-workflow": "~0.63.0", "oauth-1.0a": "^2.2.6", "open": "^7.0.0", "pg": "^8.3.0", diff --git a/packages/cli/src/Interfaces.ts b/packages/cli/src/Interfaces.ts index df965c50e7..e1c09cf55e 100644 --- a/packages/cli/src/Interfaces.ts +++ b/packages/cli/src/Interfaces.ts @@ -188,6 +188,7 @@ export interface IExecutionsListResponse { count: number; // results: IExecutionShortResponse[]; results: IExecutionsSummary[]; + estimated: boolean; } export interface IExecutionsStopData { diff --git a/packages/cli/src/Server.ts b/packages/cli/src/Server.ts index 0a31144999..ee2f50f2cf 100644 --- a/packages/cli/src/Server.ts +++ b/packages/cli/src/Server.ts @@ -33,6 +33,7 @@ import { CredentialsHelper, CredentialsOverwrites, CredentialTypes, + DatabaseType, Db, ExternalHooks, GenericHelpers, @@ -88,6 +89,7 @@ import { IRunData, IWorkflowBase, IWorkflowCredentials, + LoggerProxy, Workflow, WorkflowExecuteMode, } from 'n8n-workflow'; @@ -1612,8 +1614,7 @@ class App { executingWorkflowIds.push(...this.activeExecutionsInstance.getActiveExecutions().map(execution => execution.id.toString()) as string[]); const countFilter = JSON.parse(JSON.stringify(filter)); - countFilter.select = ['id']; - countFilter.where = {id: Not(In(executingWorkflowIds))}; + countFilter.id = Not(In(executingWorkflowIds)); const resultsQuery = await Db.collections.Execution! 
.createQueryBuilder("execution") @@ -1645,10 +1646,10 @@ const resultsPromise = resultsQuery.getMany(); - const countPromise = Db.collections.Execution!.count(countFilter); + const countPromise = getExecutionsCount(countFilter); const results: IExecutionFlattedDb[] = await resultsPromise; - const count = await countPromise; + const countedObjects = await countPromise; const returnResults: IExecutionsSummary[] = []; @@ -1667,8 +1668,9 @@ } return { - count, + count: countedObjects.count, results: returnResults, + estimated: countedObjects.estimate, }; })); @@ -2161,3 +2163,35 @@ export async function start(): Promise<void> { await app.externalHooks.run('n8n.ready', [app]); }); } + +async function getExecutionsCount(countFilter: IDataObject): Promise<{ count: number; estimate: boolean; }> { + + const dbType = await GenericHelpers.getConfigValue('database.type') as DatabaseType; + const filteredFields = Object.keys(countFilter).filter(field => field !== 'id'); + + // Do a regular count for databases other than Postgres, and also + // if we are filtering based on the workflowId or finished fields. + if (dbType !== 'postgresdb' || filteredFields.length > 0) { + const count = await Db.collections.Execution!.count(countFilter); + return { count, estimate: false }; + } + + try { + // Get an estimate of the row count. + const estimateRowsNumberSql = "SELECT n_live_tup FROM pg_stat_all_tables WHERE relname = 'execution_entity';"; + const rows: Array<{ n_live_tup: string }> = await Db.collections.Execution!.query(estimateRowsNumberSql); + + const estimate = parseInt(rows[0].n_live_tup, 10); + // If over 100k, return just the estimate; below that, fall through to the real count, + // as even a full table scan should not take that long. + if (estimate > 100000) {
+ return { count: estimate, estimate: true }; + } + } catch (err) { + LoggerProxy.warn('Unable to get executions count from postgres: ' + err); + } + + const count = await Db.collections.Execution!.count(countFilter); + return { count, estimate: false }; +} diff --git a/packages/cli/src/WorkflowExecuteAdditionalData.ts b/packages/cli/src/WorkflowExecuteAdditionalData.ts index 9d0478d997..679ba945eb 100644 --- a/packages/cli/src/WorkflowExecuteAdditionalData.ts +++ b/packages/cli/src/WorkflowExecuteAdditionalData.ts @@ -387,9 +387,9 @@ function hookFunctionsSave(parentProcessMode?: string): IWorkflowExecuteHooks { } // Leave log message before flatten as that operation increased memory usage a lot and the chance of a crash is highest here - Logger.debug(`Save execution data to database for execution ID ${this.executionId}`, { - executionId: this.executionId, - workflowId: this.workflowData.id, + Logger.debug(`Save execution data to database for execution ID ${this.executionId}`, { + executionId: this.executionId, + workflowId: this.workflowData.id, finished: fullExecutionData.finished, stoppedAt: fullExecutionData.stoppedAt, }); @@ -409,12 +409,12 @@ function hookFunctionsSave(parentProcessMode?: string): IWorkflowExecuteHooks { executeErrorWorkflow(this.workflowData, fullRunData, this.mode, this.executionId, this.retryOf); } } catch (error) { - Logger.error(`Failed saving execution data to DB on execution ID ${this.executionId}`, { - executionId: this.executionId, + Logger.error(`Failed saving execution data to DB on execution ID ${this.executionId}`, { + executionId: this.executionId, workflowId: this.workflowData.id, error, }); - + if (!isManualMode) { executeErrorWorkflow(this.workflowData, fullRunData, this.mode, undefined, this.retryOf); } @@ -608,44 +608,78 @@ export async function executeWorkflow(workflowInfo: IExecuteWorkflowInfo, additi executionId = parentExecutionId !== undefined ? parentExecutionId : await ActiveExecutions.getInstance().add(runData); } - const runExecutionData = runData.executionData as IRunExecutionData; + let data; + try { + // Get the needed credentials for the current workflow as they will differ from the ones of the + // calling workflow. + const credentials = await WorkflowCredentials(workflowData!.nodes); - // Get the needed credentials for the current workflow as they will differ to the ones of the - // calling workflow. - const credentials = await WorkflowCredentials(workflowData!.nodes); + // Create new additionalData to have different workflow loaded and to call + // different webhooks + const additionalDataIntegrated = await getBase(credentials); + additionalDataIntegrated.hooks = getWorkflowHooksIntegrated(runData.executionMode, executionId, workflowData!, { parentProcessMode: additionalData.hooks!.mode }); + // Make sure we pass on the original executeWorkflow function we received + // This one already contains changes to talk to parent process + // and get executionID from `activeExecutions` running on main process + additionalDataIntegrated.executeWorkflow = additionalData.executeWorkflow; + let subworkflowTimeout = additionalData.executionTimeoutTimestamp; + if (workflowData.settings?.executionTimeout !== undefined && workflowData.settings.executionTimeout > 0) { + // We might have received a max timeout timestamp from the parent workflow + // If we did, then we get the minimum time between the two timeouts + // If no timeout was given from the parent, then we use our timeout.
+ subworkflowTimeout = Math.min(additionalData.executionTimeoutTimestamp || Number.MAX_SAFE_INTEGER, Date.now() + (workflowData.settings.executionTimeout as number * 1000)); + } - // Create new additionalData to have different workflow loaded and to call - // different webooks - const additionalDataIntegrated = await getBase(credentials); - additionalDataIntegrated.hooks = getWorkflowHooksIntegrated(runData.executionMode, executionId, workflowData!, { parentProcessMode: additionalData.hooks!.mode }); - // Make sure we pass on the original executeWorkflow function we received - // This one already contains changes to talk to parent process - // and get executionID from `activeExecutions` running on main process - additionalDataIntegrated.executeWorkflow = additionalData.executeWorkflow; + additionalDataIntegrated.executionTimeoutTimestamp = subworkflowTimeout; - let subworkflowTimeout = additionalData.executionTimeoutTimestamp; - if (workflowData.settings?.executionTimeout !== undefined && workflowData.settings.executionTimeout > 0) { - // We might have received a max timeout timestamp from the parent workflow - // If we did, then we get the minimum time between the two timeouts - // If no timeout was given from the parent, then we use our timeout. - subworkflowTimeout = Math.min(additionalData.executionTimeoutTimestamp || Number.MAX_SAFE_INTEGER, Date.now() + (workflowData.settings.executionTimeout as number * 1000)); - } + const runExecutionData = runData.executionData as IRunExecutionData; - additionalDataIntegrated.executionTimeoutTimestamp = subworkflowTimeout; - - - // Execute the workflow - const workflowExecute = new WorkflowExecute(additionalDataIntegrated, runData.executionMode, runExecutionData); - if (parentExecutionId !== undefined) { - // Must be changed to become typed - return { + // Execute the workflow + const workflowExecute = new WorkflowExecute(additionalDataIntegrated, runData.executionMode, runExecutionData); + if (parentExecutionId !== undefined) { + // Must be changed to become typed + return { + startedAt: new Date(), + workflow, + workflowExecute, + }; + } + data = await workflowExecute.processRunExecutionData(workflow); + } catch (error) { + const fullRunData: IRun = { + data: { + resultData: { + error, + runData: {}, + }, + }, + finished: false, + mode: 'integrated', startedAt: new Date(), - workflow, - workflowExecute, + stoppedAt: new Date(), + }; + // When failing, we might not have finished the execution, + // so the database might not contain the error state yet. + // Force an update to the DB as there should be no harm in doing this + + const fullExecutionData: IExecutionDb = { + data: fullRunData.data, + mode: fullRunData.mode, + finished: fullRunData.finished ? fullRunData.finished : false, + startedAt: fullRunData.startedAt, + stoppedAt: fullRunData.stoppedAt, + workflowData, + }; + + const executionData = ResponseHelper.flattenExecutionData(fullExecutionData); + + await Db.collections.Execution!.update(executionId, executionData as IExecutionFlattedDb); + throw { + ...error, + stack: error!.stack, + }; } - const data = await workflowExecute.processRunExecutionData(workflow); await externalHooks.run('workflow.postExecute', [data, workflowData]); diff --git a/packages/cli/src/WorkflowRunner.ts b/packages/cli/src/WorkflowRunner.ts index 56031d5a77..07f9b91fe2 100644 --- a/packages/cli/src/WorkflowRunner.ts +++ b/packages/cli/src/WorkflowRunner.ts @@ -8,6 +8,7 @@ import { IBullJobResponse, ICredentialsOverwrite, ICredentialsTypeData, + IExecutionDb, IExecutionFlattedDb, IExecutionResponse, IProcessMessageDataHook, @@ -29,6 +30,7 @@ import { ExecutionError, IRun, + IWorkflowBase, LoggerProxy as Logger, Workflow, WorkflowExecuteMode, @@ -85,11 +87,15 @@ export class WorkflowRunner { * @param {string} executionId * @memberof WorkflowRunner */ - processError(error: ExecutionError, startedAt: Date, executionMode: WorkflowExecuteMode, executionId: string) { + async processError(error: ExecutionError, startedAt: Date, executionMode: WorkflowExecuteMode, executionId: string, hooks?: WorkflowHooks) { const fullRunData: IRun = { data: { resultData: { - error, + error: { + ...error, + message: error.message, + stack: error.stack, + }, runData: {}, }, }, @@ -102,6 +108,10 @@ export class WorkflowRunner { // Remove from active execution with empty data. That will // set the execution to failed. this.activeExecutions.remove(executionId, fullRunData); + + if (hooks) { + await hooks.executeHookFunctions('workflowExecuteAfter', [fullRunData]); + } } /** @@ -159,7 +169,6 @@ export class WorkflowRunner { const nodeTypes = NodeTypes(); - // Soft timeout to stop workflow execution after current running node // Changes were made by adding the `workflowTimeout` to the `additionalData` // So that the timeout will also work for executions with nested workflows. @@ -178,47 +187,55 @@ export class WorkflowRunner { // Register the active execution const executionId = await this.activeExecutions.add(data, undefined); - Logger.verbose(`Execution for workflow ${data.workflowData.name} was assigned id ${executionId}`, {executionId}); - - additionalData.hooks = WorkflowExecuteAdditionalData.getWorkflowHooksMain(data, executionId, true); - - additionalData.sendMessageToUI = WorkflowExecuteAdditionalData.sendMessageToUI.bind({sessionId: data.sessionId}); - let workflowExecution: PCancelable<IRun>; - if (data.executionData !== undefined) { - Logger.debug(`Execution ID ${executionId} had Execution data. 
Running with payload.`, {executionId}); - const workflowExecute = new WorkflowExecute(additionalData, data.executionMode, data.executionData); - workflowExecution = workflowExecute.processRunExecutionData(workflow); - } else if (data.runData === undefined || data.startNodes === undefined || data.startNodes.length === 0 || data.destinationNode === undefined) { - Logger.debug(`Execution ID ${executionId} will run executing all nodes.`, {executionId}); - // Execute all nodes - // Can execute without webhook so go on - const workflowExecute = new WorkflowExecute(additionalData, data.executionMode); - workflowExecution = workflowExecute.run(workflow, undefined, data.destinationNode); - } else { - Logger.debug(`Execution ID ${executionId} is a partial execution.`, {executionId}); - // Execute only the nodes between start and destination nodes - const workflowExecute = new WorkflowExecute(additionalData, data.executionMode); - workflowExecution = workflowExecute.runPartialWorkflow(workflow, data.runData, data.startNodes, data.destinationNode); - } + try { + Logger.verbose(`Execution for workflow ${data.workflowData.name} was assigned id ${executionId}`, { executionId }); + additionalData.hooks = WorkflowExecuteAdditionalData.getWorkflowHooksMain(data, executionId, true); + additionalData.sendMessageToUI = WorkflowExecuteAdditionalData.sendMessageToUI.bind({sessionId: data.sessionId}); - this.activeExecutions.attachWorkflowExecution(executionId, workflowExecution); + if (data.executionData !== undefined) { + Logger.debug(`Execution ID ${executionId} had Execution data. Running with payload.`, {executionId}); + const workflowExecute = new WorkflowExecute(additionalData, data.executionMode, data.executionData); + workflowExecution = workflowExecute.processRunExecutionData(workflow); + } else if (data.runData === undefined || data.startNodes === undefined || data.startNodes.length === 0 || data.destinationNode === undefined) { + Logger.debug(`Execution ID ${executionId} will run executing all nodes.`, {executionId}); + // Execute all nodes - if (workflowTimeout > 0) { - const timeout = Math.min(workflowTimeout, config.get('executions.maxTimeout') as number) * 1000; // as seconds - executionTimeout = setTimeout(() => { - this.activeExecutions.stopExecution(executionId, 'timeout'); - }, timeout); - } - - workflowExecution.then((fullRunData) => { - clearTimeout(executionTimeout); - if (workflowExecution.isCanceled) { - fullRunData.finished = false; + // Can execute without webhook so go on + const workflowExecute = new WorkflowExecute(additionalData, data.executionMode); + workflowExecution = workflowExecute.run(workflow, undefined, data.destinationNode); + } else { + Logger.debug(`Execution ID ${executionId} is a partial execution.`, {executionId}); + // Execute only the nodes between start and destination nodes + const workflowExecute = new WorkflowExecute(additionalData, data.executionMode); + workflowExecution = workflowExecute.runPartialWorkflow(workflow, data.runData, data.startNodes, data.destinationNode); } - this.activeExecutions.remove(executionId, fullRunData); - }); + + this.activeExecutions.attachWorkflowExecution(executionId, workflowExecution); + + if (workflowTimeout > 0) { + const timeout = Math.min(workflowTimeout, config.get('executions.maxTimeout') as number) * 1000; // as seconds + executionTimeout = setTimeout(() => { + this.activeExecutions.stopExecution(executionId, 'timeout'); + }, timeout); + } + + workflowExecution.then((fullRunData) => { + clearTimeout(executionTimeout); + if 
(workflowExecution.isCanceled) { + fullRunData.finished = false; + } + this.activeExecutions.remove(executionId, fullRunData); + }).catch((error) => { + this.processError(error, new Date(), data.executionMode, executionId, additionalData.hooks); + }); + + } catch (error) { + await this.processError(error, new Date(), data.executionMode, executionId, additionalData.hooks); + + throw error; + } return executionId; } @@ -247,39 +264,48 @@ export class WorkflowRunner { removeOnComplete: true, removeOnFail: true, }; - const job = await this.jobQueue.add(jobData, jobOptions); - console.log('Started with ID: ' + job.id.toString()); + let job: Bull.Job; + let hooks: WorkflowHooks; + try { + job = await this.jobQueue.add(jobData, jobOptions); - const hooks = WorkflowExecuteAdditionalData.getWorkflowHooksWorkerMain(data.executionMode, executionId, data.workflowData, { retryOf: data.retryOf ? data.retryOf.toString() : undefined }); + console.log('Started with ID: ' + job.id.toString()); - // Normally also workflow should be supplied here but as it only used for sending - // data to editor-UI is not needed. - hooks.executeHookFunctions('workflowExecuteBefore', []); + hooks = WorkflowExecuteAdditionalData.getWorkflowHooksWorkerMain(data.executionMode, executionId, data.workflowData, { retryOf: data.retryOf ? data.retryOf.toString() : undefined }); + + // Normally the workflow should also be supplied here, but as it is only used for sending + // data to the editor-UI it is not needed. + hooks.executeHookFunctions('workflowExecuteBefore', []); + } catch (error) { + // We use "getWorkflowHooksWorkerExecuter" as "getWorkflowHooksWorkerMain" does not contain the + // "workflowExecuteAfter" which we require. + const hooks = WorkflowExecuteAdditionalData.getWorkflowHooksWorkerExecuter(data.executionMode, executionId, data.workflowData, { retryOf: data.retryOf ? data.retryOf.toString() : undefined }); + await this.processError(error, new Date(), data.executionMode, executionId, hooks); + throw error; + } const workflowExecution: PCancelable<IRun> = new PCancelable(async (resolve, reject, onCancel) => { onCancel.shouldReject = false; onCancel(async () => { await Queue.getInstance().stopJob(job); - const fullRunData :IRun = { - data: { - resultData: { - error: new WorkflowOperationError('Workflow has been canceled!'), - runData: {}, - }, - }, - mode: data.executionMode, - startedAt: new Date(), - stoppedAt: new Date(), - }; - this.activeExecutions.remove(executionId, fullRunData); - resolve(fullRunData); + // We use "getWorkflowHooksWorkerExecuter" as "getWorkflowHooksWorkerMain" does not contain the + // "workflowExecuteAfter" which we require. + const hooksWorker = WorkflowExecuteAdditionalData.getWorkflowHooksWorkerExecuter(data.executionMode, executionId, data.workflowData, { retryOf: data.retryOf ? data.retryOf.toString() : undefined });
+ + const error = new WorkflowOperationError('Workflow-Execution has been canceled!'); + await this.processError(error, new Date(), data.executionMode, executionId, hooksWorker); + + reject(error); }); const jobData: Promise<object> = job.finished(); const queueRecoveryInterval = config.get('queue.bull.queueRecoveryInterval') as number; + const racingPromises: Array<Promise<object>> = [jobData]; + + let clearWatchdogInterval; if (queueRecoveryInterval > 0) { /************************************************* * Long explanation about what this solves: * @@ -295,7 +321,7 @@ *************************************************/ let watchDogInterval: NodeJS.Timeout | undefined; - const watchDog = new Promise((res) => { + const watchDog: Promise<object> = new Promise((res) => { watchDogInterval = setInterval(async () => { const currentJob = await this.jobQueue.getJob(job.id); // When null means job is finished (not found in queue) if (currentJob === null) { // Mimic worker's success message res({ success: true }); } }, queueRecoveryInterval * 1000); }); + racingPromises.push(watchDog); - const clearWatchdogInterval = () => { + clearWatchdogInterval = () => { if (watchDogInterval) { clearInterval(watchDogInterval); watchDogInterval = undefined; } }; - - await Promise.race([jobData, watchDog]); - clearWatchdogInterval(); - - } else { - await jobData; } + try { + await Promise.race(racingPromises); + if (clearWatchdogInterval !== undefined) { + clearWatchdogInterval(); + } + } catch (error) { + // We use "getWorkflowHooksWorkerExecuter" as "getWorkflowHooksWorkerMain" does not contain the + // "workflowExecuteAfter" which we require. + const hooks = WorkflowExecuteAdditionalData.getWorkflowHooksWorkerExecuter(data.executionMode, executionId, data.workflowData, { retryOf: data.retryOf ? data.retryOf.toString() : undefined }); + Logger.error(`Problem with execution ${executionId}: ${error.message}. Aborting.`); + if (clearWatchdogInterval !== undefined) { + clearWatchdogInterval(); + } + await this.processError(error, new Date(), data.executionMode, executionId, hooks); + reject(error); + } const executionDb = await Db.collections.Execution!.findOne(executionId) as IExecutionFlattedDb; const fullExecutionData = ResponseHelper.unflattenExecutionData(executionDb) as IExecutionResponse; const runData = { data: fullExecutionData.data, finished: fullExecutionData.finished, mode: fullExecutionData.mode, startedAt: fullExecutionData.startedAt, stoppedAt: fullExecutionData.stoppedAt, } as IRun; - this.activeExecutions.remove(executionId, runData); // Normally also static data should be supplied here but as it only used for sending // data to editor-UI is not needed. 
@@ -427,8 +463,13 @@ export class WorkflowRunner { const workflowHooks = WorkflowExecuteAdditionalData.getWorkflowHooksMain(data, executionId); - // Send all data to subprocess it needs to run the workflow - subprocess.send({ type: 'startWorkflow', data } as IProcessMessage); + try { + // Send all data to subprocess it needs to run the workflow + subprocess.send({ type: 'startWorkflow', data } as IProcessMessage); + } catch (error) { + await this.processError(error, new Date(), data.executionMode, executionId, workflowHooks); + return executionId; + } // Start timeout for the execution let executionTimeout: NodeJS.Timeout; @@ -476,14 +517,14 @@ export class WorkflowRunner { } else if (message.type === 'processError') { clearTimeout(executionTimeout); const executionError = message.data.executionError as ExecutionError; - this.processError(executionError, startedAt, data.executionMode, executionId); - + await this.processError(executionError, startedAt, data.executionMode, executionId, workflowHooks); } else if (message.type === 'processHook') { this.processHookMessage(workflowHooks, message.data as IProcessMessageDataHook); } else if (message.type === 'timeout') { // Execution timed out and its process has been terminated const timeoutError = new WorkflowOperationError('Workflow execution timed out!'); + // No need to add hook here as the subprocess takes care of calling the hooks this.processError(timeoutError, startedAt, data.executionMode, executionId); } else if (message.type === 'startExecution') { const executionId = await this.activeExecutions.add(message.data.runData); @@ -506,13 +547,13 @@ export class WorkflowRunner { // Execution timed out and its process has been terminated const timeoutError = new WorkflowOperationError('Workflow execution timed out!'); - this.processError(timeoutError, startedAt, data.executionMode, executionId); + await this.processError(timeoutError, startedAt, data.executionMode, executionId, workflowHooks); } else if (code !== 0) { Logger.debug(`Subprocess for execution ID ${executionId} finished with error code ${code}.`, {executionId}); // Process did exit with error code, so something went wrong. const executionError = new WorkflowOperationError('Workflow execution process did crash for an unknown reason!'); - this.processError(executionError, startedAt, data.executionMode, executionId); + await this.processError(executionError, startedAt, data.executionMode, executionId, workflowHooks); } for(const executionId of childExecutionIds) { diff --git a/packages/cli/src/WorkflowRunnerProcess.ts b/packages/cli/src/WorkflowRunnerProcess.ts index 3dbc77cf6c..321389ae2f 100644 --- a/packages/cli/src/WorkflowRunnerProcess.ts +++ b/packages/cli/src/WorkflowRunnerProcess.ts @@ -30,6 +30,7 @@ import { IWorkflowExecuteHooks, LoggerProxy, Workflow, + WorkflowExecuteMode, WorkflowHooks, WorkflowOperationError, } from 'n8n-workflow'; @@ -315,7 +316,7 @@ process.on('message', async (message: IProcessMessage) => { for (const executionId of executionIds) { const childWorkflowExecute = workflowRunner.childExecutions[executionId]; runData = childWorkflowExecute.workflowExecute.getFullRunData(workflowRunner.childExecutions[executionId].startedAt); - const timeOutError = message.type === 'timeout' ? new WorkflowOperationError('Workflow execution timed out!') : undefined; + const timeOutError = message.type === 'timeout' ? 
new WorkflowOperationError('Workflow execution timed out!') : new WorkflowOperationError('Workflow execution has been canceled!'); // If there is any data, send it to the parent process; if the execution timed out, add the error await childWorkflowExecute.workflowExecute.processSuccessExecution(workflowRunner.childExecutions[executionId].startedAt, childWorkflowExecute.workflow, timeOutError); @@ -324,7 +325,7 @@ process.on('message', async (message: IProcessMessage) => { // Workflow has already started executing runData = workflowRunner.workflowExecute.getFullRunData(workflowRunner.startedAt); - const timeOutError = message.type === 'timeout' ? new WorkflowOperationError('Workflow execution timed out!') : undefined; + const timeOutError = message.type === 'timeout' ? new WorkflowOperationError('Workflow execution timed out!') : new WorkflowOperationError('Workflow execution has been canceled!'); // If there is any data, send it to the parent process; if the execution timed out, add the error await workflowRunner.workflowExecute.processSuccessExecution(workflowRunner.startedAt, workflowRunner.workflow!, timeOutError); @@ -336,8 +337,8 @@ runData: {}, }, }, - finished: message.type !== 'timeout', - mode: workflowRunner.data!.executionMode, + finished: false, + mode: workflowRunner.data ? workflowRunner.data!.executionMode : 'own' as WorkflowExecuteMode, startedAt: workflowRunner.startedAt, stoppedAt: new Date(), }; diff --git a/packages/cli/tsconfig.json b/packages/cli/tsconfig.json index 4aaf4747fc..aa44bc610f 100644 --- a/packages/cli/tsconfig.json +++ b/packages/cli/tsconfig.json @@ -1,7 +1,8 @@ { "compilerOptions": { "lib": [ - "es2017" + "es2017", + "ES2020.Promise" ], "types": [ "node", diff --git a/packages/core/package.json b/packages/core/package.json index 71871514bf..c011222a8a 100644 --- a/packages/core/package.json +++ b/packages/core/package.json @@ -1,6 +1,6 @@ { "name": "n8n-core", - "version": "0.75.0", + "version": "0.77.0", "description": "Core functionality of n8n", "license": "SEE LICENSE IN LICENSE.md", "homepage": "https://n8n.io", @@ -47,7 +47,7 @@ "file-type": "^14.6.2", "lodash.get": "^4.4.2", "mime-types": "^2.1.27", - "n8n-workflow": "~0.62.0", + "n8n-workflow": "~0.63.0", "oauth-1.0a": "^2.2.6", "p-cancelable": "^2.0.0", "request": "^2.88.2", diff --git a/packages/core/src/WorkflowExecute.ts b/packages/core/src/WorkflowExecute.ts index 5c73d4c9ef..b893b85cb0 100644 --- a/packages/core/src/WorkflowExecute.ts +++ b/packages/core/src/WorkflowExecute.ts @@ -807,7 +807,7 @@ export class WorkflowExecute { })() .then(async () => { if (gotCancel && executionError === undefined) { - return this.processSuccessExecution(startedAt, workflow, new WorkflowOperationError('Workflow has been canceled!')); + return this.processSuccessExecution(startedAt, workflow, new WorkflowOperationError('Workflow has been canceled or timed out!')); } return this.processSuccessExecution(startedAt, workflow, executionError); }) @@ -844,7 +844,11 @@ if (executionError !== undefined) { Logger.verbose(`Workflow execution finished with error`, { error: executionError, workflowId: workflow.id }); - fullRunData.data.resultData.error = executionError; + fullRunData.data.resultData.error = { + ...executionError, + message: executionError.message, + stack: executionError.stack, + } as ExecutionError; } else { Logger.verbose(`Workflow execution finished successfully`, { workflowId: workflow.id }); fullRunData.finished = true; diff --git
a/packages/editor-ui/package.json b/packages/editor-ui/package.json index f829f339f0..6d16570dea 100644 --- a/packages/editor-ui/package.json +++ b/packages/editor-ui/package.json @@ -1,6 +1,6 @@ { "name": "n8n-editor-ui", - "version": "0.96.0", + "version": "0.98.0", "description": "Workflow Editor UI for n8n", "license": "SEE LICENSE IN LICENSE.md", "homepage": "https://n8n.io", @@ -68,7 +68,7 @@ "lodash.debounce": "^4.0.8", "lodash.get": "^4.4.2", "lodash.set": "^4.3.2", - "n8n-workflow": "~0.62.0", + "n8n-workflow": "~0.63.0", "node-sass": "^4.12.0", "normalize-wheel": "^1.0.1", "prismjs": "^1.17.1", diff --git a/packages/editor-ui/src/Interface.ts b/packages/editor-ui/src/Interface.ts index f119699f6e..cc3f89b8c6 100644 --- a/packages/editor-ui/src/Interface.ts +++ b/packages/editor-ui/src/Interface.ts @@ -202,7 +202,7 @@ export interface IVariableSelectorOption { // Simple version of n8n-workflow.Workflow export interface IWorkflowData { - id?: string; + id?: string | number; name?: string; active?: boolean; nodes: INode[]; @@ -212,7 +212,7 @@ export interface IWorkflowData { } export interface IWorkflowDataUpdate { - id?: string; + id?: string | number; name?: string; nodes?: INode[]; connections?: IConnections; @@ -325,6 +325,7 @@ export interface IExecutionShortResponse { export interface IExecutionsListResponse { count: number; results: IExecutionsSummary[]; + estimated: boolean; } export interface IExecutionsCurrentSummaryExtended { diff --git a/packages/editor-ui/src/components/BinaryDataDisplay.vue b/packages/editor-ui/src/components/BinaryDataDisplay.vue index c6ab2aa748..3a737c1f0f 100644 --- a/packages/editor-ui/src/components/BinaryDataDisplay.vue +++ b/packages/editor-ui/src/components/BinaryDataDisplay.vue @@ -14,6 +14,10 @@
The data to display could not be found
+ diff --git a/packages/editor-ui/src/components/ExecutionsList.vue b/packages/editor-ui/src/components/ExecutionsList.vue index e0e129a737..1ecfc4caf8 100644 --- a/packages/editor-ui/src/components/ExecutionsList.vue +++ b/packages/editor-ui/src/components/ExecutionsList.vue @@ -1,6 +1,6 @@ @@ -47,8 +47,10 @@ export default Vue.extend({ onEnter() { this.$emit('enter', (this.$refs.input as HTMLInputElement).value); }, - onBlur() { - this.$emit('blur', (this.$refs.input as HTMLInputElement).value); + onClickOutside(e: Event) { + if (e.type === 'click') { + this.$emit('blur', (this.$refs.input as HTMLInputElement).value); + } }, onEscape() { this.$emit('esc'); diff --git a/packages/editor-ui/src/components/MainSidebar.vue b/packages/editor-ui/src/components/MainSidebar.vue index 09faab9bc7..9a615abccc 100644 --- a/packages/editor-ui/src/components/MainSidebar.vue +++ b/packages/editor-ui/src/components/MainSidebar.vue @@ -408,6 +408,9 @@ export default mixins( const workflowData = await this.getWorkflowDataToSave(); const {tags, ...data} = workflowData; + if (data.id && typeof data.id === 'string') { + data.id = parseInt(data.id, 10); + } const blob = new Blob([JSON.stringify(data, null, 2)], { type: 'application/json;charset=utf-8', }); diff --git a/packages/editor-ui/src/components/NodeCreator/ItemIterator.vue b/packages/editor-ui/src/components/NodeCreator/ItemIterator.vue index 7cf00057ef..e7a8774408 100644 --- a/packages/editor-ui/src/components/NodeCreator/ItemIterator.vue +++ b/packages/editor-ui/src/components/NodeCreator/ItemIterator.vue @@ -8,7 +8,7 @@ @before-leave="beforeLeave" @leave="leave" > -
@@ -29,9 +29,17 @@ export default Vue.extend({ props: [ 'active', ], + data() { + return { + allNodeTypes: [], + }; + }, computed: { + nodeTypes(): INodeTypeDescription[] { + return this.$store.getters.allNodeTypes; + }, visibleNodeTypes(): INodeTypeDescription[] { - return this.$store.getters.allNodeTypes + return this.allNodeTypes .filter((nodeType: INodeTypeDescription) => { return !HIDDEN_NODES.includes(nodeType.name); }); @@ -64,13 +72,22 @@ export default Vue.extend({ }, }, methods: { - closeCreator () { - this.$emit('closeNodeCreator'); + onClickOutside (e: Event) { + if (e.type === 'click') { + this.$emit('closeNodeCreator'); + } }, nodeTypeSelected (nodeTypeName: string) { this.$emit('nodeTypeSelected', nodeTypeName); }, }, + watch: { + nodeTypes(newList, prevList) { + if (prevList.length === 0) { + this.allNodeTypes = newList; + } + }, + }, }); diff --git a/packages/editor-ui/src/components/TagsDropdown.vue b/packages/editor-ui/src/components/TagsDropdown.vue index 5afc839159..6eedc1ec05 100644 --- a/packages/editor-ui/src/components/TagsDropdown.vue +++ b/packages/editor-ui/src/components/TagsDropdown.vue @@ -1,5 +1,5 @@ @@ -124,6 +124,11 @@ export default mixins( updateTagsFilter(tags: string[]) { this.filterTagIds = tags; }, + onTagClick(tagId: string) { + if (tagId !== 'count' && !this.filterTagIds.includes(tagId)) { + this.filterTagIds.push(tagId); + } + }, async openWorkflow (data: IWorkflowShortResponse, column: any) { // tslint:disable-line:no-any if (column.label !== 'Active') { diff --git a/packages/editor-ui/src/components/mixins/pushConnection.ts b/packages/editor-ui/src/components/mixins/pushConnection.ts index 038335a904..4c500b59ee 100644 --- a/packages/editor-ui/src/components/mixins/pushConnection.ts +++ b/packages/editor-ui/src/components/mixins/pushConnection.ts @@ -213,33 +213,16 @@ export const pushConnection = mixins( const runDataExecuted = pushData.data; - let runDataExecutedErrorMessage; + const runDataExecutedErrorMessage = this.$getExecutionError(runDataExecuted.data.resultData.error); + // @ts-ignore const workflow = this.getWorkflow(); if (runDataExecuted.finished !== true) { - // There was a problem with executing the workflow - let errorMessage = 'There was a problem executing the workflow!'; - - if (runDataExecuted.data.resultData.error && runDataExecuted.data.resultData.error.message) { - let nodeName: string | undefined; - if (runDataExecuted.data.resultData.error.node) { - nodeName = typeof runDataExecuted.data.resultData.error.node === 'string' - ? runDataExecuted.data.resultData.error.node - : runDataExecuted.data.resultData.error.node.name; - } - - const receivedError = nodeName - ? `${nodeName}: ${runDataExecuted.data.resultData.error.message}` - : runDataExecuted.data.resultData.error.message; - errorMessage = `There was a problem executing the workflow:
"${receivedError}"`; - } - - runDataExecutedErrorMessage = errorMessage; - this.$titleSet(workflow.name as string, 'ERROR'); + this.$showMessage({ title: 'Problem executing workflow', - message: errorMessage, + message: runDataExecutedErrorMessage, type: 'error', }); } else { diff --git a/packages/editor-ui/src/components/mixins/showMessage.ts b/packages/editor-ui/src/components/mixins/showMessage.ts index 6ed718ca52..67bb9301a8 100644 --- a/packages/editor-ui/src/components/mixins/showMessage.ts +++ b/packages/editor-ui/src/components/mixins/showMessage.ts @@ -3,6 +3,7 @@ import { ElNotificationOptions } from 'element-ui/types/notification'; import mixins from 'vue-typed-mixins'; import { externalHooks } from '@/components/mixins/externalHooks'; +import { ExecutionError } from 'n8n-workflow'; export const showMessage = mixins(externalHooks).extend({ methods: { @@ -15,6 +16,27 @@ export const showMessage = mixins(externalHooks).extend({ return Notification(messageData); }, + $getExecutionError(error?: ExecutionError) { + // There was a problem with executing the workflow + let errorMessage = 'There was a problem executing the workflow!'; + + if (error && error.message) { + let nodeName: string | undefined; + if (error.node) { + nodeName = typeof error.node === 'string' + ? error.node + : error.node.name; + } + + const receivedError = nodeName + ? `${nodeName}: ${error.message}` + : error.message; + errorMessage = `There was a problem executing the workflow:
"${receivedError}"`; + } + + return errorMessage; + }, + $showError(error: Error, title: string, message?: string) { const messageLine = message ? `${message}
` : ''; this.$showMessage({ diff --git a/packages/editor-ui/src/modules/ui.ts b/packages/editor-ui/src/modules/ui.ts index 9fbe35fb37..a5681f4a84 100644 --- a/packages/editor-ui/src/modules/ui.ts +++ b/packages/editor-ui/src/modules/ui.ts @@ -31,9 +31,6 @@ const module: Module = { isModalActive: (state: IUiState) => { return (name: string) => state.modalStack.length > 0 && name === state.modalStack[0]; }, - anyModalsOpen: (state: IUiState) => { - return state.modalStack.length > 0; - }, sidebarMenuCollapsed: (state: IUiState): boolean => state.sidebarMenuCollapsed, }, mutations: { diff --git a/packages/editor-ui/src/views/NodeView.vue b/packages/editor-ui/src/views/NodeView.vue index 617aabcb41..5cd1bfea05 100644 --- a/packages/editor-ui/src/views/NodeView.vue +++ b/packages/editor-ui/src/views/NodeView.vue @@ -375,6 +375,39 @@ export default mixins( }); this.$externalHooks().run('execution.open', { workflowId: data.workflowData.id, workflowName: data.workflowData.name, executionId }); + + if (data.finished !== true && data.data.resultData.error) { + // Check if any node contains an error + let nodeErrorFound = false; + if (data.data.resultData.runData) { + const runData = data.data.resultData.runData; + errorCheck: + for (const nodeName of Object.keys(runData)) { + for (const taskData of runData[nodeName]) { + if (taskData.error) { + nodeErrorFound = true; + break errorCheck; + } + } + } + } + + if (nodeErrorFound === false) { + const errorMessage = this.$getExecutionError(data.data.resultData.error); + this.$showMessage({ + title: 'Failed execution', + message: errorMessage, + type: 'error', + }); + + if (data.data.resultData.error.stack) { + // Display some more information for now in console to make debugging easier + // TODO: Improve this in the future by displaying in UI + console.error(`Execution ${executionId} error:`); // eslint-disable-line no-console + console.error(data.data.resultData.error.stack); // eslint-disable-line no-console + } + } + } }, async openWorkflowTemplate (templateId: string) { this.setLoadingText('Loading template'); @@ -514,15 +547,29 @@ export default mixins( // else which should ignore the default keybindings for (let index = 0; index < path.length; index++) { if (path[index].className && typeof path[index].className === 'string' && ( - path[index].className.includes('el-message-box') || - path[index].className.includes('el-select') || path[index].className.includes('ignore-key-press') )) { return; } } - const anyModalsOpen = this.$store.getters['ui/anyModalsOpen']; - if (anyModalsOpen) { + + // el-dialog or el-message-box element is open + if (window.document.body.classList.contains('el-popup-parent--hidden')) { + return; + } + + if (e.key === 'Escape') { + this.createNodeActive = false; + if (this.activeNode) { + this.$externalHooks().run('dataDisplay.nodeEditingFinished'); + this.$store.commit('setActiveNode', null); + } + + return; + } + + // node modal is open + if (this.activeNode) { return; } @@ -533,15 +580,12 @@ export default mixins( e.preventDefault(); this.callDebounced('deleteSelectedNodes', 500); - } else if (e.key === 'Escape') { - this.$externalHooks().run('dataDisplay.nodeEditingFinished'); - this.createNodeActive = false; - this.$store.commit('setActiveNode', null); + } else if (e.key === 'Tab') { this.createNodeActive = !this.createNodeActive && !this.isReadOnly; } else if (e.key === this.controlKeyCode) { this.ctrlKeyPressed = true; - } else if (e.key === 'F2') { + } else if (e.key === 'F2' && !this.isReadOnly) { const lastSelectedNode 
= this.lastSelectedNode; if (lastSelectedNode !== null) { this.callDebounced('renameNodePrompt', 1500, lastSelectedNode.name); diff --git a/packages/node-dev/package.json b/packages/node-dev/package.json index 594fe74133..9fa927fbeb 100644 --- a/packages/node-dev/package.json +++ b/packages/node-dev/package.json @@ -1,6 +1,6 @@ { "name": "n8n-node-dev", - "version": "0.15.0", + "version": "0.17.0", "description": "CLI to simplify n8n credentials/node development", "license": "SEE LICENSE IN LICENSE.md", "homepage": "https://n8n.io", @@ -59,8 +59,8 @@ "change-case": "^4.1.1", "copyfiles": "^2.1.1", "inquirer": "^7.0.1", - "n8n-core": "~0.75.0", - "n8n-workflow": "~0.62.0", + "n8n-core": "~0.77.0", + "n8n-workflow": "~0.63.0", "oauth-1.0a": "^2.2.6", "replace-in-file": "^6.0.0", "request": "^2.88.2", diff --git a/packages/nodes-base/credentials/TaigaServerApi.credentials.ts b/packages/nodes-base/credentials/BaserowApi.credentials.ts similarity index 52% rename from packages/nodes-base/credentials/TaigaServerApi.credentials.ts rename to packages/nodes-base/credentials/BaserowApi.credentials.ts index 99c7125fec..ff81c05fee 100644 --- a/packages/nodes-base/credentials/TaigaServerApi.credentials.ts +++ b/packages/nodes-base/credentials/BaserowApi.credentials.ts @@ -3,11 +3,18 @@ import { INodeProperties, } from 'n8n-workflow'; -export class TaigaServerApi implements ICredentialType { - name = 'taigaServerApi'; - displayName = 'Taiga Server API'; - documentationUrl = 'taiga'; +// https://api.baserow.io/api/redoc/#section/Authentication + +export class BaserowApi implements ICredentialType { + name = 'baserowApi'; + displayName = 'Baserow API'; properties: INodeProperties[] = [ + { + displayName: 'Host', + name: 'host', + type: 'string', + default: 'https://api.baserow.io', + }, { displayName: 'Username', name: 'username', @@ -19,13 +26,9 @@ export class TaigaServerApi implements ICredentialType { name: 'password', type: 'string', default: '', - }, - { - displayName: 'URL', - name: 'url', - type: 'string', - default: '', - placeholder: 'https://taiga.yourdomain.com', + typeOptions: { + password: true, + }, }, ]; } diff --git a/packages/nodes-base/credentials/HomeAssistantApi.credentials.ts b/packages/nodes-base/credentials/HomeAssistantApi.credentials.ts new file mode 100644 index 0000000000..802561ba98 --- /dev/null +++ b/packages/nodes-base/credentials/HomeAssistantApi.credentials.ts @@ -0,0 +1,36 @@ +import { + ICredentialType, + NodePropertyTypes, +} from 'n8n-workflow'; + +export class HomeAssistantApi implements ICredentialType { + name = 'homeAssistantApi'; + displayName = 'Home Assistant API'; + documentationUrl = 'homeAssistant'; + properties = [ + { + displayName: 'Host', + name: 'host', + type: 'string' as NodePropertyTypes, + default: '', + }, + { + displayName: 'Port', + name: 'port', + type: 'number' as NodePropertyTypes, + default: 8123, + }, + { + displayName: 'SSL', + name: 'ssl', + type: 'boolean' as NodePropertyTypes, + default: false, + }, + { + displayName: 'Access Token', + name: 'accessToken', + type: 'string' as NodePropertyTypes, + default: '', + }, + ]; +} diff --git a/packages/nodes-base/credentials/MicrosoftSql.credentials.ts b/packages/nodes-base/credentials/MicrosoftSql.credentials.ts index 7d0693ca72..2b09904e18 100644 --- a/packages/nodes-base/credentials/MicrosoftSql.credentials.ts +++ b/packages/nodes-base/credentials/MicrosoftSql.credentials.ts @@ -57,5 +57,12 @@ export class MicrosoftSql implements ICredentialType { default: 15000, description: 'Connection 
timeout in ms.', }, + { + displayName: 'Request Timeout', + name: 'requestTimeout', + type: 'number', + default: 15000, + description: 'Request timeout in ms.', + }, ]; } diff --git a/packages/nodes-base/credentials/SalesforceOAuth2Api.credentials.ts b/packages/nodes-base/credentials/SalesforceOAuth2Api.credentials.ts index 0aaba074ec..3fd9daf8d4 100644 --- a/packages/nodes-base/credentials/SalesforceOAuth2Api.credentials.ts +++ b/packages/nodes-base/credentials/SalesforceOAuth2Api.credentials.ts @@ -11,19 +11,35 @@ export class SalesforceOAuth2Api implements ICredentialType { displayName = 'Salesforce OAuth2 API'; documentationUrl = 'salesforce'; properties: INodeProperties[] = [ + { + displayName: 'Environment Type', + name: 'environment', + type: 'options', + options: [ + { + name: 'Production', + value: 'production', + }, + { + name: 'Sandbox', + value: 'sandbox', + }, + ], + default: 'production', + }, { displayName: 'Authorization URL', name: 'authUrl', type: 'hidden', - default: 'https://login.salesforce.com/services/oauth2/authorize', required: true, + default: '={{ $self["environment"] === "sandbox" ? "https://test.salesforce.com/services/oauth2/authorize" : "https://login.salesforce.com/services/oauth2/authorize" }}', }, { displayName: 'Access Token URL', name: 'accessTokenUrl', - type: 'string', - default: 'https://yourcompany.salesforce.com/services/oauth2/token', + type: 'hidden', required: true, + default: '={{ $self["environment"] === "sandbox" ? "https://test.salesforce.com/services/oauth2/token" : "https://login.salesforce.com/services/oauth2/token" }}', }, { displayName: 'Scope', diff --git a/packages/nodes-base/credentials/ServiceNowOAuth2Api.credentials.ts b/packages/nodes-base/credentials/ServiceNowOAuth2Api.credentials.ts new file mode 100644 index 0000000000..d12b0cd8f1 --- /dev/null +++ b/packages/nodes-base/credentials/ServiceNowOAuth2Api.credentials.ts @@ -0,0 +1,62 @@ +import { + ICredentialType, + NodePropertyTypes, +} from 'n8n-workflow'; + +export class ServiceNowOAuth2Api implements ICredentialType { + name = 'serviceNowOAuth2Api'; + extends = [ + 'oAuth2Api', + ]; + displayName = 'ServiceNow OAuth2 API'; + documentationUrl = 'serviceNow'; + properties = [ + { + displayName: 'Subdomain', + name: 'subdomain', + type: 'string' as NodePropertyTypes, + default: '', + placeholder: 'n8n', + description: 'The subdomain of your ServiceNow environment', + required: true, + }, + { + displayName: 'Authorization URL', + name: 'authUrl', + type: 'hidden' as NodePropertyTypes, + default: '=https://{{$self["subdomain"]}}.service-now.com/oauth_auth.do', + required: true, + }, + { + displayName: 'Access Token URL', + name: 'accessTokenUrl', + type: 'hidden' as NodePropertyTypes, + default: '=https://{{$self["subdomain"]}}.service-now.com/oauth_token.do', + required: true, + }, + { + displayName: 'Scope', + name: 'scope', + type: 'hidden' as NodePropertyTypes, + default: 'useraccount', + }, + { + displayName: 'Auth URI Query Parameters', + name: 'authQueryParameters', + type: 'hidden' as NodePropertyTypes, + default: 'response_type=code', + }, + { + displayName: 'Authentication', + name: 'authentication', + type: 'hidden' as NodePropertyTypes, + default: 'header', + }, + ]; } diff --git a/packages/nodes-base/credentials/StripeApi.credentials.ts b/packages/nodes-base/credentials/StripeApi.credentials.ts index
b6aef0d814..991af27fab 100644 --- a/packages/nodes-base/credentials/StripeApi.credentials.ts +++ b/packages/nodes-base/credentials/StripeApi.credentials.ts @@ -6,7 +6,7 @@ import { export class StripeApi implements ICredentialType { name = 'stripeApi'; - displayName = 'Stripe Api'; + displayName = 'Stripe API'; documentationUrl = 'stripe'; properties: INodeProperties[] = [ // The credentials to get from user and save encrypted. diff --git a/packages/nodes-base/credentials/TaigaApi.credentials.ts b/packages/nodes-base/credentials/TaigaApi.credentials.ts new file mode 100644 index 0000000000..db5e7ec6ad --- /dev/null +++ b/packages/nodes-base/credentials/TaigaApi.credentials.ts @@ -0,0 +1,54 @@ +import { + ICredentialType, + INodeProperties, +} from 'n8n-workflow'; + +export class TaigaApi implements ICredentialType { + name = 'taigaApi'; + displayName = 'Taiga API'; + documentationUrl = 'taiga'; + properties: INodeProperties[] = [ + { + displayName: 'Username', + name: 'username', + type: 'string', + default: '', + }, + { + displayName: 'Password', + name: 'password', + type: 'string', + default: '', + }, + { + displayName: 'Environment', + name: 'environment', + type: 'options', + default: 'cloud', + options: [ + { + name: 'Cloud', + value: 'cloud', + }, + { + name: 'Self-Hosted', + value: 'selfHosted', + }, + ], + }, + { + displayName: 'URL', + name: 'url', + type: 'string', + default: '', + placeholder: 'https://taiga.yourdomain.com', + displayOptions: { + show: { + environment: [ + 'selfHosted', + ], + }, + }, + }, + ]; +} diff --git a/packages/nodes-base/credentials/TaigaCloudApi.credentials.ts b/packages/nodes-base/credentials/TaigaCloudApi.credentials.ts deleted file mode 100644 index 2a32b29873..0000000000 --- a/packages/nodes-base/credentials/TaigaCloudApi.credentials.ts +++ /dev/null @@ -1,24 +0,0 @@ -import { - ICredentialType, - INodeProperties, -} from 'n8n-workflow'; - -export class TaigaCloudApi implements ICredentialType { - name = 'taigaCloudApi'; - displayName = 'Taiga Cloud API'; - documentationUrl = 'taiga'; - properties: INodeProperties[] = [ - { - displayName: 'Username', - name: 'username', - type: 'string', - default: '', - }, - { - displayName: 'Password', - name: 'password', - type: 'string', - default: '', - }, - ]; -} diff --git a/packages/nodes-base/nodes/ActiveCampaign/GenericFunctions.ts b/packages/nodes-base/nodes/ActiveCampaign/GenericFunctions.ts index 215d31934c..172cbd37ec 100644 --- a/packages/nodes-base/nodes/ActiveCampaign/GenericFunctions.ts +++ b/packages/nodes-base/nodes/ActiveCampaign/GenericFunctions.ts @@ -163,7 +163,7 @@ export function activeCampaignDefaultGetAllProperties(resource: string, operatio description: 'How many results to return.', }, { - displayName: 'Simple', + displayName: 'Simplify Response', name: 'simple', type: 'boolean', displayOptions: { @@ -177,7 +177,7 @@ export function activeCampaignDefaultGetAllProperties(resource: string, operatio }, }, default: true, - description: 'When set to true a simplify version of the response will be used else the raw data.', + description: 'Return a simplified version of the response instead of the raw data.', }, ]; } diff --git a/packages/nodes-base/nodes/Airtable/Airtable.node.ts b/packages/nodes-base/nodes/Airtable/Airtable.node.ts index 36c2846618..333183e84a 100644 --- a/packages/nodes-base/nodes/Airtable/Airtable.node.ts +++ b/packages/nodes-base/nodes/Airtable/Airtable.node.ts @@ -221,7 +221,7 @@ export class Airtable implements INodeType { }, }, default: '', - description: `Name of 
the fields of type 'attachment' that should be downloaded. Multiple ones can be defined separated by comma. Case sensitive.`, + description: `Name of the fields of type 'attachment' that should be downloaded. Multiple ones can be defined separated by comma. Case sensitive and cannot include spaces after a comma.`, }, { displayName: 'Additional Options', @@ -390,7 +390,7 @@ export class Airtable implements INodeType { }, // ---------------------------------- - // append + update + // append + delete + update // ---------------------------------- { displayName: 'Options', @@ -401,12 +401,24 @@ export class Airtable implements INodeType { show: { operation: [ 'append', + 'delete', 'update', ], }, }, default: {}, options: [ + { + displayName: 'Bulk Size', + name: 'bulkSize', + type: 'number', + typeOptions: { + minValue: 1, + maxValue: 10, + }, + default: 10, + description: `Number of records to process at once.`, + }, { displayName: 'Ignore Fields', name: 'ignoreFields', @@ -428,6 +440,14 @@ export class Airtable implements INodeType { displayName: 'Typecast', name: 'typecast', type: 'boolean', + displayOptions: { + show: { + '/operation': [ + 'append', + 'update', + ], + }, + }, default: false, description: 'If the Airtable API should attempt mapping of string values for linked records & select options.', }, @@ -465,54 +485,81 @@ export class Airtable implements INodeType { let fields: string[]; let options: IDataObject; + const rows: IDataObject[] = []; + let bulkSize = 10; + for (let i = 0; i < items.length; i++) { addAllFields = this.getNodeParameter('addAllFields', i) as boolean; options = this.getNodeParameter('options', i, {}) as IDataObject; + bulkSize = options.bulkSize as number || bulkSize; + + const row: IDataObject = {}; if (addAllFields === true) { // Add all the fields the item has - body.fields = items[i].json; + row.fields = { ...items[i].json }; + // tslint:disable-next-line: no-any + delete (row.fields! as any).id; } else { // Add only the specified fields - body.fields = {} as IDataObject; + row.fields = {} as IDataObject; fields = this.getNodeParameter('fields', i, []) as string[]; for (const fieldName of fields) { // @ts-ignore - body.fields[fieldName] = items[i].json[fieldName]; + row.fields[fieldName] = items[i].json[fieldName]; } } - if (options.typecast === true) { - body['typecast'] = true; + rows.push(row); + + if (rows.length === bulkSize || i === items.length - 1) { + if (options.typecast === true) { + body['typecast'] = true; + } + + body['records'] = rows; + + responseData = await apiRequest.call(this, requestMethod, endpoint, body, qs); + + returnData.push(...responseData.records); + // empty rows + rows.length = 0; } - - responseData = await apiRequest.call(this, requestMethod, endpoint, body, qs); - - returnData.push(responseData); } } else if (operation === 'delete') { requestMethod = 'DELETE'; - let id: string; + const rows: string[] = []; + const options = this.getNodeParameter('options', 0, {}) as IDataObject; + const bulkSize = options.bulkSize as number || 10; + for (let i = 0; i < items.length; i++) { + let id: string; + id = this.getNodeParameter('id', i) as string; - endpoint = `${application}/${table}`; + rows.push(id); - // Make one request after another. This is slower but makes - // sure that we do not run into the rate limit they have in - // place and so block for 30 seconds. Later some global - // functionality in core should make it easy to make requests - // according to specific rules like not more than 5 requests - // per seconds. 
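// The removed comment above explains why requests were previously made strictly one
// after another: Airtable rate-limits clients (roughly 5 requests per second), and
// exceeding the limit blocks the client for 30 seconds. A minimal sketch of such
// pacing follows — `rateLimited` is a hypothetical helper, not part of n8n core:
async function rateLimited<T>(
	calls: Array<() => Promise<T>>,
	maxPerSecond = 5,
): Promise<T[]> {
	const results: T[] = [];
	const minGapMs = 1000 / maxPerSecond; // e.g. 200ms between request starts
	for (const call of calls) {
		const started = Date.now();
		results.push(await call()); // sequential: at most one request in flight
		const elapsed = Date.now() - started;
		if (elapsed < minGapMs) {
			// Pad short requests so the overall rate never exceeds maxPerSecond
			await new Promise((resolve) => setTimeout(resolve, minGapMs - elapsed));
		}
	}
	return results;
}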
- qs.records = [id]; + if (rows.length === bulkSize || i === items.length - 1) { + endpoint = `${application}/${table}`; - responseData = await apiRequest.call(this, requestMethod, endpoint, body, qs); + // Make one request after another. This is slower but makes + // sure that we do not run into the rate limit they have in + // place and so block for 30 seconds. Later some global + // functionality in core should make it easy to make requests + // according to specific rules like not more than 5 requests + // per seconds. + qs.records = rows; - returnData.push(...responseData.records); + responseData = await apiRequest.call(this, requestMethod, endpoint, body, qs); + + returnData.push(...responseData.records); + // empty rows + rows.length = 0; + } } } else if (operation === 'list') { @@ -585,55 +632,69 @@ export class Airtable implements INodeType { requestMethod = 'PATCH'; - let id: string; let updateAllFields: boolean; let fields: string[]; let options: IDataObject; + + const rows: IDataObject[] = []; + let bulkSize = 10; + for (let i = 0; i < items.length; i++) { updateAllFields = this.getNodeParameter('updateAllFields', i) as boolean; options = this.getNodeParameter('options', i, {}) as IDataObject; + bulkSize = options.bulkSize as number || bulkSize; + + const row: IDataObject = {}; + row.fields = {} as IDataObject; if (updateAllFields === true) { // Update all the fields the item has - body.fields = items[i].json; + row.fields = { ...items[i].json }; + // remove id field + // tslint:disable-next-line: no-any + delete (row.fields! as any).id; if (options.ignoreFields && options.ignoreFields !== '') { const ignoreFields = (options.ignoreFields as string).split(',').map(field => field.trim()).filter(field => !!field); if (ignoreFields.length) { // From: https://stackoverflow.com/questions/17781472/how-to-get-a-subset-of-a-javascript-objects-properties - body.fields = Object.entries(items[i].json) + row.fields = Object.entries(items[i].json) .filter(([key]) => !ignoreFields.includes(key)) .reduce((obj, [key, val]) => Object.assign(obj, { [key]: val }), {}); } } } else { - // Update only the specified fields - body.fields = {} as IDataObject; - fields = this.getNodeParameter('fields', i, []) as string[]; for (const fieldName of fields) { // @ts-ignore - body.fields[fieldName] = items[i].json[fieldName]; + row.fields[fieldName] = items[i].json[fieldName]; } } - id = this.getNodeParameter('id', i) as string; + row.id = this.getNodeParameter('id', i) as string; - endpoint = `${application}/${table}`; + rows.push(row); - // Make one request after another. This is slower but makes - // sure that we do not run into the rate limit they have in - // place and so block for 30 seconds. Later some global - // functionality in core should make it easy to make requests - // according to specific rules like not more than 5 requests - // per seconds. + if (rows.length === bulkSize || i === items.length - 1) { + endpoint = `${application}/${table}`; - const data = { records: [{ id, fields: body.fields }], typecast: (options.typecast) ? true : false }; + // Make one request after another. This is slower but makes + // sure that we do not run into the rate limit they have in + // place and so block for 30 seconds. Later some global + // functionality in core should make it easy to make requests + // according to specific rules like not more than 5 requests + // per seconds. 
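// The hunk above converts per-record requests into batches: rows accumulate in a
// buffer that is flushed once it reaches `bulkSize` (Airtable accepts at most 10
// records per request) or when the last input item is reached. The same pattern,
// reduced to its core — `sendBatch` is a hypothetical stand-in for apiRequest:
async function processInBatches<T>(
	items: T[],
	bulkSize: number,
	sendBatch: (batch: T[]) => Promise<void>,
): Promise<void> {
	const buffer: T[] = [];
	for (let i = 0; i < items.length; i++) {
		buffer.push(items[i]);
		// Flush when the buffer is full or this is the final item,
		// mirroring `rows.length === bulkSize || i === items.length - 1`
		if (buffer.length === bulkSize || i === items.length - 1) {
			await sendBatch(buffer);
			buffer.length = 0; // empty the buffer, like `rows.length = 0`
		}
	}
}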
- responseData = await apiRequest.call(this, requestMethod, endpoint, data, qs); + const data = { records: rows, typecast: (options.typecast) ? true : false }; - returnData.push(...responseData.records); + responseData = await apiRequest.call(this, requestMethod, endpoint, data, qs); + + returnData.push(...responseData.records); + + // empty rows + rows.length = 0; + } } } else { diff --git a/packages/nodes-base/nodes/Aws/Comprehend/AwsComprehend.node.ts b/packages/nodes-base/nodes/Aws/Comprehend/AwsComprehend.node.ts index 7223ddc282..e611586aa9 100644 --- a/packages/nodes-base/nodes/Aws/Comprehend/AwsComprehend.node.ts +++ b/packages/nodes-base/nodes/Aws/Comprehend/AwsComprehend.node.ts @@ -158,7 +158,7 @@ export class AwsComprehend implements INodeType { description: 'The text to send.', }, { - displayName: 'Simple', + displayName: 'Simplify Response', name: 'simple', type: 'boolean', displayOptions: { @@ -172,7 +172,7 @@ export class AwsComprehend implements INodeType { }, }, default: true, - description: 'When set to true a simplify version of the response will be used else the raw data.', + description: 'Return a simplified version of the response instead of the raw data.', }, { displayName: 'Additional Fields', diff --git a/packages/nodes-base/nodes/Baserow/Baserow.node.json b/packages/nodes-base/nodes/Baserow/Baserow.node.json new file mode 100644 index 0000000000..3ab9005110 --- /dev/null +++ b/packages/nodes-base/nodes/Baserow/Baserow.node.json @@ -0,0 +1,19 @@ +{ + "node": "n8n-nodes-base.baserow", + "nodeVersion": "1.0", + "codexVersion": "1.0", + "categories": ["Data & Storage"], + "resources": { + "credentialDocumentation": [ + { + "url": "https://docs.n8n.io/credentials/baserow" + } + ], + "primaryDocumentation": [ + { + "url": "https://docs.n8n.io/nodes/n8n-nodes-base.baserow/" + } + ], + "generic": [] + } +} diff --git a/packages/nodes-base/nodes/Baserow/Baserow.node.ts b/packages/nodes-base/nodes/Baserow/Baserow.node.ts new file mode 100644 index 0000000000..c3a26bc580 --- /dev/null +++ b/packages/nodes-base/nodes/Baserow/Baserow.node.ts @@ -0,0 +1,318 @@ +import { + IDataObject, + ILoadOptionsFunctions, + INodeExecutionData, + INodeType, + INodeTypeDescription, +} from 'n8n-workflow'; + +import { + IExecuteFunctions, +} from 'n8n-core'; + +import { + baserowApiRequest, + baserowApiRequestAllItems, + getJwtToken, + TableFieldMapper, + toOptions, +} from './GenericFunctions'; + +import { + operationFields +} from './OperationDescription'; + +import { + BaserowCredentials, + FieldsUiValues, + GetAllAdditionalOptions, + LoadedResource, + Operation, + Row, +} from './types'; + +export class Baserow implements INodeType { + description: INodeTypeDescription = { + displayName: 'Baserow', + name: 'baserow', + icon: 'file:baserow.svg', + group: ['output'], + version: 1, + description: 'Consume the Baserow API', + subtitle: '={{$parameter["operation"] + ":" + $parameter["resource"]}}', + defaults: { + name: 'Baserow', + color: '#00a2ce', + }, + inputs: ['main'], + outputs: ['main'], + credentials: [ + { + name: 'baserowApi', + required: true, + }, + ], + properties: [ + { + displayName: 'Resource', + name: 'resource', + type: 'options', + options: [ + { + name: 'Row', + value: 'row', + }, + ], + default: 'row', + description: 'Operation to perform', + }, + { + displayName: 'Operation', + name: 'operation', + type: 'options', + displayOptions: { + show: { + resource: [ + 'row', + ], + }, + }, + options: [ + { + name: 'Create', + value: 'create', + description: 'Create a row', + }, 
+ { + name: 'Delete', + value: 'delete', + description: 'Delete a row', + }, + { + name: 'Get', + value: 'get', + description: 'Retrieve a row', + }, + { + name: 'Get All', + value: 'getAll', + description: 'Retrieve all rows', + }, + { + name: 'Update', + value: 'update', + description: 'Update a row', + }, + ], + default: 'getAll', + description: 'Operation to perform', + }, + ...operationFields, + ], + }; + + methods = { + loadOptions: { + async getDatabaseIds(this: ILoadOptionsFunctions) { + const credentials = this.getCredentials('baserowApi') as BaserowCredentials; + const jwtToken = await getJwtToken.call(this, credentials); + const endpoint = '/api/applications/'; + const databases = await baserowApiRequest.call(this, 'GET', endpoint, {}, {}, jwtToken) as LoadedResource[]; + return toOptions(databases); + }, + + async getTableIds(this: ILoadOptionsFunctions) { + const credentials = this.getCredentials('baserowApi') as BaserowCredentials; + const jwtToken = await getJwtToken.call(this, credentials); + const databaseId = this.getNodeParameter('databaseId', 0) as string; + const endpoint = `/api/database/tables/database/${databaseId}`; + const tables = await baserowApiRequest.call(this, 'GET', endpoint, {}, {}, jwtToken) as LoadedResource[]; + return toOptions(tables); + }, + + async getTableFields(this: ILoadOptionsFunctions) { + const credentials = this.getCredentials('baserowApi') as BaserowCredentials; + const jwtToken = await getJwtToken.call(this, credentials); + const tableId = this.getNodeParameter('tableId', 0) as string; + const endpoint = `/api/database/fields/table/${tableId}/`; + const fields = await baserowApiRequest.call(this, 'GET', endpoint, {}, {}, jwtToken) as LoadedResource[]; + return toOptions(fields); + }, + }, + }; + + async execute(this: IExecuteFunctions): Promise { + const items = this.getInputData(); + const mapper = new TableFieldMapper(); + const returnData: IDataObject[] = []; + const operation = this.getNodeParameter('operation', 0) as Operation; + + const tableId = this.getNodeParameter('tableId', 0) as string; + const credentials = this.getCredentials('baserowApi') as BaserowCredentials; + const jwtToken = await getJwtToken.call(this, credentials); + const fields = await mapper.getTableFields.call(this, tableId, jwtToken); + mapper.createMappings(fields); + + for (let i = 0; i < items.length; i++) { + + try { + + if (operation === 'getAll') { + + // ---------------------------------- + // getAll + // ---------------------------------- + + // https://api.baserow.io/api/redoc/#operation/list_database_table_rows + + const { order, filters, filterType, search } = this.getNodeParameter('additionalOptions', 0) as GetAllAdditionalOptions; + + const qs: IDataObject = {}; + + if (order?.fields) { + qs['order_by'] = order.fields + .map(({ field, direction }) => `${direction}${mapper.setField(field)}`) + .join(','); + } + + if (filters?.fields) { + filters.fields.forEach(({ field, operator, value }) => { + qs[`filter__field_${mapper.setField(field)}__${operator}`] = value; + }); + } + + if (filterType) { + qs.filter_type = filterType; + } + + if (search) { + qs.search = search; + } + + const endpoint = `/api/database/rows/table/${tableId}/`; + const rows = await baserowApiRequestAllItems.call(this, 'GET', endpoint, {}, qs, jwtToken) as Row[]; + + rows.forEach(row => mapper.idsToNames(row)); + + returnData.push(...rows); + + } else if (operation === 'get') { + + // ---------------------------------- + // get + // ---------------------------------- + + // 
https://api.baserow.io/api/redoc/#operation/get_database_table_row + + const rowId = this.getNodeParameter('rowId', i) as string; + const endpoint = `/api/database/rows/table/${tableId}/${rowId}/`; + const row = await baserowApiRequest.call(this, 'GET', endpoint, {}, {}, jwtToken); + + mapper.idsToNames(row); + + returnData.push(row); + + } else if (operation === 'create') { + + // ---------------------------------- + // create + // ---------------------------------- + + // https://api.baserow.io/api/redoc/#operation/create_database_table_row + + const body: IDataObject = {}; + + const dataToSend = this.getNodeParameter('dataToSend', 0) as 'defineBelow' | 'autoMapInputData'; + + if (dataToSend === 'autoMapInputData') { + + const incomingKeys = Object.keys(items[i].json); + const rawInputsToIgnore = this.getNodeParameter('inputsToIgnore', i) as string; + const inputsToIgnore = rawInputsToIgnore.split(',').map(c => c.trim()); + + for (const key of incomingKeys) { + if (inputsToIgnore.includes(key)) continue; + body[key] = items[i].json[key]; + mapper.namesToIds(body); + } + } else { + const fields = this.getNodeParameter('fieldsUi.fieldValues', i, []) as FieldsUiValues; + for (const field of fields) { + body[`field_${field.fieldId}`] = field.fieldValue; + } + } + + const endpoint = `/api/database/rows/table/${tableId}/`; + const createdRow = await baserowApiRequest.call(this, 'POST', endpoint, body, {}, jwtToken); + + mapper.idsToNames(createdRow); + + returnData.push(createdRow); + + } else if (operation === 'update') { + + // ---------------------------------- + // update + // ---------------------------------- + + // https://api.baserow.io/api/redoc/#operation/update_database_table_row + + const rowId = this.getNodeParameter('rowId', i) as string; + + const body: IDataObject = {}; + + const dataToSend = this.getNodeParameter('dataToSend', 0) as 'defineBelow' | 'autoMapInputData'; + + if (dataToSend === 'autoMapInputData') { + + const incomingKeys = Object.keys(items[i].json); + const rawInputsToIgnore = this.getNodeParameter('inputsToIgnore', i) as string; + const inputsToIgnore = rawInputsToIgnore.split(',').map(c => c.trim()); + + for (const key of incomingKeys) { + if (inputsToIgnore.includes(key)) continue; + body[key] = items[i].json[key]; + mapper.namesToIds(body); + } + } else { + const fields = this.getNodeParameter('fieldsUi.fieldValues', i, []) as FieldsUiValues; + for (const field of fields) { + body[`field_${field.fieldId}`] = field.fieldValue; + } + } + + const endpoint = `/api/database/rows/table/${tableId}/${rowId}/`; + const updatedRow = await baserowApiRequest.call(this, 'PATCH', endpoint, body, {}, jwtToken); + + mapper.idsToNames(updatedRow); + + returnData.push(updatedRow); + + } else if (operation === 'delete') { + + // ---------------------------------- + // delete + // ---------------------------------- + + // https://api.baserow.io/api/redoc/#operation/delete_database_table_row + + const rowId = this.getNodeParameter('rowId', i) as string; + + const endpoint = `/api/database/rows/table/${tableId}/${rowId}/`; + await baserowApiRequest.call(this, 'DELETE', endpoint, {}, {}, jwtToken); + + returnData.push({ success: true }); + } + + } catch (error) { + if (this.continueOnFail()) { + returnData.push({ error: error.message }); + continue; + } + throw error; + } + } + + return [this.helpers.returnJsonArray(returnData)]; + } +} diff --git a/packages/nodes-base/nodes/Baserow/GenericFunctions.ts b/packages/nodes-base/nodes/Baserow/GenericFunctions.ts new file mode
100644 index 0000000000..e360b6192b --- /dev/null +++ b/packages/nodes-base/nodes/Baserow/GenericFunctions.ts @@ -0,0 +1,198 @@ +import { + IExecuteFunctions, +} from 'n8n-core'; + +import { + OptionsWithUri, +} from 'request'; + +import { + IDataObject, + ILoadOptionsFunctions, + NodeApiError, + NodeOperationError, +} from 'n8n-workflow'; + +import { + Accumulator, + BaserowCredentials, + LoadedResource, +} from './types'; + +/** + * Make a request to Baserow API. + */ +export async function baserowApiRequest( + this: IExecuteFunctions | ILoadOptionsFunctions, + method: string, + endpoint: string, + body: IDataObject = {}, + qs: IDataObject = {}, + jwtToken: string, +) { + const credentials = this.getCredentials('baserowApi') as BaserowCredentials; + + if (credentials === undefined) { + throw new NodeOperationError(this.getNode(), 'No credentials got returned!'); + } + + const options: OptionsWithUri = { + headers: { + Authorization: `JWT ${jwtToken}`, + }, + method, + body, + qs, + uri: `${credentials.host}${endpoint}`, + json: true, + }; + + if (Object.keys(qs).length === 0) { + delete options.qs; + } + + if (Object.keys(body).length === 0) { + delete options.body; + } + + try { + return await this.helpers.request!(options); + } catch (error) { + throw new NodeApiError(this.getNode(), error); + } +} + +/** + * Get all results from a paginated query to Baserow API. + */ +export async function baserowApiRequestAllItems( + this: IExecuteFunctions, + method: string, + endpoint: string, + body: IDataObject, + qs: IDataObject = {}, + jwtToken: string, +): Promise { + const returnData: IDataObject[] = []; + let responseData; + + qs.page = 1; + qs.size = 100; + + const returnAll = this.getNodeParameter('returnAll', 0, false) as boolean; + const limit = this.getNodeParameter('limit', 0, 0) as number; + + do { + responseData = await baserowApiRequest.call(this, method, endpoint, body, qs, jwtToken); + returnData.push(...responseData.results); + + if (!returnAll && returnData.length > limit) { + return returnData.slice(0, limit); + } + + qs.page += 1; + } while (responseData.next !== null); + + return returnData; +} + +/** + * Get a JWT token based on Baserow account username and password. + */ +export async function getJwtToken( + this: IExecuteFunctions | ILoadOptionsFunctions, + { username, password, host }: BaserowCredentials, +) { + const options: OptionsWithUri = { + method: 'POST', + body: { + username, + password, + }, + uri: `${host}/api/user/token-auth/`, + json: true, + }; + + try { + const { token } = await this.helpers.request!(options) as { token: string }; + return token; + } catch (error) { + throw new NodeApiError(this.getNode(), error); + } +} + +export async function getFieldNamesAndIds( + this: IExecuteFunctions, + tableId: string, + jwtToken: string, +) { + const endpoint = `/api/database/fields/table/${tableId}/`; + const response = await baserowApiRequest.call(this, 'GET', endpoint, {}, {}, jwtToken) as LoadedResource[]; + + return { + names: response.map((field) => field.name), + ids: response.map((field) => `field_${field.id}`), + }; +} + +export const toOptions = (items: LoadedResource[]) => + items.map(({ name, id }) => ({ name, value: id })); + +/** + * Responsible for mapping field IDs `field_n` to names and vice versa. 
+ */ +export class TableFieldMapper { + nameToIdMapping: Record = {}; + idToNameMapping: Record = {}; + mapIds = true; + + async getTableFields( + this: IExecuteFunctions, + table: string, + jwtToken: string, + ): Promise { + const endpoint = `/api/database/fields/table/${table}/`; + return await baserowApiRequest.call(this, 'GET', endpoint, {}, {}, jwtToken); + } + + createMappings(tableFields: LoadedResource[]) { + this.nameToIdMapping = this.createNameToIdMapping(tableFields); + this.idToNameMapping = this.createIdToNameMapping(tableFields); + } + + private createIdToNameMapping(responseData: LoadedResource[]) { + return responseData.reduce((acc, cur) => { + acc[`field_${cur.id}`] = cur.name; + return acc; + }, {}); + } + + private createNameToIdMapping(responseData: LoadedResource[]) { + return responseData.reduce((acc, cur) => { + acc[cur.name] = `field_${cur.id}`; + return acc; + }, {}); + } + + setField(field: string) { + return this.mapIds ? field : this.nameToIdMapping[field] ?? field; + } + + idsToNames(obj: Record) { + Object.entries(obj).forEach(([key, value]) => { + if (this.idToNameMapping[key] !== undefined) { + delete obj[key]; + obj[this.idToNameMapping[key]] = value; + } + }); + } + + namesToIds(obj: Record) { + Object.entries(obj).forEach(([key, value]) => { + if (this.nameToIdMapping[key] !== undefined) { + delete obj[key]; + obj[this.nameToIdMapping[key]] = value; + } + }); + } +} diff --git a/packages/nodes-base/nodes/Baserow/OperationDescription.ts b/packages/nodes-base/nodes/Baserow/OperationDescription.ts new file mode 100644 index 0000000000..76d20e0dab --- /dev/null +++ b/packages/nodes-base/nodes/Baserow/OperationDescription.ts @@ -0,0 +1,455 @@ +import { + INodeProperties, +} from 'n8n-workflow'; + +export const operationFields = [ + // ---------------------------------- + // shared + // ---------------------------------- + { + displayName: 'Database', + name: 'databaseId', + type: 'options', + default: '', + required: true, + description: 'Database to operate on', + typeOptions: { + loadOptionsMethod: 'getDatabaseIds', + }, + }, + { + displayName: 'Table', + name: 'tableId', + type: 'options', + default: '', + required: true, + description: 'Table to operate on', + typeOptions: { + loadOptionsDependsOn: [ + 'databaseId', + ], + loadOptionsMethod: 'getTableIds', + }, + }, + + // ---------------------------------- + // get + // ---------------------------------- + { + displayName: 'Row ID', + name: 'rowId', + type: 'string', + displayOptions: { + show: { + operation: [ + 'get', + ], + }, + }, + default: '', + required: true, + description: 'ID of the row to return', + }, + + // ---------------------------------- + // update + // ---------------------------------- + { + displayName: 'Row ID', + name: 'rowId', + type: 'string', + displayOptions: { + show: { + operation: [ + 'update', + ], + }, + }, + default: '', + required: true, + description: 'ID of the row to update', + }, + + // ---------------------------------- + // create/update + // ---------------------------------- + { + displayName: 'Data to Send', + name: 'dataToSend', + type: 'options', + options: [ + { + name: 'Auto-map Input Data to Columns', + value: 'autoMapInputData', + description: 'Use when node input properties match destination column names', + }, + { + name: 'Define Below for Each Column', + value: 'defineBelow', + description: 'Set the value for each destination column', + }, + ], + displayOptions: { + show: { + operation: [ + 'create', + 'update', + ], + }, + }, + default: 'defineBelow', 
+ description: 'Whether to insert the input data this node receives in the new row', + }, + { + displayName: 'Inputs to Ignore', + name: 'inputsToIgnore', + type: 'string', + displayOptions: { + show: { + operation: [ + 'create', + 'update', + ], + dataToSend: [ + 'autoMapInputData', + ], + }, + }, + default: '', + required: false, + description: 'List of input properties to avoid sending, separated by commas. Leave empty to send all properties.', + placeholder: 'Enter properties...', + }, + { + displayName: 'Fields to Send', + name: 'fieldsUi', + placeholder: 'Add Field', + type: 'fixedCollection', + typeOptions: { + multipleValueButtonText: 'Add Field to Send', + multipleValues: true, + }, + displayOptions: { + show: { + operation: [ + 'create', + 'update', + ], + dataToSend: [ + 'defineBelow', + ], + }, + }, + default: {}, + options: [ + { + displayName: 'Field', + name: 'fieldValues', + values: [ + { + displayName: 'Field ID', + name: 'fieldId', + type: 'options', + typeOptions: { + loadOptionsDependsOn: [ + 'tableId', + ], + loadOptionsMethod: 'getTableFields', + }, + default: '', + }, + { + displayName: 'Field Value', + name: 'fieldValue', + type: 'string', + default: '', + }, + ], + }, + ], + }, + + // ---------------------------------- + // delete + // ---------------------------------- + { + displayName: 'Row ID', + name: 'rowId', + type: 'string', + displayOptions: { + show: { + operation: [ + 'delete', + ], + }, + }, + default: '', + required: true, + description: 'ID of the row to delete', + }, + + // ---------------------------------- + // getAll + // ---------------------------------- + { + displayName: 'Return All', + name: 'returnAll', + type: 'boolean', + default: false, + description: 'Whether to return all results or only up to a given limit', + displayOptions: { + show: { + operation: [ + 'getAll', + ], + }, + }, + }, + { + displayName: 'Limit', + name: 'limit', + type: 'number', + default: 50, + description: 'How many results to return', + typeOptions: { + minValue: 1, + maxValue: 100, + }, + displayOptions: { + show: { + operation: [ + 'getAll', + ], + returnAll: [ + false, + ], + }, + }, + }, + { + displayName: 'Options', + name: 'additionalOptions', + type: 'collection', + placeholder: 'Add Option', + default: {}, + displayOptions: { + show: { + operation: [ + 'getAll', + ], + }, + }, + options: [ + { + displayName: 'Filters', + name: 'filters', + placeholder: 'Add Filter', + description: 'Filter rows based on comparison operators', + type: 'fixedCollection', + typeOptions: { + multipleValues: true, + }, + default: {}, + options: [ + { + name: 'fields', + displayName: 'Field', + values: [ + { + displayName: 'Field', + name: 'field', + type: 'options', + default: '', + description: 'Field to compare', + typeOptions: { + loadOptionsDependsOn: [ + 'tableId', + ], + loadOptionsMethod: 'getTableFields', + }, + }, + { + displayName: 'Filter', + name: 'operator', + description: 'Operator to compare field and value with', + type: 'options', + options: [ + { + name: 'Equal', + value: 'equal', + description: 'Field is equal to value', + }, + { + name: 'Not Equal', + value: 'not_equal', + description: 'Field is not equal to value', + }, + { + name: 'Date Equal', + value: 'date_equal', + description: 'Field is date. Format: \'YYYY-MM-DD\'', + }, + { + name: 'Date Not Equal', + value: 'date_not_equal', + description: 'Field is not date. Format: \'YYYY-MM-DD\'', + }, + { + name: 'Date Equals Today', + value: 'date_equals_today', + description: 'Field is today. 
Format: string', }, + { + name: 'Date Equals Month', + value: 'date_equals_month', + description: 'Field in this month. Format: string', }, + { + name: 'Date Equals Year', + value: 'date_equals_year', + description: 'Field in this year. Format: string', }, + { + name: 'Contains', + value: 'contains', + description: 'Field contains value', + }, + { + name: 'File Name Contains', + value: 'filename_contains', + description: 'Field filename contains value', + }, + { + name: 'Contains Not', + value: 'contains_not', + description: 'Field does not contain value', + }, + { + name: 'Higher Than', + value: 'higher_than', + description: 'Field is higher than value', + }, + { + name: 'Lower Than', + value: 'lower_than', + description: 'Field is lower than value', + }, + { + name: 'Single Select Equal', + value: 'single_select_equal', + description: 'Field selected option is value', + }, + { + name: 'Single Select Not Equal', + value: 'single_select_not_equal', + description: 'Field selected option is not value', + }, + { + name: 'Is True', + value: 'boolean', + description: 'Boolean field is true', + }, + { + name: 'Is Empty', + value: 'empty', + description: 'Field is empty', + }, + { + name: 'Not Empty', + value: 'not_empty', + description: 'Field is not empty', + }, + ], + default: 'equal', + }, + { + displayName: 'Value', + name: 'value', + type: 'string', + default: '', + description: 'Value to compare to', + }, + ], + }, + ], + }, + { + displayName: 'Filter Type', + name: 'filterType', + type: 'options', + options: [ + { + name: 'AND', + value: 'AND', + description: 'Indicates that the rows must match all the provided filters', + }, + { + name: 'OR', + value: 'OR', + description: 'Indicates that the rows only have to match one of the filters', + }, + ], + default: 'AND', + description: 'This works only if two or more filters are provided. Defaults to AND', + }, + { + displayName: 'Search Term', + name: 'search', + type: 'string', + default: '', + description: 'Text to match (can be in any column)', + }, + { + displayName: 'Sorting', + name: 'order', + placeholder: 'Add Sort Order', + description: 'Set the sort order of the result rows', + type: 'fixedCollection', + typeOptions: { + multipleValues: true, + }, + default: {}, + options: [ + { + name: 'Fields', + displayName: 'Field', + values: [ + { + displayName: 'Field Name', + name: 'field', + type: 'options', + default: '', + description: 'Field name to sort by', + typeOptions: { + loadOptionsDependsOn: [ + 'tableId', + ], + loadOptionsMethod: 'getTableFields', + }, + }, + { + displayName: 'Direction', + name: 'direction', + type: 'options', + options: [ + { + name: 'ASC', + value: '', + description: 'Sort in ascending order', + }, + { + name: 'DESC', + value: '-', + description: 'Sort in descending order', + }, + ], + default: '', + description: 'Sort direction, either ascending or descending', + }, + ], + }, + ], + }, + ], + }, +] as INodeProperties[]; diff --git a/packages/nodes-base/nodes/Baserow/baserow.svg b/packages/nodes-base/nodes/Baserow/baserow.svg new file mode 100644 index 0000000000..a184ad4b99 --- /dev/null +++ b/packages/nodes-base/nodes/Baserow/baserow.svg @@ -0,0 +1,143 @@ [baserow.svg: new 143-line SVG logo asset; the markup was stripped in extraction and only the image/svg+xml media type survived]
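// For reference, this is how the getAll options defined above translate into the
// query string the node sends to Baserow's "list rows" endpoint (compare the
// execute() code earlier in this diff). A self-contained sketch under stated
// assumptions: `buildListRowsQuery` is a hypothetical helper, and the field
// values are assumed to already be in `field_n` form (e.g. resolved via
// TableFieldMapper.setField()).
interface SortRule { field: string; direction: '' | '-'; }              // '' = ASC, '-' = DESC
interface FilterRule { field: string; operator: string; value: string; }

function buildListRowsQuery(
	order: SortRule[],
	filters: FilterRule[],
	filterType: 'AND' | 'OR',
	search: string,
): Record<string, string> {
	const qs: Record<string, string> = {};
	if (order.length) {
		// e.g. order_by=field_1,-field_2
		qs['order_by'] = order.map(({ field, direction }) => `${direction}${field}`).join(',');
	}
	for (const { field, operator, value } of filters) {
		// e.g. filter__field_1__contains=foo
		qs[`filter__${field}__${operator}`] = value;
	}
	// Baserow only applies filter_type when two or more filters are present
	if (filterType) qs['filter_type'] = filterType;
	if (search) qs['search'] = search;
	return qs;
}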