diff --git a/packages/cli/package.json b/packages/cli/package.json index 4b0d0526d7..f4924adbe7 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -1,6 +1,6 @@ { "name": "n8n", - "version": "0.146.0", + "version": "0.147.1", "description": "n8n Workflow Automation Tool", "license": "SEE LICENSE IN LICENSE.md", "homepage": "https://n8n.io", @@ -112,7 +112,7 @@ "mysql2": "~2.3.0", "n8n-core": "~0.91.0", "n8n-editor-ui": "~0.114.0", - "n8n-nodes-base": "~0.143.0", + "n8n-nodes-base": "~0.144.1", "n8n-workflow": "~0.74.0", "oauth-1.0a": "^2.2.6", "open": "^7.0.0", diff --git a/packages/cli/src/databases/MigrationHelpers.ts b/packages/cli/src/databases/MigrationHelpers.ts new file mode 100644 index 0000000000..7db121bd85 --- /dev/null +++ b/packages/cli/src/databases/MigrationHelpers.ts @@ -0,0 +1,39 @@ +import { QueryRunner } from 'typeorm'; + +export class MigrationHelpers { + queryRunner: QueryRunner; + + constructor(queryRunner: QueryRunner) { + this.queryRunner = queryRunner; + } + + // runs an operation sequentially on chunks of a query that returns a potentially large array + /* eslint-disable no-await-in-loop */ + async runChunked( + query: string, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + operation: (results: any[]) => Promise<void>, + limit = 100, + ): Promise<void> { + let offset = 0; + let chunkedQuery: string; + let chunkedQueryResults: unknown[]; + + do { + chunkedQuery = this.chunkQuery(query, limit, offset); + chunkedQueryResults = (await this.queryRunner.query(chunkedQuery)) as unknown[]; + // pass a copy to prevent errors from mutation + await operation([...chunkedQueryResults]); + offset += limit; + } while (chunkedQueryResults.length === limit); + } + /* eslint-enable no-await-in-loop */ + + private chunkQuery(query: string, limit: number, offset = 0): string { + return ` + ${query} + LIMIT ${limit} + OFFSET ${offset} + `; + } +} diff --git a/packages/cli/src/databases/mysqldb/migrations/1630451444017-UpdateWorkflowCredentials.ts b/packages/cli/src/databases/mysqldb/migrations/1630451444017-UpdateWorkflowCredentials.ts index 0012ee0aa1..0061052c2a 100644 --- a/packages/cli/src/databases/mysqldb/migrations/1630451444017-UpdateWorkflowCredentials.ts +++ b/packages/cli/src/databases/mysqldb/migrations/1630451444017-UpdateWorkflowCredentials.ts @@ -1,5 +1,6 @@ import { MigrationInterface, QueryRunner } from 'typeorm'; import config = require('../../../../config'); +import { MigrationHelpers } from '../../MigrationHelpers'; // replacing the credentials in workflows and execution // `nodeType: name` changes to `nodeType: { id, name }` @@ -8,58 +9,100 @@ export class UpdateWorkflowCredentials1630451444017 implements MigrationInterfac name = 'UpdateWorkflowCredentials1630451444017'; public async up(queryRunner: QueryRunner): Promise { + console.log('Start migration', this.name); + console.time(this.name); const tablePrefix = config.get('database.tablePrefix'); + const helpers = new MigrationHelpers(queryRunner); const credentialsEntities = await queryRunner.query(` SELECT id, name, type FROM ${tablePrefix}credentials_entity `); - const workflows = await queryRunner.query(` + const workflowsQuery = ` SELECT id, nodes FROM ${tablePrefix}workflow_entity - `); + `; // @ts-ignore - workflows.forEach(async (workflow) => { - const nodes = workflow.nodes; - let credentialsUpdated = false; - // @ts-ignore - nodes.forEach((node) => { - if (node.credentials) { - const allNodeCredentials = Object.entries(node.credentials); - for (const [type, name] of 
allNodeCredentials) { - if (typeof name === 'string') { - // @ts-ignore - const matchingCredentials = credentialsEntities.find( - // @ts-ignore - (credentials) => credentials.name === name && credentials.type === type, - ); - node.credentials[type] = { id: matchingCredentials?.id.toString() || null, name }; - credentialsUpdated = true; + await helpers.runChunked(workflowsQuery, (workflows) => { + workflows.forEach(async (workflow) => { + const nodes = workflow.nodes; + let credentialsUpdated = false; + // @ts-ignore + nodes.forEach((node) => { + if (node.credentials) { + const allNodeCredentials = Object.entries(node.credentials); + for (const [type, name] of allNodeCredentials) { + if (typeof name === 'string') { + const matchingCredentials = credentialsEntities.find( + // @ts-ignore + (credentials) => credentials.name === name && credentials.type === type, + ); + node.credentials[type] = { id: matchingCredentials?.id.toString() || null, name }; + credentialsUpdated = true; + } } } + }); + if (credentialsUpdated) { + const [updateQuery, updateParams] = + queryRunner.connection.driver.escapeQueryWithParameters( + ` + UPDATE ${tablePrefix}workflow_entity + SET nodes = :nodes + WHERE id = '${workflow.id}' + `, + { nodes: JSON.stringify(nodes) }, + {}, + ); + + queryRunner.query(updateQuery, updateParams); } }); - if (credentialsUpdated) { - const [updateQuery, updateParams] = queryRunner.connection.driver.escapeQueryWithParameters( - ` - UPDATE ${tablePrefix}workflow_entity - SET nodes = :nodes - WHERE id = '${workflow.id}' - `, - { nodes: JSON.stringify(nodes) }, - {}, - ); - - await queryRunner.query(updateQuery, updateParams); - } }); - const waitingExecutions = await queryRunner.query(` + const waitingExecutionsQuery = ` SELECT id, workflowData FROM ${tablePrefix}execution_entity WHERE waitTill IS NOT NULL AND finished = 0 - `); + `; + // @ts-ignore + await helpers.runChunked(waitingExecutionsQuery, (waitingExecutions) => { + waitingExecutions.forEach(async (execution) => { + const data = execution.workflowData; + let credentialsUpdated = false; + // @ts-ignore + data.nodes.forEach((node) => { + if (node.credentials) { + const allNodeCredentials = Object.entries(node.credentials); + for (const [type, name] of allNodeCredentials) { + if (typeof name === 'string') { + const matchingCredentials = credentialsEntities.find( + // @ts-ignore + (credentials) => credentials.name === name && credentials.type === type, + ); + node.credentials[type] = { id: matchingCredentials?.id.toString() || null, name }; + credentialsUpdated = true; + } + } + } + }); + if (credentialsUpdated) { + const [updateQuery, updateParams] = + queryRunner.connection.driver.escapeQueryWithParameters( + ` + UPDATE ${tablePrefix}execution_entity + SET workflowData = :data + WHERE id = '${execution.id}' + `, + { data: JSON.stringify(data) }, + {}, + ); + + queryRunner.query(updateQuery, updateParams); + } + }); + }); const retryableExecutions = await queryRunner.query(` SELECT id, workflowData @@ -68,8 +111,8 @@ export class UpdateWorkflowCredentials1630451444017 implements MigrationInterfac ORDER BY startedAt DESC LIMIT 200 `); - - [...waitingExecutions, ...retryableExecutions].forEach(async (execution) => { + // @ts-ignore + retryableExecutions.forEach(async (execution) => { const data = execution.workflowData; let credentialsUpdated = false; // @ts-ignore @@ -78,7 +121,6 @@ export class UpdateWorkflowCredentials1630451444017 implements MigrationInterfac const allNodeCredentials = Object.entries(node.credentials); for 
(const [type, name] of allNodeCredentials) { if (typeof name === 'string') { - // @ts-ignore const matchingCredentials = credentialsEntities.find( // @ts-ignore (credentials) => credentials.name === name && credentials.type === type, @@ -92,77 +134,124 @@ export class UpdateWorkflowCredentials1630451444017 implements MigrationInterfac if (credentialsUpdated) { const [updateQuery, updateParams] = queryRunner.connection.driver.escapeQueryWithParameters( ` - UPDATE ${tablePrefix}execution_entity - SET workflowData = :data - WHERE id = '${execution.id}' - `, + UPDATE ${tablePrefix}execution_entity + SET workflowData = :data + WHERE id = '${execution.id}' + `, { data: JSON.stringify(data) }, {}, ); - await queryRunner.query(updateQuery, updateParams); + queryRunner.query(updateQuery, updateParams); } }); + console.timeEnd(this.name); } public async down(queryRunner: QueryRunner): Promise { const tablePrefix = config.get('database.tablePrefix'); + const helpers = new MigrationHelpers(queryRunner); const credentialsEntities = await queryRunner.query(` SELECT id, name, type FROM ${tablePrefix}credentials_entity `); - const workflows = await queryRunner.query(` + const workflowsQuery = ` SELECT id, nodes FROM ${tablePrefix}workflow_entity - `); + `; // @ts-ignore - workflows.forEach(async (workflow) => { - const nodes = workflow.nodes; - let credentialsUpdated = false; - // @ts-ignore - nodes.forEach((node) => { - if (node.credentials) { - const allNodeCredentials = Object.entries(node.credentials); - for (const [type, creds] of allNodeCredentials) { - if (typeof creds === 'object') { - // @ts-ignore - const matchingCredentials = credentialsEntities.find( - // @ts-ignore - (credentials) => credentials.id === creds.id && credentials.type === type, - ); - if (matchingCredentials) { - node.credentials[type] = matchingCredentials.name; - } else { - // @ts-ignore - node.credentials[type] = creds.name; + await helpers.runChunked(workflowsQuery, (workflows) => { + workflows.forEach(async (workflow) => { + const nodes = workflow.nodes; + let credentialsUpdated = false; + // @ts-ignore + nodes.forEach((node) => { + if (node.credentials) { + const allNodeCredentials = Object.entries(node.credentials); + for (const [type, creds] of allNodeCredentials) { + if (typeof creds === 'object') { + const matchingCredentials = credentialsEntities.find( + // @ts-ignore + (credentials) => credentials.id === creds.id && credentials.type === type, + ); + if (matchingCredentials) { + node.credentials[type] = matchingCredentials.name; + } else { + // @ts-ignore + node.credentials[type] = creds.name; + } + credentialsUpdated = true; } - credentialsUpdated = true; } } + }); + if (credentialsUpdated) { + const [updateQuery, updateParams] = + queryRunner.connection.driver.escapeQueryWithParameters( + ` + UPDATE ${tablePrefix}workflow_entity + SET nodes = :nodes + WHERE id = '${workflow.id}' + `, + { nodes: JSON.stringify(nodes) }, + {}, + ); + + queryRunner.query(updateQuery, updateParams); } }); - if (credentialsUpdated) { - const [updateQuery, updateParams] = queryRunner.connection.driver.escapeQueryWithParameters( - ` - UPDATE ${tablePrefix}workflow_entity - SET nodes = :nodes - WHERE id = '${workflow.id}' - `, - { nodes: JSON.stringify(nodes) }, - {}, - ); - - await queryRunner.query(updateQuery, updateParams); - } }); - const waitingExecutions = await queryRunner.query(` + const waitingExecutionsQuery = ` SELECT id, workflowData FROM ${tablePrefix}execution_entity WHERE waitTill IS NOT NULL AND finished = 0 - `); + `; + // 
@ts-ignore + await helpers.runChunked(waitingExecutionsQuery, (waitingExecutions) => { + waitingExecutions.forEach(async (execution) => { + const data = execution.workflowData; + let credentialsUpdated = false; + // @ts-ignore + data.nodes.forEach((node) => { + if (node.credentials) { + const allNodeCredentials = Object.entries(node.credentials); + for (const [type, creds] of allNodeCredentials) { + if (typeof creds === 'object') { + // @ts-ignore + const matchingCredentials = credentialsEntities.find( + // @ts-ignore + (credentials) => credentials.id === creds.id && credentials.type === type, + ); + if (matchingCredentials) { + node.credentials[type] = matchingCredentials.name; + } else { + // @ts-ignore + node.credentials[type] = creds.name; + } + credentialsUpdated = true; + } + } + } + }); + if (credentialsUpdated) { + const [updateQuery, updateParams] = + queryRunner.connection.driver.escapeQueryWithParameters( + ` + UPDATE ${tablePrefix}execution_entity + SET workflowData = :data + WHERE id = '${execution.id}' + `, + { data: JSON.stringify(data) }, + {}, + ); + + queryRunner.query(updateQuery, updateParams); + } + }); + }); const retryableExecutions = await queryRunner.query(` SELECT id, workflowData @@ -171,8 +260,8 @@ export class UpdateWorkflowCredentials1630451444017 implements MigrationInterfac ORDER BY startedAt DESC LIMIT 200 `); - - [...waitingExecutions, ...retryableExecutions].forEach(async (execution) => { + // @ts-ignore + retryableExecutions.forEach(async (execution) => { const data = execution.workflowData; let credentialsUpdated = false; // @ts-ignore @@ -200,15 +289,15 @@ export class UpdateWorkflowCredentials1630451444017 implements MigrationInterfac if (credentialsUpdated) { const [updateQuery, updateParams] = queryRunner.connection.driver.escapeQueryWithParameters( ` - UPDATE ${tablePrefix}execution_entity - SET workflowData = :data - WHERE id = '${execution.id}' - `, + UPDATE ${tablePrefix}execution_entity + SET workflowData = :data + WHERE id = '${execution.id}' + `, { data: JSON.stringify(data) }, {}, ); - await queryRunner.query(updateQuery, updateParams); + queryRunner.query(updateQuery, updateParams); } }); } diff --git a/packages/cli/src/databases/postgresdb/migrations/1630419189837-UpdateWorkflowCredentials.ts b/packages/cli/src/databases/postgresdb/migrations/1630419189837-UpdateWorkflowCredentials.ts index 357d7c2974..ad3e44f0e6 100644 --- a/packages/cli/src/databases/postgresdb/migrations/1630419189837-UpdateWorkflowCredentials.ts +++ b/packages/cli/src/databases/postgresdb/migrations/1630419189837-UpdateWorkflowCredentials.ts @@ -1,5 +1,6 @@ import { MigrationInterface, QueryRunner } from 'typeorm'; import config = require('../../../../config'); +import { MigrationHelpers } from '../../MigrationHelpers'; // replacing the credentials in workflows and execution // `nodeType: name` changes to `nodeType: { id, name }` @@ -8,62 +9,104 @@ export class UpdateWorkflowCredentials1630419189837 implements MigrationInterfac name = 'UpdateWorkflowCredentials1630419189837'; public async up(queryRunner: QueryRunner): Promise { + console.log('Start migration', this.name); + console.time(this.name); let tablePrefix = config.get('database.tablePrefix'); const schema = config.get('database.postgresdb.schema'); if (schema) { tablePrefix = schema + '.' 
+ tablePrefix; } + const helpers = new MigrationHelpers(queryRunner); const credentialsEntities = await queryRunner.query(` SELECT id, name, type FROM ${tablePrefix}credentials_entity `); - const workflows = await queryRunner.query(` + const workflowsQuery = ` SELECT id, nodes FROM ${tablePrefix}workflow_entity - `); + `; // @ts-ignore - workflows.forEach(async (workflow) => { - const nodes = workflow.nodes; - let credentialsUpdated = false; - // @ts-ignore - nodes.forEach((node) => { - if (node.credentials) { - const allNodeCredentials = Object.entries(node.credentials); - for (const [type, name] of allNodeCredentials) { - if (typeof name === 'string') { - // @ts-ignore - const matchingCredentials = credentialsEntities.find( - // @ts-ignore - (credentials) => credentials.name === name && credentials.type === type, - ); - node.credentials[type] = { id: matchingCredentials?.id.toString() || null, name }; - credentialsUpdated = true; + await helpers.runChunked(workflowsQuery, (workflows) => { + workflows.forEach(async (workflow) => { + const nodes = workflow.nodes; + let credentialsUpdated = false; + // @ts-ignore + nodes.forEach((node) => { + if (node.credentials) { + const allNodeCredentials = Object.entries(node.credentials); + for (const [type, name] of allNodeCredentials) { + if (typeof name === 'string') { + const matchingCredentials = credentialsEntities.find( + // @ts-ignore + (credentials) => credentials.name === name && credentials.type === type, + ); + node.credentials[type] = { id: matchingCredentials?.id.toString() || null, name }; + credentialsUpdated = true; + } } } + }); + if (credentialsUpdated) { + const [updateQuery, updateParams] = + queryRunner.connection.driver.escapeQueryWithParameters( + ` + UPDATE ${tablePrefix}workflow_entity + SET nodes = :nodes + WHERE id = '${workflow.id}' + `, + { nodes: JSON.stringify(nodes) }, + {}, + ); + + queryRunner.query(updateQuery, updateParams); } }); - if (credentialsUpdated) { - const [updateQuery, updateParams] = queryRunner.connection.driver.escapeQueryWithParameters( - ` - UPDATE ${tablePrefix}workflow_entity - SET nodes = :nodes - WHERE id = '${workflow.id}' - `, - { nodes: JSON.stringify(nodes) }, - {}, - ); - - await queryRunner.query(updateQuery, updateParams); - } }); - const waitingExecutions = await queryRunner.query(` + const waitingExecutionsQuery = ` SELECT id, "workflowData" FROM ${tablePrefix}execution_entity WHERE "waitTill" IS NOT NULL AND finished = FALSE - `); + `; + // @ts-ignore + await helpers.runChunked(waitingExecutionsQuery, (waitingExecutions) => { + waitingExecutions.forEach(async (execution) => { + const data = execution.workflowData; + let credentialsUpdated = false; + // @ts-ignore + data.nodes.forEach((node) => { + if (node.credentials) { + const allNodeCredentials = Object.entries(node.credentials); + for (const [type, name] of allNodeCredentials) { + if (typeof name === 'string') { + const matchingCredentials = credentialsEntities.find( + // @ts-ignore + (credentials) => credentials.name === name && credentials.type === type, + ); + node.credentials[type] = { id: matchingCredentials?.id.toString() || null, name }; + credentialsUpdated = true; + } + } + } + }); + if (credentialsUpdated) { + const [updateQuery, updateParams] = + queryRunner.connection.driver.escapeQueryWithParameters( + ` + UPDATE ${tablePrefix}execution_entity + SET "workflowData" = :data + WHERE id = '${execution.id}' + `, + { data: JSON.stringify(data) }, + {}, + ); + + queryRunner.query(updateQuery, updateParams); + } + }); + }); 
const retryableExecutions = await queryRunner.query(` SELECT id, "workflowData" @@ -73,7 +116,8 @@ export class UpdateWorkflowCredentials1630419189837 implements MigrationInterfac LIMIT 200 `); - [...waitingExecutions, ...retryableExecutions].forEach(async (execution) => { + // @ts-ignore + retryableExecutions.forEach(async (execution) => { const data = execution.workflowData; let credentialsUpdated = false; // @ts-ignore @@ -104,9 +148,10 @@ export class UpdateWorkflowCredentials1630419189837 implements MigrationInterfac {}, ); - await queryRunner.query(updateQuery, updateParams); + queryRunner.query(updateQuery, updateParams); } }); + console.timeEnd(this.name); } public async down(queryRunner: QueryRunner): Promise { @@ -115,62 +160,109 @@ export class UpdateWorkflowCredentials1630419189837 implements MigrationInterfac if (schema) { tablePrefix = schema + '.' + tablePrefix; } + const helpers = new MigrationHelpers(queryRunner); const credentialsEntities = await queryRunner.query(` SELECT id, name, type FROM ${tablePrefix}credentials_entity `); - const workflows = await queryRunner.query(` + const workflowsQuery = ` SELECT id, nodes FROM ${tablePrefix}workflow_entity - `); + `; // @ts-ignore - workflows.forEach(async (workflow) => { - const nodes = workflow.nodes; - let credentialsUpdated = false; - // @ts-ignore - nodes.forEach((node) => { - if (node.credentials) { - const allNodeCredentials = Object.entries(node.credentials); - for (const [type, creds] of allNodeCredentials) { - if (typeof creds === 'object') { - // @ts-ignore - const matchingCredentials = credentialsEntities.find( + await helpers.runChunked(workflowsQuery, (workflows) => { + workflows.forEach(async (workflow) => { + const nodes = workflow.nodes; + let credentialsUpdated = false; + // @ts-ignore + nodes.forEach((node) => { + if (node.credentials) { + const allNodeCredentials = Object.entries(node.credentials); + for (const [type, creds] of allNodeCredentials) { + if (typeof creds === 'object') { // @ts-ignore - (credentials) => credentials.id === creds.id && credentials.type === type, - ); - if (matchingCredentials) { - node.credentials[type] = matchingCredentials.name; - } else { - // @ts-ignore - node.credentials[type] = creds.name; + const matchingCredentials = credentialsEntities.find( + // @ts-ignore + (credentials) => credentials.id === creds.id && credentials.type === type, + ); + if (matchingCredentials) { + node.credentials[type] = matchingCredentials.name; + } else { + // @ts-ignore + node.credentials[type] = creds.name; + } + credentialsUpdated = true; } - credentialsUpdated = true; } } + }); + if (credentialsUpdated) { + const [updateQuery, updateParams] = + queryRunner.connection.driver.escapeQueryWithParameters( + ` + UPDATE ${tablePrefix}workflow_entity + SET nodes = :nodes + WHERE id = '${workflow.id}' + `, + { nodes: JSON.stringify(nodes) }, + {}, + ); + + queryRunner.query(updateQuery, updateParams); } }); - if (credentialsUpdated) { - const [updateQuery, updateParams] = queryRunner.connection.driver.escapeQueryWithParameters( - ` - UPDATE ${tablePrefix}workflow_entity - SET nodes = :nodes - WHERE id = '${workflow.id}' - `, - { nodes: JSON.stringify(nodes) }, - {}, - ); - - await queryRunner.query(updateQuery, updateParams); - } }); - const waitingExecutions = await queryRunner.query(` + const waitingExecutionsQuery = ` SELECT id, "workflowData" FROM ${tablePrefix}execution_entity WHERE "waitTill" IS NOT NULL AND finished = FALSE - `); + `; + // @ts-ignore + await 
helpers.runChunked(waitingExecutionsQuery, (waitingExecutions) => { + waitingExecutions.forEach(async (execution) => { + const data = execution.workflowData; + let credentialsUpdated = false; + // @ts-ignore + data.nodes.forEach((node) => { + if (node.credentials) { + const allNodeCredentials = Object.entries(node.credentials); + for (const [type, creds] of allNodeCredentials) { + if (typeof creds === 'object') { + // @ts-ignore + const matchingCredentials = credentialsEntities.find( + // @ts-ignore + (credentials) => credentials.id === creds.id && credentials.type === type, + ); + if (matchingCredentials) { + node.credentials[type] = matchingCredentials.name; + } else { + // @ts-ignore + node.credentials[type] = creds.name; + } + credentialsUpdated = true; + } + } + } + }); + if (credentialsUpdated) { + const [updateQuery, updateParams] = + queryRunner.connection.driver.escapeQueryWithParameters( + ` + UPDATE ${tablePrefix}execution_entity + SET "workflowData" = :data + WHERE id = '${execution.id}' + `, + { data: JSON.stringify(data) }, + {}, + ); + + queryRunner.query(updateQuery, updateParams); + } + }); + }); const retryableExecutions = await queryRunner.query(` SELECT id, "workflowData" @@ -179,8 +271,8 @@ export class UpdateWorkflowCredentials1630419189837 implements MigrationInterfac ORDER BY "startedAt" DESC LIMIT 200 `); - - [...waitingExecutions, ...retryableExecutions].forEach(async (execution) => { + // @ts-ignore + retryableExecutions.forEach(async (execution) => { const data = execution.workflowData; let credentialsUpdated = false; // @ts-ignore @@ -208,15 +300,15 @@ export class UpdateWorkflowCredentials1630419189837 implements MigrationInterfac if (credentialsUpdated) { const [updateQuery, updateParams] = queryRunner.connection.driver.escapeQueryWithParameters( ` - UPDATE ${tablePrefix}execution_entity - SET "workflowData" = :data - WHERE id = '${execution.id}' - `, + UPDATE ${tablePrefix}execution_entity + SET "workflowData" = :data + WHERE id = '${execution.id}' + `, { data: JSON.stringify(data) }, {}, ); - await queryRunner.query(updateQuery, updateParams); + queryRunner.query(updateQuery, updateParams); } }); } diff --git a/packages/cli/src/databases/sqlite/migrations/1630330987096-UpdateWorkflowCredentials.ts b/packages/cli/src/databases/sqlite/migrations/1630330987096-UpdateWorkflowCredentials.ts index f2a6f0a19a..147e5e49b2 100644 --- a/packages/cli/src/databases/sqlite/migrations/1630330987096-UpdateWorkflowCredentials.ts +++ b/packages/cli/src/databases/sqlite/migrations/1630330987096-UpdateWorkflowCredentials.ts @@ -1,5 +1,6 @@ import { MigrationInterface, QueryRunner } from 'typeorm'; import config = require('../../../../config'); +import { MigrationHelpers } from '../../MigrationHelpers'; // replacing the credentials in workflows and execution // `nodeType: name` changes to `nodeType: { id, name }` @@ -8,58 +9,101 @@ export class UpdateWorkflowCredentials1630330987096 implements MigrationInterfac name = 'UpdateWorkflowCredentials1630330987096'; public async up(queryRunner: QueryRunner): Promise { + console.log('Start migration', this.name); + console.time(this.name); const tablePrefix = config.get('database.tablePrefix'); + const helpers = new MigrationHelpers(queryRunner); const credentialsEntities = await queryRunner.query(` SELECT id, name, type FROM "${tablePrefix}credentials_entity" `); - const workflows = await queryRunner.query(` + const workflowsQuery = ` SELECT id, nodes FROM "${tablePrefix}workflow_entity" - `); + `; + // @ts-ignore - 
workflows.forEach(async (workflow) => { - const nodes = JSON.parse(workflow.nodes); - let credentialsUpdated = false; - // @ts-ignore - nodes.forEach((node) => { - if (node.credentials) { - const allNodeCredentials = Object.entries(node.credentials); - for (const [type, name] of allNodeCredentials) { - if (typeof name === 'string') { - // @ts-ignore - const matchingCredentials = credentialsEntities.find( - // @ts-ignore - (credentials) => credentials.name === name && credentials.type === type, - ); - node.credentials[type] = { id: matchingCredentials?.id || null, name }; - credentialsUpdated = true; + await helpers.runChunked(workflowsQuery, (workflows) => { + workflows.forEach(async (workflow) => { + const nodes = JSON.parse(workflow.nodes); + let credentialsUpdated = false; + // @ts-ignore + nodes.forEach((node) => { + if (node.credentials) { + const allNodeCredentials = Object.entries(node.credentials); + for (const [type, name] of allNodeCredentials) { + if (typeof name === 'string') { + const matchingCredentials = credentialsEntities.find( + // @ts-ignore + (credentials) => credentials.name === name && credentials.type === type, + ); + node.credentials[type] = { id: matchingCredentials?.id || null, name }; + credentialsUpdated = true; + } } } + }); + if (credentialsUpdated) { + const [updateQuery, updateParams] = + queryRunner.connection.driver.escapeQueryWithParameters( + ` + UPDATE "${tablePrefix}workflow_entity" + SET nodes = :nodes + WHERE id = '${workflow.id}' + `, + { nodes: JSON.stringify(nodes) }, + {}, + ); + + queryRunner.query(updateQuery, updateParams); } }); - if (credentialsUpdated) { - const [updateQuery, updateParams] = queryRunner.connection.driver.escapeQueryWithParameters( - ` - UPDATE "${tablePrefix}workflow_entity" - SET nodes = :nodes - WHERE id = '${workflow.id}' - `, - { nodes: JSON.stringify(nodes) }, - {}, - ); - - await queryRunner.query(updateQuery, updateParams); - } }); - const waitingExecutions = await queryRunner.query(` + const waitingExecutionsQuery = ` SELECT id, "workflowData" FROM "${tablePrefix}execution_entity" WHERE "waitTill" IS NOT NULL AND finished = 0 - `); + `; + // @ts-ignore + await helpers.runChunked(waitingExecutionsQuery, (waitingExecutions) => { + waitingExecutions.forEach(async (execution) => { + const data = JSON.parse(execution.workflowData); + let credentialsUpdated = false; + // @ts-ignore + data.nodes.forEach((node) => { + if (node.credentials) { + const allNodeCredentials = Object.entries(node.credentials); + for (const [type, name] of allNodeCredentials) { + if (typeof name === 'string') { + const matchingCredentials = credentialsEntities.find( + // @ts-ignore + (credentials) => credentials.name === name && credentials.type === type, + ); + node.credentials[type] = { id: matchingCredentials?.id || null, name }; + credentialsUpdated = true; + } + } + } + }); + if (credentialsUpdated) { + const [updateQuery, updateParams] = + queryRunner.connection.driver.escapeQueryWithParameters( + ` + UPDATE "${tablePrefix}execution_entity" + SET "workflowData" = :data + WHERE id = '${execution.id}' + `, + { data: JSON.stringify(data) }, + {}, + ); + + queryRunner.query(updateQuery, updateParams); + } + }); + }); const retryableExecutions = await queryRunner.query(` SELECT id, "workflowData" @@ -68,8 +112,8 @@ export class UpdateWorkflowCredentials1630330987096 implements MigrationInterfac ORDER BY "startedAt" DESC LIMIT 200 `); - - [...waitingExecutions, ...retryableExecutions].forEach(async (execution) => { + // @ts-ignore + 
retryableExecutions.forEach(async (execution) => { const data = JSON.parse(execution.workflowData); let credentialsUpdated = false; // @ts-ignore @@ -78,7 +122,6 @@ export class UpdateWorkflowCredentials1630330987096 implements MigrationInterfac const allNodeCredentials = Object.entries(node.credentials); for (const [type, name] of allNodeCredentials) { if (typeof name === 'string') { - // @ts-ignore const matchingCredentials = credentialsEntities.find( // @ts-ignore (credentials) => credentials.name === name && credentials.type === type, @@ -92,77 +135,127 @@ export class UpdateWorkflowCredentials1630330987096 implements MigrationInterfac if (credentialsUpdated) { const [updateQuery, updateParams] = queryRunner.connection.driver.escapeQueryWithParameters( ` - UPDATE "${tablePrefix}execution_entity" - SET "workflowData" = :data - WHERE id = '${execution.id}' - `, + UPDATE "${tablePrefix}execution_entity" + SET "workflowData" = :data + WHERE id = '${execution.id}' + `, { data: JSON.stringify(data) }, {}, ); - await queryRunner.query(updateQuery, updateParams); + queryRunner.query(updateQuery, updateParams); } }); + console.timeEnd(this.name); } public async down(queryRunner: QueryRunner): Promise { const tablePrefix = config.get('database.tablePrefix'); + const helpers = new MigrationHelpers(queryRunner); const credentialsEntities = await queryRunner.query(` SELECT id, name, type FROM "${tablePrefix}credentials_entity" `); - const workflows = await queryRunner.query(` + const workflowsQuery = ` SELECT id, nodes FROM "${tablePrefix}workflow_entity" - `); + `; + // @ts-ignore - workflows.forEach(async (workflow) => { - const nodes = JSON.parse(workflow.nodes); - let credentialsUpdated = false; + await helpers.runChunked(workflowsQuery, (workflows) => { // @ts-ignore - nodes.forEach((node) => { - if (node.credentials) { - const allNodeCredentials = Object.entries(node.credentials); - for (const [type, creds] of allNodeCredentials) { - if (typeof creds === 'object') { - // @ts-ignore - const matchingCredentials = credentialsEntities.find( - // @ts-ignore - (credentials) => credentials.id === creds.id && credentials.type === type, - ); - if (matchingCredentials) { - node.credentials[type] = matchingCredentials.name; - } else { - // @ts-ignore - node.credentials[type] = creds.name; + workflows.forEach(async (workflow) => { + const nodes = JSON.parse(workflow.nodes); + let credentialsUpdated = false; + // @ts-ignore + nodes.forEach((node) => { + if (node.credentials) { + const allNodeCredentials = Object.entries(node.credentials); + for (const [type, creds] of allNodeCredentials) { + if (typeof creds === 'object') { + const matchingCredentials = credentialsEntities.find( + // @ts-ignore + (credentials) => credentials.id === creds.id && credentials.type === type, + ); + if (matchingCredentials) { + node.credentials[type] = matchingCredentials.name; + } else { + // @ts-ignore + node.credentials[type] = creds.name; + } + credentialsUpdated = true; } - credentialsUpdated = true; } } + }); + if (credentialsUpdated) { + const [updateQuery, updateParams] = + queryRunner.connection.driver.escapeQueryWithParameters( + ` + UPDATE "${tablePrefix}workflow_entity" + SET nodes = :nodes + WHERE id = '${workflow.id}' + `, + { nodes: JSON.stringify(nodes) }, + {}, + ); + + queryRunner.query(updateQuery, updateParams); } }); - if (credentialsUpdated) { - const [updateQuery, updateParams] = queryRunner.connection.driver.escapeQueryWithParameters( - ` - UPDATE "${tablePrefix}workflow_entity" - SET nodes = :nodes - 
WHERE id = '${workflow.id}' - `, - { nodes: JSON.stringify(nodes) }, - {}, - ); - - await queryRunner.query(updateQuery, updateParams); - } }); - const waitingExecutions = await queryRunner.query(` + const waitingExecutionsQuery = ` SELECT id, "workflowData" FROM "${tablePrefix}execution_entity" WHERE "waitTill" IS NOT NULL AND finished = 0 - `); + `; + + // @ts-ignore + await helpers.runChunked(waitingExecutionsQuery, (waitingExecutions) => { + // @ts-ignore + waitingExecutions.forEach(async (execution) => { + const data = JSON.parse(execution.workflowData); + let credentialsUpdated = false; + // @ts-ignore + data.nodes.forEach((node) => { + if (node.credentials) { + const allNodeCredentials = Object.entries(node.credentials); + for (const [type, creds] of allNodeCredentials) { + if (typeof creds === 'object') { + const matchingCredentials = credentialsEntities.find( + // @ts-ignore + (credentials) => credentials.id === creds.id && credentials.type === type, + ); + if (matchingCredentials) { + node.credentials[type] = matchingCredentials.name; + } else { + // @ts-ignore + node.credentials[type] = creds.name; + } + credentialsUpdated = true; + } + } + } + }); + if (credentialsUpdated) { + const [updateQuery, updateParams] = + queryRunner.connection.driver.escapeQueryWithParameters( + ` + UPDATE "${tablePrefix}execution_entity" + SET "workflowData" = :data + WHERE id = '${execution.id}' + `, + { data: JSON.stringify(data) }, + {}, + ); + + await queryRunner.query(updateQuery, updateParams); + } + }); + }); const retryableExecutions = await queryRunner.query(` SELECT id, "workflowData" @@ -172,7 +265,8 @@ export class UpdateWorkflowCredentials1630330987096 implements MigrationInterfac LIMIT 200 `); - [...waitingExecutions, ...retryableExecutions].forEach(async (execution) => { + // @ts-ignore + retryableExecutions.forEach(async (execution) => { const data = JSON.parse(execution.workflowData); let credentialsUpdated = false; // @ts-ignore @@ -181,7 +275,6 @@ export class UpdateWorkflowCredentials1630330987096 implements MigrationInterfac const allNodeCredentials = Object.entries(node.credentials); for (const [type, creds] of allNodeCredentials) { if (typeof creds === 'object') { - // @ts-ignore const matchingCredentials = credentialsEntities.find( // @ts-ignore (credentials) => credentials.id === creds.id && credentials.type === type, @@ -200,15 +293,15 @@ export class UpdateWorkflowCredentials1630330987096 implements MigrationInterfac if (credentialsUpdated) { const [updateQuery, updateParams] = queryRunner.connection.driver.escapeQueryWithParameters( ` - UPDATE "${tablePrefix}execution_entity" - SET "workflowData" = :data - WHERE id = '${execution.id}' - `, + UPDATE "${tablePrefix}execution_entity" + SET "workflowData" = :data + WHERE id = '${execution.id}' + `, { data: JSON.stringify(data) }, {}, ); - await queryRunner.query(updateQuery, updateParams); + queryRunner.query(updateQuery, updateParams); } }); } diff --git a/packages/cli/src/telemetry/index.ts b/packages/cli/src/telemetry/index.ts index fb38ed257d..d350c6d8bd 100644 --- a/packages/cli/src/telemetry/index.ts +++ b/packages/cli/src/telemetry/index.ts @@ -119,6 +119,7 @@ export class Telemetry { this.client.identify( { userId: this.instanceId, + anonymousId: '000000000000', traits: { ...traits, instanceId: this.instanceId, @@ -138,6 +139,7 @@ export class Telemetry { this.client.track( { userId: this.instanceId, + anonymousId: '000000000000', event: eventName, properties, }, diff --git 
a/packages/nodes-base/credentials/SlackOAuth2Api.credentials.ts b/packages/nodes-base/credentials/SlackOAuth2Api.credentials.ts index 7c44beeb63..e4aef0c824 100644 --- a/packages/nodes-base/credentials/SlackOAuth2Api.credentials.ts +++ b/packages/nodes-base/credentials/SlackOAuth2Api.credentials.ts @@ -15,6 +15,8 @@ const userScopes = [ 'reactions:write', 'stars:read', 'stars:write', + 'usergroups:write', + 'usergroups:read', 'users.profile:read', 'users.profile:write', ]; diff --git a/packages/nodes-base/nodes/Lemlist/descriptions/LeadDescription.ts b/packages/nodes-base/nodes/Lemlist/descriptions/LeadDescription.ts index 9c4fe039e8..d9cb1e5317 100644 --- a/packages/nodes-base/nodes/Lemlist/descriptions/LeadDescription.ts +++ b/packages/nodes-base/nodes/Lemlist/descriptions/LeadDescription.ts @@ -124,6 +124,34 @@ export const leadFields = [ default: '', description: 'Last name of the lead to create.', }, + { + displayName: 'Icebreaker', + name: 'icebreaker', + type: 'string', + default: '', + description: 'Icebreaker of the lead to create.', + }, + { + displayName: 'Phone', + name: 'phone', + type: 'string', + default: '', + description: 'Phone number of the lead to create.', + }, + { + displayName: 'Picture URL', + name: 'picture', + type: 'string', + default: '', + description: 'Picture url of the lead to create.', + }, + { + displayName: 'LinkedIn URL', + name: 'linkedinUrl', + type: 'string', + default: '', + description: 'LinkedIn url of the lead to create.', + }, ], }, diff --git a/packages/nodes-base/nodes/LocalFileTrigger.node.ts b/packages/nodes-base/nodes/LocalFileTrigger.node.ts new file mode 100644 index 0000000000..812a3695e6 --- /dev/null +++ b/packages/nodes-base/nodes/LocalFileTrigger.node.ts @@ -0,0 +1,219 @@ +import { ITriggerFunctions } from 'n8n-core'; +import { + IDataObject, + INodeType, + INodeTypeDescription, + ITriggerResponse, +} from 'n8n-workflow'; + +import { watch } from 'chokidar'; + + +export class LocalFileTrigger implements INodeType { + description: INodeTypeDescription = { + displayName: 'Local File Trigger', + name: 'localFileTrigger', + icon: 'fa:folder-open', + group: ['trigger'], + version: 1, + subtitle: '=Path: {{$parameter["path"]}}', + description: 'Triggers a workflow on file system changes', + defaults: { + name: 'Local File Trigger', + color: '#404040', + }, + inputs: [], + outputs: ['main'], + properties: [ + { + displayName: 'Trigger on', + name: 'triggerOn', + type: 'options', + options: [ + { + name: 'Changes to a Specific File', + value: 'file', + }, + { + name: 'Changes Involving a Specific Folder', + value: 'folder', + }, + ], + required: true, + default: '', + }, + { + displayName: 'File to Watch', + name: 'path', + type: 'string', + displayOptions: { + show: { + triggerOn: [ + 'file', + ], + }, + }, + default: '', + placeholder: '/data/invoices/1.pdf', + }, + { + displayName: 'Folder to Watch', + name: 'path', + type: 'string', + displayOptions: { + show: { + triggerOn: [ + 'folder', + ], + }, + }, + default: '', + placeholder: '/data/invoices', + }, + { + displayName: 'Watch for', + name: 'events', + type: 'multiOptions', + displayOptions: { + show: { + triggerOn: [ + 'folder', + ], + }, + }, + options: [ + { + name: 'File Added', + value: 'add', + description: 'Triggers whenever a new file was added', + }, + { + name: 'File Changed', + value: 'change', + description: 'Triggers whenever a file was changed', + }, + { + name: 'File Deleted', + value: 'unlink', + description: 'Triggers whenever a file was deleted', + }, + { + name: 
'Folder Added', + value: 'addDir', + description: 'Triggers whenever a new folder was added', + }, + { + name: 'Folder Deleted', + value: 'unlinkDir', + description: 'Triggers whenever a folder was deleted', + }, + ], + required: true, + default: [], + description: 'The events to listen to', + }, + + { + displayName: 'Options', + name: 'options', + type: 'collection', + placeholder: 'Add Option', + default: {}, + options: [ + { + displayName: 'Include Linked Files/Folders', + name: 'followSymlinks', + type: 'boolean', + default: true, + description: 'When activated, linked files/folders will also be watched (this includes symlinks, aliases on MacOS and shortcuts on Windows). Otherwise only the links themselves will be monitored).', + }, + { + displayName: 'Ignore', + name: 'ignored', + type: 'string', + default: '', + placeholder: '**/*.txt', + description: 'Files or paths to ignore. The whole path is tested, not just the filename. Supports Anymatch- syntax.', + }, + { + displayName: 'Max Folder Depth', + name: 'depth', + type: 'options', + options: [ + { + name: 'Unlimited', + value: -1, + }, + { + name: '5 Levels Down', + value: 5, + }, + { + name: '4 Levels Down', + value: 4, + }, + { + name: '3 Levels Down', + value: 3, + }, + { + name: '2 Levels Down', + value: 2, + }, + { + name: '1 Levels Down', + value: 1, + }, + { + name: 'Top Folder Only', + value: 0, + }, + ], + default: -1, + description: 'How deep into the folder structure to watch for changes', + }, + ], + }, + + ], + }; + + + async trigger(this: ITriggerFunctions): Promise { + const triggerOn = this.getNodeParameter('triggerOn') as string; + const path = this.getNodeParameter('path') as string; + const options = this.getNodeParameter('options', {}) as IDataObject; + + let events: string[]; + if (triggerOn === 'file') { + events = [ 'change' ]; + } else { + events = this.getNodeParameter('events', []) as string[]; + } + + const watcher = watch(path, { + ignored: options.ignored, + persistent: true, + ignoreInitial: true, + followSymlinks: options.followSymlinks === undefined ? true : options.followSymlinks as boolean, + depth: [-1, undefined].includes(options.depth as number) ? 
undefined : options.depth as number, + }); + + const executeTrigger = (event: string, path: string) => { + this.emit([this.helpers.returnJsonArray([{ event, path }])]); + }; + + for (const eventName of events) { + watcher.on(eventName, path => executeTrigger(eventName, path)); + } + + function closeFunction() { + return watcher.close(); + } + + return { + closeFunction, + }; + + } +} diff --git a/packages/nodes-base/nodes/Slack/GenericFunctions.ts b/packages/nodes-base/nodes/Slack/GenericFunctions.ts index 3d92cdab29..e9ee771177 100644 --- a/packages/nodes-base/nodes/Slack/GenericFunctions.ts +++ b/packages/nodes-base/nodes/Slack/GenericFunctions.ts @@ -58,6 +58,12 @@ export async function slackApiRequest(this: IExecuteFunctions | IExecuteSingleFu } if (response.ok === false) { + if (response.error === 'paid_teams_only') { + throw new NodeOperationError(this.getNode(), `Your current Slack plan does not include the resource '${this.getNodeParameter('resource', 0) as string}'`, { + description: `Hint: Upgrade to the Slack plan that includes the functionality you want to use.`, + }); + } + throw new NodeOperationError(this.getNode(), 'Slack error response: ' + JSON.stringify(response)); } diff --git a/packages/nodes-base/nodes/Slack/Slack.node.ts b/packages/nodes-base/nodes/Slack/Slack.node.ts index 5213489c7b..cd506473d1 100644 --- a/packages/nodes-base/nodes/Slack/Slack.node.ts +++ b/packages/nodes-base/nodes/Slack/Slack.node.ts @@ -41,6 +41,11 @@ import { reactionOperations, } from './ReactionDescription'; +import { + userGroupFields, + userGroupOperations, +} from './UserGroupDescription'; + import { userFields, userOperations, @@ -191,6 +196,10 @@ export class Slack implements INodeType { name: 'User', value: 'user', }, + { + name: 'User Group', + value: 'userGroup', + }, { name: 'User Profile', value: 'userProfile', @@ -212,6 +221,8 @@ export class Slack implements INodeType { ...reactionFields, ...userOperations, ...userFields, + ...userGroupOperations, + ...userGroupFields, ...userProfileOperations, ...userProfileFields, ], @@ -295,13 +306,14 @@ export class Slack implements INodeType { try { const response = await this.helpers.request(options); + if (!response.ok) { return { status: 'Error', message: `${response.error}`, }; } - } catch(err) { + } catch (err) { return { status: 'Error', message: `${err.message}`, @@ -414,10 +426,10 @@ export class Slack implements INodeType { qs.inclusive = filters.inclusive as boolean; } if (filters.latest) { - qs.latest = new Date(filters.latest as string).getTime()/1000; + qs.latest = new Date(filters.latest as string).getTime() / 1000; } if (filters.oldest) { - qs.oldest = new Date(filters.oldest as string).getTime()/1000; + qs.oldest = new Date(filters.oldest as string).getTime() / 1000; } if (returnAll === true) { responseData = await slackApiRequestAllItems.call(this, 'messages', 'GET', '/conversations.history', {}, qs); @@ -508,10 +520,10 @@ export class Slack implements INodeType { qs.inclusive = filters.inclusive as boolean; } if (filters.latest) { - qs.latest = new Date(filters.latest as string).getTime()/1000; + qs.latest = new Date(filters.latest as string).getTime() / 1000; } if (filters.oldest) { - qs.oldest = new Date(filters.oldest as string).getTime()/1000; + qs.oldest = new Date(filters.oldest as string).getTime() / 1000; } if (returnAll === true) { responseData = await slackApiRequestAllItems.call(this, 'messages', 'GET', '/conversations.replies', {}, qs); @@ -1036,6 +1048,94 @@ export class Slack implements INodeType { responseData 
= await slackApiRequest.call(this, 'GET', '/users.getPresence', {}, qs); } } + if (resource === 'userGroup') { + //https://api.slack.com/methods/usergroups.create + if (operation === 'create') { + const name = this.getNodeParameter('name', i) as string; + + const additionalFields = this.getNodeParameter('additionalFields', i) as IDataObject; + + const body: IDataObject = { + name, + }; + + Object.assign(body, additionalFields); + + responseData = await slackApiRequest.call(this, 'POST', '/usergroups.create', body, qs); + + responseData = responseData.usergroup; + } + //https://api.slack.com/methods/usergroups.enable + if (operation === 'enable') { + const userGroupId = this.getNodeParameter('userGroupId', i) as string; + + const additionalFields = this.getNodeParameter('additionalFields', i) as IDataObject; + + const body: IDataObject = { + usergroup: userGroupId, + }; + + Object.assign(body, additionalFields); + + responseData = await slackApiRequest.call(this, 'POST', '/usergroups.enable', body, qs); + + responseData = responseData.usergroup; + } + //https://api.slack.com/methods/usergroups.disable + if (operation === 'disable') { + const userGroupId = this.getNodeParameter('userGroupId', i) as string; + + const additionalFields = this.getNodeParameter('additionalFields', i) as IDataObject; + + const body: IDataObject = { + usergroup: userGroupId, + }; + + Object.assign(body, additionalFields); + + responseData = await slackApiRequest.call(this, 'POST', '/usergroups.disable', body, qs); + + responseData = responseData.usergroup; + } + + //https://api.slack.com/methods/usergroups.list + if (operation === 'getAll') { + const returnAll = this.getNodeParameter('returnAll', i) as boolean; + + const additionalFields = this.getNodeParameter('additionalFields', i) as IDataObject; + + const qs: IDataObject = {}; + + Object.assign(qs, additionalFields); + + responseData = await slackApiRequest.call(this, 'GET', '/usergroups.list', {}, qs); + + responseData = responseData.usergroups; + + if (returnAll === false) { + const limit = this.getNodeParameter('limit', i) as number; + + responseData = responseData.slice(0, limit); + } + } + + //https://api.slack.com/methods/usergroups.update + if (operation === 'update') { + const userGroupId = this.getNodeParameter('userGroupId', i) as string; + + const updateFields = this.getNodeParameter('updateFields', i) as IDataObject; + + const body: IDataObject = { + usergroup: userGroupId, + }; + + Object.assign(body, updateFields); + + responseData = await slackApiRequest.call(this, 'POST', '/usergroups.update', body, qs); + + responseData = responseData.usergroup; + } + } if (resource === 'userProfile') { //https://api.slack.com/methods/users.profile.set if (operation === 'update') { diff --git a/packages/nodes-base/nodes/Slack/UserGroupDescription.ts b/packages/nodes-base/nodes/Slack/UserGroupDescription.ts new file mode 100644 index 0000000000..f352e6af23 --- /dev/null +++ b/packages/nodes-base/nodes/Slack/UserGroupDescription.ts @@ -0,0 +1,378 @@ +import { + INodeProperties, +} from 'n8n-workflow'; + +export const userGroupOperations = [ + { + displayName: 'Operation', + name: 'operation', + type: 'options', + displayOptions: { + show: { + resource: [ + 'userGroup', + ], + }, + }, + options: [ + { + name: 'Create', + value: 'create', + description: 'Create a user group', + }, + { + name: 'Disable', + value: 'disable', + description: 'Disable a user group', + }, + { + name: 'Enable', + value: 'enable', + description: 'Enable a user group', + }, + { + name: 'Get 
All', + value: 'getAll', + description: 'Get all user groups', + }, + { + name: 'Update', + value: 'update', + description: 'Update a user group', + }, + ], + default: 'create', + description: 'The operation to perform.', + }, +] as INodeProperties[]; + +export const userGroupFields = [ + + /* -------------------------------------------------------------------------- */ + /* userGroup:create */ + /* -------------------------------------------------------------------------- */ + { + displayName: 'Name', + name: 'name', + type: 'string', + default: '', + displayOptions: { + show: { + operation: [ + 'create', + ], + resource: [ + 'userGroup', + ], + }, + }, + required: true, + description: 'A name for the User Group. Must be unique among User Groups.', + }, + { + displayName: 'Additional Fields', + name: 'additionalFields', + type: 'collection', + placeholder: 'Add Field', + default: {}, + displayOptions: { + show: { + resource: [ + 'userGroup', + ], + operation: [ + 'create', + ], + }, + }, + options: [ + { + displayName: 'Channel IDs', + name: 'channelIds', + type: 'multiOptions', + typeOptions: { + loadOptionsMethod: 'getChannels', + }, + default: [], + description: 'A comma separated string of encoded channel IDs for which the User Group uses as a default.', + }, + { + displayName: 'Description', + name: 'description', + type: 'string', + default: '', + description: 'A short description of the User Group.', + }, + { + displayName: 'Handle', + name: 'handle', + type: 'string', + default: '', + description: 'A mention handle. Must be unique among channels, users and User Groups.', + }, + { + displayName: 'Include Count', + name: 'include_count', + type: 'boolean', + default: true, + description: 'Include the number of users in each User Group.', + }, + ], + }, + /* ----------------------------------------------------------------------- */ + /* userGroup:disable */ + /* ----------------------------------------------------------------------- */ + { + displayName: 'User Group ID', + name: 'userGroupId', + type: 'string', + default: '', + displayOptions: { + show: { + operation: [ + 'disable', + ], + resource: [ + 'userGroup', + ], + }, + }, + required: true, + description: 'The encoded ID of the User Group to update.', + }, + { + displayName: 'Additional Fields', + name: 'additionalFields', + type: 'collection', + placeholder: 'Add Field', + default: {}, + displayOptions: { + show: { + resource: [ + 'userGroup', + ], + operation: [ + 'disable', + ], + }, + }, + options: [ + { + displayName: 'Include Count', + name: 'include_count', + type: 'boolean', + default: true, + description: 'Include the number of users in each User Group.', + }, + ], + }, + /* ----------------------------------------------------------------------- */ + /* userGroup:enable */ + /* ----------------------------------------------------------------------- */ + { + displayName: 'User Group ID', + name: 'userGroupId', + type: 'string', + default: '', + displayOptions: { + show: { + operation: [ + 'enable', + ], + resource: [ + 'userGroup', + ], + }, + }, + required: true, + description: 'The encoded ID of the User Group to update.', + }, + { + displayName: 'Additional Fields', + name: 'additionalFields', + type: 'collection', + placeholder: 'Add Field', + default: {}, + displayOptions: { + show: { + resource: [ + 'userGroup', + ], + operation: [ + 'enable', + ], + }, + }, + options: [ + { + displayName: 'Include Count', + name: 'include_count', + type: 'boolean', + default: true, + description: 'Include the number of users in 
each User Group.', + }, + ], + }, + /* -------------------------------------------------------------------------- */ + /* userGroup:getAll */ + /* -------------------------------------------------------------------------- */ + { + displayName: 'Return All', + name: 'returnAll', + type: 'boolean', + displayOptions: { + show: { + operation: [ + 'getAll', + ], + resource: [ + 'userGroup', + ], + }, + }, + default: false, + description: 'If all results should be returned or only up to a given limit.', + }, + { + displayName: 'Limit', + name: 'limit', + type: 'number', + displayOptions: { + show: { + operation: [ + 'getAll', + ], + resource: [ + 'userGroup', + ], + returnAll: [ + false, + ], + }, + }, + typeOptions: { + minValue: 1, + maxValue: 500, + }, + default: 100, + description: 'How many results to return.', + }, + { + displayName: 'Additional Fields', + name: 'additionalFields', + type: 'collection', + placeholder: 'Add Field', + default: {}, + displayOptions: { + show: { + resource: [ + 'userGroup', + ], + operation: [ + 'getAll', + ], + }, + }, + options: [ + { + displayName: 'Include Count', + name: 'include_count', + type: 'boolean', + default: true, + description: 'Include the number of users in each User Group.', + }, + { + displayName: 'Include Disabled', + name: 'include_disabled', + type: 'boolean', + default: true, + description: 'Include disabled User Groups.', + }, + { + displayName: 'Include Users', + name: 'include_users', + type: 'boolean', + default: true, + description: 'Include the list of users for each User Group.', + }, + ], + }, + /* ----------------------------------------------------------------------- */ + /* userGroup:update */ + /* ----------------------------------------------------------------------- */ + { + displayName: 'User Group ID', + name: 'userGroupId', + type: 'string', + default: '', + displayOptions: { + show: { + operation: [ + 'update', + ], + resource: [ + 'userGroup', + ], + }, + }, + required: true, + description: 'The encoded ID of the User Group to update.', + }, + { + displayName: 'Update Fields', + name: 'updateFields', + type: 'collection', + placeholder: 'Add Field', + default: {}, + displayOptions: { + show: { + resource: [ + 'userGroup', + ], + operation: [ + 'update', + ], + }, + }, + options: [ + { + displayName: 'Channel IDs', + name: 'channels', + type: 'multiOptions', + typeOptions: { + loadOptionsMethod: 'getChannels', + }, + default: [], + description: 'A comma separated string of encoded channel IDs for which the User Group uses as a default.', + }, + { + displayName: 'Description', + name: 'description', + type: 'string', + default: '', + description: 'A short description of the User Group.', + }, + { + displayName: 'Handle', + name: 'handle', + type: 'string', + default: '', + description: 'A mention handle. Must be unique among channels, users and User Groups.', + }, + { + displayName: 'Include Count', + name: 'include_count', + type: 'boolean', + default: true, + description: 'Include the number of users in each User Group.', + }, + { + displayName: 'Name', + name: 'name', + type: 'string', + default: '', + description: 'A name for the User Group. 
Must be unique among User Groups.', + }, + ], + }, +] as INodeProperties[]; \ No newline at end of file diff --git a/packages/nodes-base/nodes/Stripe/Stripe.node.ts b/packages/nodes-base/nodes/Stripe/Stripe.node.ts index 06eb3577ab..6127f1fe16 100644 --- a/packages/nodes-base/nodes/Stripe/Stripe.node.ts +++ b/packages/nodes-base/nodes/Stripe/Stripe.node.ts @@ -255,7 +255,7 @@ export class Stripe implements INodeType { // charge: getAll // ---------------------------------- - responseData = await handleListing.call(this, resource); + responseData = await handleListing.call(this, resource, i); } else if (operation === 'update') { @@ -313,7 +313,7 @@ export class Stripe implements INodeType { // coupon: getAll // ---------------------------------- - responseData = await handleListing.call(this, resource); + responseData = await handleListing.call(this, resource, i); } @@ -374,7 +374,7 @@ export class Stripe implements INodeType { qs.email = filters.email; } - responseData = await handleListing.call(this, resource, qs); + responseData = await handleListing.call(this, resource, i, qs); } else if (operation === 'update') { diff --git a/packages/nodes-base/nodes/Stripe/descriptions/TokenDescription.ts b/packages/nodes-base/nodes/Stripe/descriptions/TokenDescription.ts index 591a438dd0..ae454bf79c 100644 --- a/packages/nodes-base/nodes/Stripe/descriptions/TokenDescription.ts +++ b/packages/nodes-base/nodes/Stripe/descriptions/TokenDescription.ts @@ -43,6 +43,16 @@ export const tokenFields = [ value: 'cardToken', }, ], + displayOptions: { + show: { + resource: [ + 'token', + ], + operation: [ + 'create', + ], + }, + }, }, { displayName: 'Card Number', diff --git a/packages/nodes-base/nodes/Stripe/helpers.ts b/packages/nodes-base/nodes/Stripe/helpers.ts index c7ec812a4a..38823eb12b 100644 --- a/packages/nodes-base/nodes/Stripe/helpers.ts +++ b/packages/nodes-base/nodes/Stripe/helpers.ts @@ -102,10 +102,10 @@ export function adjustMetadata( ) { if (!fields.metadata || isEmpty(fields.metadata)) return fields; - let adjustedMetadata = {}; + const adjustedMetadata: Record = {}; fields.metadata.metadataProperties.forEach(pair => { - adjustedMetadata = { ...adjustedMetadata, ...pair }; + adjustedMetadata[pair.key] = pair.value; }); return { @@ -154,19 +154,25 @@ export async function loadResource( export async function handleListing( this: IExecuteFunctions, resource: string, + i: number, qs: IDataObject = {}, ) { + const returnData: IDataObject[] = []; let responseData; - responseData = await stripeApiRequest.call(this, 'GET', `/${resource}s`, qs, {}); - responseData = responseData.data; + const returnAll = this.getNodeParameter('returnAll', i) as boolean; + const limit = this.getNodeParameter('limit', i, 0) as number; - const returnAll = this.getNodeParameter('returnAll', 0) as boolean; + do { + responseData = await stripeApiRequest.call(this, 'GET', `/${resource}s`, {}, qs); + returnData.push(...responseData.data); - if (!returnAll) { - const limit = this.getNodeParameter('limit', 0) as number; - responseData = responseData.slice(0, limit); - } + if (!returnAll && returnData.length >= limit) { + return returnData.slice(0, limit); + } - return responseData; + qs.starting_after = returnData[returnData.length - 1].id; + } while (responseData.has_more); + + return returnData; } diff --git a/packages/nodes-base/nodes/WooCommerce/descriptions/shared.ts b/packages/nodes-base/nodes/WooCommerce/descriptions/shared.ts index 14e3fa1f94..7800b8554a 100644 --- 
a/packages/nodes-base/nodes/WooCommerce/descriptions/shared.ts +++ b/packages/nodes-base/nodes/WooCommerce/descriptions/shared.ts @@ -118,6 +118,22 @@ const customerUpdateOptions = [ }, ], }, + { + displayName: 'Password', + name: 'password', + type: 'string', + displayOptions: { + show: { + '/resource': [ + 'customer', + ], + '/operation': [ + 'create', + ], + }, + }, + default: '', + }, { displayName: 'Shipping Address', name: 'shipping', diff --git a/packages/nodes-base/package.json b/packages/nodes-base/package.json index 2760b61294..969fc9971e 100644 --- a/packages/nodes-base/package.json +++ b/packages/nodes-base/package.json @@ -1,6 +1,6 @@ { "name": "n8n-nodes-base", - "version": "0.143.0", + "version": "0.144.1", "description": "Base nodes of n8n", "license": "SEE LICENSE IN LICENSE.md", "homepage": "https://n8n.io", @@ -469,6 +469,7 @@ "dist/nodes/Line/Line.node.js", "dist/nodes/LingvaNex/LingvaNex.node.js", "dist/nodes/LinkedIn/LinkedIn.node.js", + "dist/nodes/LocalFileTrigger.node.js", "dist/nodes/Magento/Magento2.node.js", "dist/nodes/MailerLite/MailerLite.node.js", "dist/nodes/MailerLite/MailerLiteTrigger.node.js", @@ -682,10 +683,11 @@ "basic-auth": "^2.0.1", "change-case": "^4.1.1", "cheerio": "1.0.0-rc.6", + "chokidar": "^3.5.2", "cron": "~1.7.2", "eventsource": "^1.0.7", - "fflate": "^0.7.0", "fast-glob": "^3.2.5", + "fflate": "^0.7.0", "formidable": "^1.2.1", "get-system-fonts": "^2.0.2", "gm": "^1.23.1", @@ -706,8 +708,8 @@ "mqtt": "4.2.6", "mssql": "^6.2.0", "mysql2": "~2.3.0", - "node-ssh": "^12.0.0", "n8n-core": "~0.91.0", + "node-ssh": "^12.0.0", "nodemailer": "^6.5.0", "pdf-parse": "^1.1.1", "pg": "^8.3.0", diff --git a/packages/nodes-base/test/nodes/Stripe/helpers.test.js b/packages/nodes-base/test/nodes/Stripe/helpers.test.js new file mode 100644 index 0000000000..a5d480768a --- /dev/null +++ b/packages/nodes-base/test/nodes/Stripe/helpers.test.js @@ -0,0 +1,30 @@ +const helpers = require("../../../nodes/Stripe/helpers"); + +describe('adjustMetadata', () => { + it('it should adjust multiple metadata values', async () => { + const additionalFieldsValues = { + metadata: { + metadataProperties: [ + { + key: "keyA", + value: "valueA" + }, + { + key: "keyB", + value: "valueB" + }, + ], + }, + } + + const adjustedMetadata = helpers.adjustMetadata(additionalFieldsValues) + + const expectedAdjustedMetadata = { + metadata: { + keyA: "valueA", + keyB: "valueB" + } + } + expect(adjustedMetadata).toStrictEqual(expectedAdjustedMetadata) + }); +});
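For reference, the sketch below shows how the new MigrationHelpers.runChunked helper pages through a large result set. It is illustrative only and not part of the changeset above: the stubbed QueryRunner, the in-memory rows, and the import path are assumptions, and the regular expressions simply pick up the LIMIT/OFFSET suffix that chunkQuery appends to the base query.

import { QueryRunner } from 'typeorm';
// Hypothetical import path — adjust to wherever MigrationHelpers lives in your checkout.
import { MigrationHelpers } from './packages/cli/src/databases/MigrationHelpers';

async function demo(): Promise<void> {
	// Pretend the table holds 250 rows.
	const rows = Array.from({ length: 250 }, (_, id) => ({ id, nodes: '[]' }));

	// Minimal stub: runChunked only ever calls `query`, so that is all we fake.
	// It reads the LIMIT/OFFSET that chunkQuery appended and slices the rows accordingly.
	const queryRunner = {
		async query(sql: string) {
			const limit = Number(/LIMIT (\d+)/.exec(sql)?.[1] ?? 0);
			const offset = Number(/OFFSET (\d+)/.exec(sql)?.[1] ?? 0);
			return rows.slice(offset, offset + limit);
		},
	} as unknown as QueryRunner;

	const helpers = new MigrationHelpers(queryRunner);

	const chunkSizes: number[] = [];
	await helpers.runChunked(
		'SELECT id, nodes FROM workflow_entity',
		async (chunk) => {
			chunkSizes.push(chunk.length);
		},
		100,
	);

	// The loop stops as soon as a chunk comes back smaller than the limit.
	console.log(chunkSizes); // [100, 100, 50]
}

demo().catch(console.error);

Each chunk is handed to the callback as a copy, so mutating it inside the operation does not interfere with the helper's own loop.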