fix(editor): Prevent creation of input connections for nodes without input slot (#5425)

* fix(editor): Prevent creation of input connections for nodes without input

* WIP: Workflow checks service and controller

* fix: Created SQLite migration to remove broken connections

* Cleanup & add mysql/postgres migrations

* Linter fixes

* Unify the migration scripts

* Escape migration workflow_entity

* Wrap the migration in try/catch and do not parse nodes and connection if mysql/postgres

* Do migration changes also for mysql

* refactor: Wrap only the necessary call in try catch block

---------

Co-authored-by: Omar Ajoue <krynble@gmail.com>
This commit is contained in:
OlegIvaniv 2023-02-09 16:04:26 +01:00 committed by Jan Oberhauser
parent 91d9f2d202
commit b57ec1d6ab
8 changed files with 301 additions and 26 deletions

View file

@ -239,6 +239,14 @@ export class Start extends Command {
const { flags } = this.parse(Start); const { flags } = this.parse(Start);
try { try {
// Load all node and credential types
const loadNodesAndCredentials = LoadNodesAndCredentials();
await loadNodesAndCredentials.init();
// Add the found types to an instance other parts of the application can use
const nodeTypes = NodeTypes(loadNodesAndCredentials);
const credentialTypes = CredentialTypes(loadNodesAndCredentials);
// Start directly with the init of the database to improve startup time // Start directly with the init of the database to improve startup time
await Db.init().catch(async (error: Error) => await Db.init().catch(async (error: Error) =>
exitWithCrash('There was an error initializing DB', error), exitWithCrash('There was an error initializing DB', error),
@ -265,18 +273,10 @@ export class Start extends Command {
await Start.generateStaticAssets(); await Start.generateStaticAssets();
} }
// Load all node and credential types
const loadNodesAndCredentials = LoadNodesAndCredentials();
await loadNodesAndCredentials.init();
// Load all external hooks // Load all external hooks
const externalHooks = ExternalHooks(); const externalHooks = ExternalHooks();
await externalHooks.init(); await externalHooks.init();
// Add the found types to an instance other parts of the application can use
const nodeTypes = NodeTypes(loadNodesAndCredentials);
const credentialTypes = CredentialTypes(loadNodesAndCredentials);
// Load the credentials overwrites if any exist // Load the credentials overwrites if any exist
CredentialsOverwrites(credentialTypes); CredentialsOverwrites(credentialTypes);

View file

@ -0,0 +1,91 @@
import { MigrationInterface, QueryRunner } from 'typeorm';
import { getTablePrefix, logMigrationEnd, logMigrationStart } from '@db/utils/migrationHelpers';
import { NodeTypes } from '@/NodeTypes';
import { IConnections, INode } from 'n8n-workflow';
import { getLogger } from '@/Logger';
/**
 * Removes workflow connections whose target node cannot receive input
 * (i.e. its node type declares zero inputs). MySQL variant: `nodes` and
 * `connections` arrive already deserialized as JSON.
 */
export class PurgeInvalidWorkflowConnections1675940580449 implements MigrationInterface {
	name = 'PurgeInvalidWorkflowConnections1675940580449';

	async up(queryRunner: QueryRunner): Promise<void> {
		logMigrationStart(this.name);
		const tablePrefix = getTablePrefix();

		const workflows: Array<{ id: number; nodes: INode[]; connections: IConnections }> =
			await queryRunner.query(`
			SELECT id, nodes, connections
			FROM \`${tablePrefix}workflow_entity\`
		`);

		const nodeTypes = NodeTypes();

		// Process workflows with for..of so every UPDATE is actually awaited.
		// (An async forEach callback returns promises nobody awaits, letting the
		// migration — and its transaction — finish before the writes complete.)
		for (const workflow of workflows) {
			const connections: IConnections = workflow.connections;
			const nodes: INode[] = workflow.nodes;

			// Names of nodes whose type declares zero inputs — they can never be
			// a valid connection target.
			const nodesThatCannotReceiveInput: string[] = nodes.reduce((acc, node) => {
				try {
					const nodeType = nodeTypes.getByNameAndVersion(node.type, node.typeVersion);
					// NOTE: original compared `(inputs?.length ?? []) === 0`, which is
					// always false when `inputs` is undefined; the optional chain alone
					// preserves that behavior while being a valid comparison.
					if (nodeType.description.inputs?.length === 0) {
						acc.push(node.name);
					}
				} catch (error) {
					// Unknown node type: log and skip rather than abort the migration.
					getLogger().warn(
						`Migration ${this.name} failed with error: ${(error as Error).message}`,
					);
				}
				return acc;
			}, [] as string[]);

			Object.keys(connections).forEach((sourceNodeName) => {
				const connection = connections[sourceNodeName];
				Object.keys(connection).forEach((outputConnectionName /* Like `main` */) => {
					const outputConnection = connection[outputConnectionName];
					// Filter out all connections targeting a node that cannot receive input
					outputConnection.forEach((outputConnectionItem, outputConnectionItemIdx) => {
						outputConnection[outputConnectionItemIdx] = outputConnectionItem.filter(
							(outgoingConnection) =>
								!nodesThatCannotReceiveInput.includes(outgoingConnection.node),
						);
					});
					// Filter out output connection items that are now empty
					connection[outputConnectionName] = connection[outputConnectionName].filter(
						(item) => item.length > 0,
					);
					// Delete the output connection container if it is empty
					if (connection[outputConnectionName].length === 0) {
						delete connection[outputConnectionName];
					}
				});
				// Finally delete the source node entry if it has no output connections left
				if (Object.keys(connection).length === 0) {
					delete connections[sourceNodeName];
				}
			});

			// Persist the cleaned-up connections. The JSON payload is parameterized;
			// the id is interpolated but originates from the database itself.
			const [updateQuery, updateParams] =
				queryRunner.connection.driver.escapeQueryWithParameters(
					`
					UPDATE \`${tablePrefix}workflow_entity\`
					SET connections = :connections
					WHERE id = '${workflow.id}'
				`,
					{ connections: JSON.stringify(connections) },
					{},
				);

			await queryRunner.query(updateQuery, updateParams);
		}

		logMigrationEnd(this.name);
	}

	async down(queryRunner: QueryRunner): Promise<void> {
		// Nothing to restore — the removed connections were invalid to begin with.
	}
}

View file

@ -30,6 +30,7 @@ import { RemoveWorkflowDataLoadedFlag1671726148420 } from './1671726148420-Remov
import { MessageEventBusDestinations1671535397530 } from './1671535397530-MessageEventBusDestinations'; import { MessageEventBusDestinations1671535397530 } from './1671535397530-MessageEventBusDestinations';
import { DeleteExecutionsWithWorkflows1673268682475 } from './1673268682475-DeleteExecutionsWithWorkflows'; import { DeleteExecutionsWithWorkflows1673268682475 } from './1673268682475-DeleteExecutionsWithWorkflows';
import { CreateLdapEntities1674509946020 } from './1674509946020-CreateLdapEntities'; import { CreateLdapEntities1674509946020 } from './1674509946020-CreateLdapEntities';
import { PurgeInvalidWorkflowConnections1675940580449 } from './1675940580449-PurgeInvalidWorkflowConnections';
export const mysqlMigrations = [ export const mysqlMigrations = [
InitialMigration1588157391238, InitialMigration1588157391238,
@ -64,4 +65,5 @@ export const mysqlMigrations = [
MessageEventBusDestinations1671535397530, MessageEventBusDestinations1671535397530,
DeleteExecutionsWithWorkflows1673268682475, DeleteExecutionsWithWorkflows1673268682475,
CreateLdapEntities1674509946020, CreateLdapEntities1674509946020,
PurgeInvalidWorkflowConnections1675940580449
]; ];

View file

@ -0,0 +1,90 @@
import { MigrationInterface, QueryRunner } from 'typeorm';
import { getTablePrefix, logMigrationEnd, logMigrationStart } from '@db/utils/migrationHelpers';
import { NodeTypes } from '@/NodeTypes';
import { IConnections, INode } from 'n8n-workflow';
import { getLogger } from '@/Logger';
/**
 * Removes workflow connections whose target node cannot receive input
 * (i.e. its node type declares zero inputs). Postgres variant: `nodes` and
 * `connections` arrive already deserialized as JSON.
 */
export class PurgeInvalidWorkflowConnections1675940580449 implements MigrationInterface {
	name = 'PurgeInvalidWorkflowConnections1675940580449';

	async up(queryRunner: QueryRunner): Promise<void> {
		logMigrationStart(this.name);
		const tablePrefix = getTablePrefix();

		const workflows: Array<{ id: number; nodes: INode[]; connections: IConnections }> =
			await queryRunner.query(`
			SELECT id, nodes, connections
			FROM "${tablePrefix}workflow_entity"
		`);

		const nodeTypes = NodeTypes();

		// Process workflows with for..of so every UPDATE is actually awaited.
		// (An async forEach callback returns promises nobody awaits, letting the
		// migration — and its transaction — finish before the writes complete.)
		for (const workflow of workflows) {
			const connections: IConnections = workflow.connections;
			const nodes: INode[] = workflow.nodes;

			// Names of nodes whose type declares zero inputs — they can never be
			// a valid connection target.
			const nodesThatCannotReceiveInput: string[] = nodes.reduce((acc, node) => {
				try {
					const nodeType = nodeTypes.getByNameAndVersion(node.type, node.typeVersion);
					// NOTE: original compared `(inputs?.length ?? []) === 0`, which is
					// always false when `inputs` is undefined; the optional chain alone
					// preserves that behavior while being a valid comparison.
					if (nodeType.description.inputs?.length === 0) {
						acc.push(node.name);
					}
				} catch (error) {
					// Unknown node type: log and skip rather than abort the migration.
					getLogger().warn(
						`Migration ${this.name} failed with error: ${(error as Error).message}`,
					);
				}
				return acc;
			}, [] as string[]);

			Object.keys(connections).forEach((sourceNodeName) => {
				const connection = connections[sourceNodeName];
				Object.keys(connection).forEach((outputConnectionName /* Like `main` */) => {
					const outputConnection = connection[outputConnectionName];
					// Filter out all connections targeting a node that cannot receive input
					outputConnection.forEach((outputConnectionItem, outputConnectionItemIdx) => {
						outputConnection[outputConnectionItemIdx] = outputConnectionItem.filter(
							(outgoingConnection) =>
								!nodesThatCannotReceiveInput.includes(outgoingConnection.node),
						);
					});
					// Filter out output connection items that are now empty
					connection[outputConnectionName] = connection[outputConnectionName].filter(
						(item) => item.length > 0,
					);
					// Delete the output connection container if it is empty
					if (connection[outputConnectionName].length === 0) {
						delete connection[outputConnectionName];
					}
				});
				// Finally delete the source node entry if it has no output connections left
				if (Object.keys(connection).length === 0) {
					delete connections[sourceNodeName];
				}
			});

			// Persist the cleaned-up connections. The JSON payload is parameterized;
			// the id is interpolated but originates from the database itself.
			const [updateQuery, updateParams] =
				queryRunner.connection.driver.escapeQueryWithParameters(
					`
					UPDATE "${tablePrefix}workflow_entity"
					SET connections = :connections
					WHERE id = '${workflow.id}'
				`,
					{ connections: JSON.stringify(connections) },
					{},
				);

			await queryRunner.query(updateQuery, updateParams);
		}

		logMigrationEnd(this.name);
	}

	async down(queryRunner: QueryRunner): Promise<void> {
		// Nothing to restore — the removed connections were invalid to begin with.
	}
}

View file

@ -28,6 +28,7 @@ import { RemoveWorkflowDataLoadedFlag1671726148421 } from './1671726148421-Remov
import { MessageEventBusDestinations1671535397530 } from './1671535397530-MessageEventBusDestinations'; import { MessageEventBusDestinations1671535397530 } from './1671535397530-MessageEventBusDestinations';
import { DeleteExecutionsWithWorkflows1673268682475 } from './1673268682475-DeleteExecutionsWithWorkflows'; import { DeleteExecutionsWithWorkflows1673268682475 } from './1673268682475-DeleteExecutionsWithWorkflows';
import { CreateLdapEntities1674509946020 } from './1674509946020-CreateLdapEntities'; import { CreateLdapEntities1674509946020 } from './1674509946020-CreateLdapEntities';
import { PurgeInvalidWorkflowConnections1675940580449 } from './1675940580449-PurgeInvalidWorkflowConnections';
export const postgresMigrations = [ export const postgresMigrations = [
InitialMigration1587669153312, InitialMigration1587669153312,
@ -60,4 +61,5 @@ export const postgresMigrations = [
MessageEventBusDestinations1671535397530, MessageEventBusDestinations1671535397530,
DeleteExecutionsWithWorkflows1673268682475, DeleteExecutionsWithWorkflows1673268682475,
CreateLdapEntities1674509946020, CreateLdapEntities1674509946020,
PurgeInvalidWorkflowConnections1675940580449
]; ];

View file

@ -0,0 +1,91 @@
import { MigrationInterface, QueryRunner } from 'typeorm';
import { getTablePrefix, logMigrationEnd, logMigrationStart } from '@db/utils/migrationHelpers';
import { NodeTypes } from '@/NodeTypes';
import { IConnections, INode } from 'n8n-workflow';
import { getLogger } from '@/Logger';
/**
 * Removes workflow connections whose target node cannot receive input
 * (i.e. its node type declares zero inputs). SQLite variant: `nodes` and
 * `connections` are stored as text and must be JSON.parsed here.
 */
export class PurgeInvalidWorkflowConnections1675940580449 implements MigrationInterface {
	name = 'PurgeInvalidWorkflowConnections1675940580449';

	async up(queryRunner: QueryRunner): Promise<void> {
		logMigrationStart(this.name);
		const tablePrefix = getTablePrefix();

		const workflows: Array<{ id: number; nodes: string; connections: string }> =
			await queryRunner.query(`
			SELECT id, nodes, connections
			FROM "${tablePrefix}workflow_entity"
		`);

		const nodeTypes = NodeTypes();

		// Process workflows with for..of so every UPDATE is actually awaited.
		// (An async forEach callback returns promises nobody awaits, letting the
		// migration — and its transaction — finish before the writes complete.)
		for (const workflow of workflows) {
			const connections: IConnections = JSON.parse(workflow.connections);
			const nodes: INode[] = JSON.parse(workflow.nodes);

			// Names of nodes whose type declares zero inputs — they can never be
			// a valid connection target.
			const nodesThatCannotReceiveInput: string[] = nodes.reduce((acc, node) => {
				try {
					const nodeType = nodeTypes.getByNameAndVersion(node.type, node.typeVersion);
					// NOTE: original compared `(inputs?.length ?? []) === 0`, which is
					// always false when `inputs` is undefined; the optional chain alone
					// preserves that behavior while being a valid comparison.
					if (nodeType.description.inputs?.length === 0) {
						acc.push(node.name);
					}
				} catch (error) {
					// Unknown node type: log and skip rather than abort the migration.
					getLogger().warn(
						`Migration ${this.name} failed with error: ${(error as Error).message}`,
					);
				}
				return acc;
			}, [] as string[]);

			Object.keys(connections).forEach((sourceNodeName) => {
				const connection = connections[sourceNodeName];
				Object.keys(connection).forEach((outputConnectionName /* Like `main` */) => {
					const outputConnection = connection[outputConnectionName];
					// Filter out all connections targeting a node that cannot receive input
					outputConnection.forEach((outputConnectionItem, outputConnectionItemIdx) => {
						outputConnection[outputConnectionItemIdx] = outputConnectionItem.filter(
							(outgoingConnection) =>
								!nodesThatCannotReceiveInput.includes(outgoingConnection.node),
						);
					});
					// Filter out output connection items that are now empty
					connection[outputConnectionName] = connection[outputConnectionName].filter(
						(item) => item.length > 0,
					);
					// Delete the output connection container if it is empty
					if (connection[outputConnectionName].length === 0) {
						delete connection[outputConnectionName];
					}
				});
				// Finally delete the source node entry if it has no output connections left
				if (Object.keys(connection).length === 0) {
					delete connections[sourceNodeName];
				}
			});

			// Persist the cleaned-up connections. The JSON payload is parameterized;
			// the id is interpolated but originates from the database itself.
			const [updateQuery, updateParams] =
				queryRunner.connection.driver.escapeQueryWithParameters(
					`
					UPDATE "${tablePrefix}workflow_entity"
					SET connections = :connections
					WHERE id = '${workflow.id}'
				`,
					{ connections: JSON.stringify(connections) },
					{},
				);

			await queryRunner.query(updateQuery, updateParams);
		}

		logMigrationEnd(this.name);
	}

	async down(queryRunner: QueryRunner): Promise<void> {
		// Nothing to restore — the removed connections were invalid to begin with.
	}
}

View file

@ -27,6 +27,7 @@ import { RemoveWorkflowDataLoadedFlag1671726148419 } from './1671726148419-Remov
import { MessageEventBusDestinations1671535397530 } from './1671535397530-MessageEventBusDestinations'; import { MessageEventBusDestinations1671535397530 } from './1671535397530-MessageEventBusDestinations';
import { DeleteExecutionsWithWorkflows1673268682475 } from './1673268682475-DeleteExecutionsWithWorkflows'; import { DeleteExecutionsWithWorkflows1673268682475 } from './1673268682475-DeleteExecutionsWithWorkflows';
import { CreateLdapEntities1674509946020 } from './1674509946020-CreateLdapEntities'; import { CreateLdapEntities1674509946020 } from './1674509946020-CreateLdapEntities';
import { PurgeInvalidWorkflowConnections1675940580449 } from './1675940580449-PurgeInvalidWorkflowConnections';
const sqliteMigrations = [ const sqliteMigrations = [
InitialMigration1588102412422, InitialMigration1588102412422,
@ -58,6 +59,7 @@ const sqliteMigrations = [
MessageEventBusDestinations1671535397530, MessageEventBusDestinations1671535397530,
DeleteExecutionsWithWorkflows1673268682475, DeleteExecutionsWithWorkflows1673268682475,
CreateLdapEntities1674509946020, CreateLdapEntities1674509946020,
PurgeInvalidWorkflowConnections1675940580449,
]; ];
export { sqliteMigrations }; export { sqliteMigrations };

View file

@ -1995,7 +1995,7 @@ export default mixins(
}, },
] as [IConnection, IConnection]; ] as [IConnection, IConnection];
this.__addConnection(connectionData, true); this.__addConnection(connectionData);
}, },
async addNode( async addNode(
nodeTypeName: string, nodeTypeName: string,
@ -2558,22 +2558,19 @@ export default mixins(
return NodeViewUtils.getInputEndpointUUID(node.id, index); return NodeViewUtils.getInputEndpointUUID(node.id, index);
}, },
__addConnection(connection: [IConnection, IConnection], addVisualConnection = false) { __addConnection(connection: [IConnection, IConnection]) {
if (addVisualConnection) { const outputUuid = this.getOutputEndpointUUID(connection[0].node, connection[0].index);
const outputUuid = this.getOutputEndpointUUID(connection[0].node, connection[0].index); const inputUuid = this.getInputEndpointUUID(connection[1].node, connection[1].index);
const inputUuid = this.getInputEndpointUUID(connection[1].node, connection[1].index); if (!outputUuid || !inputUuid) {
if (!outputUuid || !inputUuid) { return;
return;
}
const uuid: [string, string] = [outputUuid, inputUuid];
// Create connections in DOM
this.instance?.connect({
uuids: uuid,
detachable: !this.isReadOnly,
});
} }
this.workflowsStore.addConnection({ connection });
const uuid: [string, string] = [outputUuid, inputUuid];
// Create connections in DOM
this.instance?.connect({
uuids: uuid,
detachable: !this.isReadOnly,
});
setTimeout(() => { setTimeout(() => {
this.addPinDataConnections(this.workflowsStore.pinData); this.addPinDataConnections(this.workflowsStore.pinData);
@ -3264,7 +3261,7 @@ export default mixins(
}, },
] as [IConnection, IConnection]; ] as [IConnection, IConnection];
this.__addConnection(connectionData, true); this.__addConnection(connectionData);
}); });
} }
} }
@ -3750,7 +3747,7 @@ export default mixins(
}, },
async onRevertRemoveConnection({ connection }: { connection: [IConnection, IConnection] }) { async onRevertRemoveConnection({ connection }: { connection: [IConnection, IConnection] }) {
this.suspendRecordingDetachedConnections = true; this.suspendRecordingDetachedConnections = true;
this.__addConnection(connection, true); this.__addConnection(connection);
this.suspendRecordingDetachedConnections = false; this.suspendRecordingDetachedConnections = false;
}, },
async onRevertNameChange({ currentName, newName }: { currentName: string; newName: string }) { async onRevertNameChange({ currentName, newName }: { currentName: string; newName: string }) {