mirror of
https://github.com/n8n-io/n8n.git
synced 2024-12-24 20:24:05 -08:00
🐛 Fix performance and crashes with latest updates (#1524)
* Removing unnecessary data from process communication
* Wrapping progress save in a try ... catch block to prevent crashes
* Migration to fix MySQL problems with large execution data (widen `data` column)
* ⚡ Minor formatting fixes
Co-authored-by: Jan Oberhauser <jan.oberhauser@gmail.com>
This commit is contained in:
parent
80dfa75eb5
commit
3d80129a28
|
@ -230,54 +230,64 @@ export function hookFunctionsPreExecute(parentProcessMode?: string): IWorkflowEx
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
const execution = await Db.collections.Execution!.findOne(this.executionId);
|
try {
|
||||||
|
const execution = await Db.collections.Execution!.findOne(this.executionId);
|
||||||
|
|
||||||
if (execution === undefined) {
|
if (execution === undefined) {
|
||||||
// Something went badly wrong if this happens.
|
// Something went badly wrong if this happens.
|
||||||
// This check is here mostly to make typescript happy.
|
// This check is here mostly to make typescript happy.
|
||||||
return undefined;
|
return undefined;
|
||||||
}
|
}
|
||||||
const fullExecutionData: IExecutionResponse = ResponseHelper.unflattenExecutionData(execution);
|
const fullExecutionData: IExecutionResponse = ResponseHelper.unflattenExecutionData(execution);
|
||||||
|
|
||||||
if (fullExecutionData.finished) {
|
if (fullExecutionData.finished) {
|
||||||
// We already received ´workflowExecuteAfter´ webhook, so this is just an async call
|
// We already received ´workflowExecuteAfter´ webhook, so this is just an async call
|
||||||
// that was left behind. We skip saving because the other call should have saved everything
|
// that was left behind. We skip saving because the other call should have saved everything
|
||||||
// so this one is safe to ignore
|
// so this one is safe to ignore
|
||||||
return;
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
if (fullExecutionData.data === undefined) {
|
||||||
|
fullExecutionData.data = {
|
||||||
|
startData: {
|
||||||
|
},
|
||||||
|
resultData: {
|
||||||
|
runData: {},
|
||||||
|
},
|
||||||
|
executionData: {
|
||||||
|
contextData: {},
|
||||||
|
nodeExecutionStack: [],
|
||||||
|
waitingExecution: {},
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
if (Array.isArray(fullExecutionData.data.resultData.runData[nodeName])) {
|
||||||
|
// Append data if array exists
|
||||||
|
fullExecutionData.data.resultData.runData[nodeName].push(data);
|
||||||
|
} else {
|
||||||
|
// Initialize array and save data
|
||||||
|
fullExecutionData.data.resultData.runData[nodeName] = [data];
|
||||||
|
}
|
||||||
|
|
||||||
|
fullExecutionData.data.executionData = executionData.executionData;
|
||||||
|
|
||||||
|
// Set last executed node so that it may resume on failure
|
||||||
|
fullExecutionData.data.resultData.lastNodeExecuted = nodeName;
|
||||||
|
|
||||||
|
const flattenedExecutionData = ResponseHelper.flattenExecutionData(fullExecutionData);
|
||||||
|
|
||||||
|
await Db.collections.Execution!.update(this.executionId, flattenedExecutionData as IExecutionFlattedDb);
|
||||||
|
} catch (err) {
|
||||||
|
// TODO: Improve in the future!
|
||||||
|
// Errors here might happen because of database access
|
||||||
|
// For busy machines, we may get "Database is locked" errors.
|
||||||
|
|
||||||
|
// We do this to prevent crashes and executions ending in `unknown` state.
|
||||||
|
console.log(`Failed saving execution progress to database for execution ID ${this.executionId}`, err);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
if (fullExecutionData.data === undefined) {
|
|
||||||
fullExecutionData.data = {
|
|
||||||
startData: {
|
|
||||||
},
|
|
||||||
resultData: {
|
|
||||||
runData: {},
|
|
||||||
},
|
|
||||||
executionData: {
|
|
||||||
contextData: {},
|
|
||||||
nodeExecutionStack: [],
|
|
||||||
waitingExecution: {},
|
|
||||||
},
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
if (Array.isArray(fullExecutionData.data.resultData.runData[nodeName])) {
|
|
||||||
// Append data if array exists
|
|
||||||
fullExecutionData.data.resultData.runData[nodeName].push(data);
|
|
||||||
} else {
|
|
||||||
// Initialize array and save data
|
|
||||||
fullExecutionData.data.resultData.runData[nodeName] = [data];
|
|
||||||
}
|
|
||||||
|
|
||||||
fullExecutionData.data.executionData = executionData.executionData;
|
|
||||||
|
|
||||||
// Set last executed node so that it may resume on failure
|
|
||||||
fullExecutionData.data.resultData.lastNodeExecuted = nodeName;
|
|
||||||
|
|
||||||
const flattenedExecutionData = ResponseHelper.flattenExecutionData(fullExecutionData);
|
|
||||||
|
|
||||||
await Db.collections.Execution!.update(this.executionId, flattenedExecutionData as IExecutionFlattedDb);
|
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
};
|
};
|
||||||
|
|
|
@ -174,8 +174,8 @@ export class WorkflowRunnerProcess {
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
nodeExecuteAfter: [
|
nodeExecuteAfter: [
|
||||||
async (nodeName: string, data: ITaskData, executionData: IRunExecutionData): Promise<void> => {
|
async (nodeName: string, data: ITaskData): Promise<void> => {
|
||||||
this.sendHookToParentProcess('nodeExecuteAfter', [nodeName, data, executionData]);
|
this.sendHookToParentProcess('nodeExecuteAfter', [nodeName, data]);
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
workflowExecuteBefore: [
|
workflowExecuteBefore: [
|
||||||
|
|
|
@ -0,0 +1,18 @@
|
||||||
|
import { MigrationInterface, QueryRunner } from 'typeorm';
|
||||||
|
import * as config from '../../../../config';
|
||||||
|
|
||||||
|
export class ChangeDataSize1615306975123 implements MigrationInterface {
|
||||||
|
name = 'ChangeDataSize1615306975123';
|
||||||
|
|
||||||
|
async up(queryRunner: QueryRunner): Promise<void> {
|
||||||
|
const tablePrefix = config.get('database.tablePrefix');
|
||||||
|
|
||||||
|
await queryRunner.query('ALTER TABLE `' + tablePrefix + 'execution_entity` MODIFY COLUMN `data` MEDIUMTEXT NOT NULL');
|
||||||
|
}
|
||||||
|
|
||||||
|
async down(queryRunner: QueryRunner): Promise<void> {
|
||||||
|
const tablePrefix = config.get('database.tablePrefix');
|
||||||
|
|
||||||
|
await queryRunner.query('ALTER TABLE `' + tablePrefix + 'execution_entity` MODIFY COLUMN `data` TEXT NOT NULL');
|
||||||
|
}
|
||||||
|
}
|
|
@ -3,6 +3,7 @@ import { WebhookModel1592447867632 } from './1592447867632-WebhookModel';
|
||||||
import { CreateIndexStoppedAt1594902918301 } from './1594902918301-CreateIndexStoppedAt';
|
import { CreateIndexStoppedAt1594902918301 } from './1594902918301-CreateIndexStoppedAt';
|
||||||
import { AddWebhookId1611149998770 } from './1611149998770-AddWebhookId';
|
import { AddWebhookId1611149998770 } from './1611149998770-AddWebhookId';
|
||||||
import { MakeStoppedAtNullable1607431743767 } from './1607431743767-MakeStoppedAtNullable';
|
import { MakeStoppedAtNullable1607431743767 } from './1607431743767-MakeStoppedAtNullable';
|
||||||
|
import { ChangeDataSize1615306975123 } from './1615306975123-ChangeDataSize';
|
||||||
|
|
||||||
export const mysqlMigrations = [
|
export const mysqlMigrations = [
|
||||||
InitialMigration1588157391238,
|
InitialMigration1588157391238,
|
||||||
|
@ -10,4 +11,5 @@ export const mysqlMigrations = [
|
||||||
CreateIndexStoppedAt1594902918301,
|
CreateIndexStoppedAt1594902918301,
|
||||||
AddWebhookId1611149998770,
|
AddWebhookId1611149998770,
|
||||||
MakeStoppedAtNullable1607431743767,
|
MakeStoppedAtNullable1607431743767,
|
||||||
|
ChangeDataSize1615306975123,
|
||||||
];
|
];
|
||||||
|
|
Loading…
Reference in a new issue