2019-08-08 11:38:25 -07:00
|
|
|
import {
|
|
|
|
ActiveExecutions,
|
2020-05-14 05:27:19 -07:00
|
|
|
CredentialsOverwrites,
|
|
|
|
CredentialTypes,
|
2021-02-08 23:59:32 -08:00
|
|
|
Db,
|
2020-05-05 15:59:58 -07:00
|
|
|
ExternalHooks,
|
2021-02-08 23:59:32 -08:00
|
|
|
IBullJobData,
|
|
|
|
IBullJobResponse,
|
2020-05-14 05:27:19 -07:00
|
|
|
ICredentialsOverwrite,
|
|
|
|
ICredentialsTypeData,
|
2021-02-08 23:59:32 -08:00
|
|
|
IExecutionFlattedDb,
|
|
|
|
IExecutionResponse,
|
2019-08-08 11:38:25 -07:00
|
|
|
IProcessMessageDataHook,
|
|
|
|
ITransferNodeTypes,
|
|
|
|
IWorkflowExecutionDataProcess,
|
|
|
|
IWorkflowExecutionDataProcessWithExecution,
|
2020-01-17 17:34:31 -08:00
|
|
|
NodeTypes,
|
2019-08-08 11:38:25 -07:00
|
|
|
Push,
|
2021-02-08 23:59:32 -08:00
|
|
|
ResponseHelper,
|
2019-08-08 11:38:25 -07:00
|
|
|
WorkflowExecuteAdditionalData,
|
2019-10-14 22:36:53 -07:00
|
|
|
WorkflowHelpers,
|
2019-08-08 11:38:25 -07:00
|
|
|
} from './';
|
|
|
|
|
|
|
|
import {
|
|
|
|
IProcessMessage,
|
2020-01-17 17:34:31 -08:00
|
|
|
WorkflowExecute,
|
2019-08-08 11:38:25 -07:00
|
|
|
} from 'n8n-core';
|
|
|
|
|
|
|
|
import {
|
2021-04-16 09:33:36 -07:00
|
|
|
ExecutionError,
|
2019-08-08 11:38:25 -07:00
|
|
|
IRun,
|
2020-01-17 17:34:31 -08:00
|
|
|
Workflow,
|
2019-08-08 11:38:25 -07:00
|
|
|
WorkflowExecuteMode,
|
2020-10-22 06:46:03 -07:00
|
|
|
WorkflowHooks,
|
2021-04-16 09:33:36 -07:00
|
|
|
WorkflowOperationError,
|
2019-08-08 11:38:25 -07:00
|
|
|
} from 'n8n-workflow';
|
|
|
|
|
2020-01-17 17:34:31 -08:00
|
|
|
import * as config from '../config';
|
|
|
|
import * as PCancelable from 'p-cancelable';
|
2019-08-09 04:12:00 -07:00
|
|
|
import { join as pathJoin } from 'path';
|
2019-08-08 11:38:25 -07:00
|
|
|
import { fork } from 'child_process';
|
|
|
|
|
2021-02-08 23:59:32 -08:00
|
|
|
import * as Bull from 'bull';
|
2021-02-09 14:32:40 -08:00
|
|
|
import * as Queue from './Queue';
|
2019-08-08 11:38:25 -07:00
|
|
|
|
|
|
|
export class WorkflowRunner {
	// Registry of currently running executions (shared singleton).
	activeExecutions: ActiveExecutions.ActiveExecutions;
	// Credential data that overwrites what is stored with the workflow.
	credentialsOverwrites: ICredentialsOverwrite;
	// Push connection used to send realtime events to the editor-UI.
	push: Push.Push;
	// Bull queue for distributing executions to workers.
	// NOTE(review): only assigned when executions.mode === 'queue' — any
	// access outside queue mode would hit an undefined value; confirm all
	// call sites are guarded by the same mode check.
	jobQueue: Bull.Queue;


	constructor() {
		this.push = Push.getInstance();
		this.activeExecutions = ActiveExecutions.getInstance();
		this.credentialsOverwrites = CredentialsOverwrites().getAll();

		const executionsMode = config.get('executions.mode') as string;

		// The job queue is only needed when executions get handed off to
		// worker processes instead of running locally.
		if (executionsMode === 'queue') {
			this.jobQueue = Queue.getInstance().getBullObjectInstance();
		}
	}
|
|
|
|
|
|
|
|
|
|
|
|
	/**
	 * The process did send a hook message so execute the appropriate hook
	 *
	 * @param {WorkflowHooks} workflowHooks The hook collection to dispatch on
	 * @param {IProcessMessageDataHook} hookData Hook name and parameters sent by the child process
	 * @memberof WorkflowRunner
	 */
	processHookMessage(workflowHooks: WorkflowHooks, hookData: IProcessMessageDataHook) {
		// Fire-and-forget: the returned promise is intentionally not awaited here.
		workflowHooks.executeHookFunctions(hookData.hook, hookData.parameters);
	}
|
|
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
* The process did error
|
|
|
|
*
|
2021-04-16 09:33:36 -07:00
|
|
|
* @param {ExecutionError} error
|
2019-08-08 11:38:25 -07:00
|
|
|
* @param {Date} startedAt
|
|
|
|
* @param {WorkflowExecuteMode} executionMode
|
|
|
|
* @param {string} executionId
|
|
|
|
* @memberof WorkflowRunner
|
|
|
|
*/
|
2021-04-16 09:33:36 -07:00
|
|
|
processError(error: ExecutionError, startedAt: Date, executionMode: WorkflowExecuteMode, executionId: string) {
|
2019-08-08 11:38:25 -07:00
|
|
|
const fullRunData: IRun = {
|
|
|
|
data: {
|
|
|
|
resultData: {
|
|
|
|
error,
|
|
|
|
runData: {},
|
|
|
|
},
|
|
|
|
},
|
|
|
|
finished: false,
|
|
|
|
mode: executionMode,
|
|
|
|
startedAt,
|
|
|
|
stoppedAt: new Date(),
|
|
|
|
};
|
|
|
|
|
|
|
|
// Remove from active execution with empty data. That will
|
|
|
|
// set the execution to failed.
|
|
|
|
this.activeExecutions.remove(executionId, fullRunData);
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
2020-01-17 17:34:31 -08:00
|
|
|
* Run the workflow
|
2019-08-08 11:38:25 -07:00
|
|
|
*
|
|
|
|
* @param {IWorkflowExecutionDataProcess} data
|
2019-10-14 22:36:53 -07:00
|
|
|
* @param {boolean} [loadStaticData] If set will the static data be loaded from
|
|
|
|
* the workflow and added to input data
|
2019-08-08 11:38:25 -07:00
|
|
|
* @returns {Promise<string>}
|
|
|
|
* @memberof WorkflowRunner
|
|
|
|
*/
|
2021-02-08 23:59:32 -08:00
|
|
|
async run(data: IWorkflowExecutionDataProcess, loadStaticData?: boolean, realtime?: boolean): Promise<string> {
|
2020-01-17 17:49:31 -08:00
|
|
|
const executionsProcess = config.get('executions.process') as string;
|
2021-02-08 23:59:32 -08:00
|
|
|
const executionsMode = config.get('executions.mode') as string;
|
2020-10-20 10:01:40 -07:00
|
|
|
|
|
|
|
let executionId: string;
|
2021-02-08 23:59:32 -08:00
|
|
|
if (executionsMode === 'queue' && data.executionMode !== 'manual') {
|
|
|
|
// Do not run "manual" executions in bull because sending events to the
|
|
|
|
// frontend would not be possible
|
|
|
|
executionId = await this.runBull(data, loadStaticData, realtime);
|
|
|
|
} else if (executionsProcess === 'main') {
|
2020-10-20 10:01:40 -07:00
|
|
|
executionId = await this.runMainProcess(data, loadStaticData);
|
|
|
|
} else {
|
|
|
|
executionId = await this.runSubprocess(data, loadStaticData);
|
|
|
|
}
|
|
|
|
|
2020-11-13 14:31:27 -08:00
|
|
|
const externalHooks = ExternalHooks();
|
2020-10-20 10:01:40 -07:00
|
|
|
if (externalHooks.exists('workflow.postExecute')) {
|
|
|
|
this.activeExecutions.getPostExecutePromise(executionId)
|
|
|
|
.then(async (executionData) => {
|
|
|
|
await externalHooks.run('workflow.postExecute', [executionData, data.workflowData]);
|
|
|
|
})
|
|
|
|
.catch(error => {
|
|
|
|
console.error('There was a problem running hook "workflow.postExecute"', error);
|
|
|
|
});
|
2020-01-17 17:34:31 -08:00
|
|
|
}
|
|
|
|
|
2020-10-20 10:01:40 -07:00
|
|
|
return executionId;
|
2020-01-17 17:34:31 -08:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
	/**
	 * Run the workflow in current process
	 *
	 * @param {IWorkflowExecutionDataProcess} data
	 * @param {boolean} [loadStaticData] If set will the static data be loaded from
	 *                                   the workflow and added to input data
	 * @returns {Promise<string>} Id of the started execution
	 * @memberof WorkflowRunner
	 */
	async runMainProcess(data: IWorkflowExecutionDataProcess, loadStaticData?: boolean): Promise<string> {
		if (loadStaticData === true && data.workflowData.id) {
			data.workflowData.staticData = await WorkflowHelpers.getStaticDataById(data.workflowData.id as string);
		}

		const nodeTypes = NodeTypes();

		// Soft timeout to stop workflow execution after current running node
		// Changes were made by adding the `workflowTimeout` to the `additionalData`
		// So that the timeout will also work for executions with nested workflows.
		let executionTimeout: NodeJS.Timeout;
		let workflowTimeout = config.get('executions.timeout') as number; // initialize with default
		if (data.workflowData.settings && data.workflowData.settings.executionTimeout) {
			workflowTimeout = data.workflowData.settings!.executionTimeout as number; // preference on workflow setting
		}

		// Clamp the workflow-level timeout to the configured global maximum.
		if (workflowTimeout > 0) {
			workflowTimeout = Math.min(workflowTimeout, config.get('executions.maxTimeout') as number);
		}

		const workflow = new Workflow({ id: data.workflowData.id as string | undefined, name: data.workflowData.name, nodes: data.workflowData!.nodes, connections: data.workflowData!.connections, active: data.workflowData!.active, nodeTypes, staticData: data.workflowData!.staticData });
		// Passing the absolute deadline (ms epoch) makes nested workflow
		// executions respect the same timeout; <= 0 means "no timeout".
		const additionalData = await WorkflowExecuteAdditionalData.getBase(data.credentials, undefined, workflowTimeout <= 0 ? undefined : Date.now() + workflowTimeout * 1000);

		// Register the active execution
		const executionId = await this.activeExecutions.add(data, undefined);

		additionalData.hooks = WorkflowExecuteAdditionalData.getWorkflowHooksMain(data, executionId, true);

		// Choose how to execute: resume existing execution data, run the
		// full workflow, or run only a part of it.
		let workflowExecution: PCancelable<IRun>;
		if (data.executionData !== undefined) {
			// Existing execution data present (e.g. a resumed execution).
			const workflowExecute = new WorkflowExecute(additionalData, data.executionMode, data.executionData);
			workflowExecution = workflowExecute.processRunExecutionData(workflow);
		} else if (data.runData === undefined || data.startNodes === undefined || data.startNodes.length === 0 || data.destinationNode === undefined) {
			// Execute all nodes

			// Can execute without webhook so go on
			const workflowExecute = new WorkflowExecute(additionalData, data.executionMode);
			workflowExecution = workflowExecute.run(workflow, undefined, data.destinationNode);
		} else {
			// Execute only the nodes between start and destination nodes
			const workflowExecute = new WorkflowExecute(additionalData, data.executionMode);
			workflowExecution = workflowExecute.runPartialWorkflow(workflow, data.runData, data.startNodes, data.destinationNode);
		}

		this.activeExecutions.attachWorkflowExecution(executionId, workflowExecution);

		// Arm the soft timeout which stops the execution after the
		// currently running node finished.
		if (workflowTimeout > 0) {
			const timeout = Math.min(workflowTimeout, config.get('executions.maxTimeout') as number) * 1000; // as seconds
			executionTimeout = setTimeout(() => {
				this.activeExecutions.stopExecution(executionId, 'timeout');
			}, timeout);
		}

		workflowExecution.then((fullRunData) => {
			clearTimeout(executionTimeout);
			if (workflowExecution.isCanceled) {
				// A canceled execution must not be reported as finished.
				fullRunData.finished = false;
			}
			this.activeExecutions.remove(executionId, fullRunData);
		});

		return executionId;
	}
|
|
|
|
|
2021-02-08 23:59:32 -08:00
|
|
|
	/**
	 * Run the workflow via the bull queue: enqueue a job for a worker,
	 * wait for its result (or its cancellation/timeout), then read the
	 * finished execution back from the database.
	 *
	 * @param {IWorkflowExecutionDataProcess} data
	 * @param {boolean} [loadStaticData] If set will the static data be loaded from
	 *                                   the workflow and added to input data
	 * @param {boolean} [realtime] If set the job gets a higher queue priority
	 * @returns {Promise<string>} Id of the started execution
	 * @memberof WorkflowRunner
	 */
	async runBull(data: IWorkflowExecutionDataProcess, loadStaticData?: boolean, realtime?: boolean): Promise<string> {

		// TODO: If "loadStaticData" is set to true it has to load data new on worker

		// Register the active execution
		const executionId = await this.activeExecutions.add(data, undefined);

		// The worker loads the workflow itself by id, so only the
		// execution id and the static-data flag get queued.
		const jobData: IBullJobData = {
			executionId,
			loadStaticData: !!loadStaticData,
		};

		// Bull: a LOWER number means a HIGHER priority.
		let priority = 100;
		if (realtime === true) {
			// Jobs which require a direct response get a higher priority
			priority = 50;
		}
		// TODO: For realtime jobs should probably also not do retry or not retry if they are older than x seconds.
		// Check if they get retried by default and how often.
		const jobOptions = {
			priority,
			removeOnComplete: true,
			removeOnFail: true,
		};
		const job = await this.jobQueue.add(jobData, jobOptions);
		console.log('Started with ID: ' + job.id.toString());

		const hooks = WorkflowExecuteAdditionalData.getWorkflowHooksWorkerMain(data.executionMode, executionId, data.workflowData, { retryOf: data.retryOf ? data.retryOf.toString() : undefined });

		// Normally also workflow should be supplied here but as it only used for sending
		// data to editor-UI is not needed.
		hooks.executeHookFunctions('workflowExecuteBefore', []);

		// Wrap the whole wait-for-worker flow in a cancelable promise so
		// the execution can be stopped from the outside.
		const workflowExecution: PCancelable<IRun> = new PCancelable(async (resolve, reject, onCancel) => {
			onCancel.shouldReject = false;
			onCancel(async () => {
				// Stop the queued/running job and report a canceled run.
				await Queue.getInstance().stopJob(job);

				const fullRunData :IRun = {
					data: {
						resultData: {
							error: new WorkflowOperationError('Workflow has been canceled!'),
							runData: {},
						},
					},
					mode: data.executionMode,
					startedAt: new Date(),
					stoppedAt: new Date(),
				};
				this.activeExecutions.remove(executionId, fullRunData);
				resolve(fullRunData);
			});

			// Promise that settles when the worker reports the job finished.
			const jobData: Promise<IBullJobResponse> = job.finished();

			const queueRecoveryInterval = config.get('queue.bull.queueRecoveryInterval') as number;

			if (queueRecoveryInterval > 0) {
				/*************************************************
				 * Long explanation about what this solves:      *
				 * This only happens in a very specific scenario *
				 * when Redis crashes and recovers shortly       *
				 * but during this time, some execution(s)       *
				 * finished. The end result is that the main     *
				 * process will wait indefinitively and never    *
				 * get a response. This adds an active polling to*
				 * the queue that allows us to identify that the *
				 * execution finished and get information from   *
				 * the database.                                 *
				 *************************************************/
				let watchDogInterval: NodeJS.Timeout | undefined;

				const watchDog = new Promise((res) => {
					watchDogInterval = setInterval(async () => {
						const currentJob = await this.jobQueue.getJob(job.id);
						// When null means job is finished (not found in queue)
						if (currentJob === null) {
							// Mimic worker's success message
							res({success: true});
						}
					}, queueRecoveryInterval * 1000);
				});

				const clearWatchdogInterval = () => {
					if (watchDogInterval) {
						clearInterval(watchDogInterval);
						watchDogInterval = undefined;
					}
				};

				// Whichever settles first wins: worker response or watchdog.
				await Promise.race([jobData, watchDog]);
				clearWatchdogInterval();

			} else {
				await jobData;
			}

			// The worker persisted the result; read it back from the
			// database to get the full run data.
			const executionDb = await Db.collections.Execution!.findOne(executionId) as IExecutionFlattedDb;
			const fullExecutionData = ResponseHelper.unflattenExecutionData(executionDb) as IExecutionResponse;
			const runData = {
				data: fullExecutionData.data,
				finished: fullExecutionData.finished,
				mode: fullExecutionData.mode,
				startedAt: fullExecutionData.startedAt,
				stoppedAt: fullExecutionData.stoppedAt,
			} as IRun;

			this.activeExecutions.remove(executionId, runData);
			// Normally also static data should be supplied here but as it only used for sending
			// data to editor-UI is not needed.
			hooks.executeHookFunctions('workflowExecuteAfter', [runData]);
			try {
				// Check if this execution data has to be removed from database
				// based on workflow settings.
				let saveDataErrorExecution = config.get('executions.saveDataOnError') as string;
				let saveDataSuccessExecution = config.get('executions.saveDataOnSuccess') as string;
				if (data.workflowData.settings !== undefined) {
					saveDataErrorExecution = (data.workflowData.settings.saveDataErrorExecution as string) || saveDataErrorExecution;
					saveDataSuccessExecution = (data.workflowData.settings.saveDataSuccessExecution as string) || saveDataSuccessExecution;
				}

				const workflowDidSucceed = !runData.data.resultData.error;
				if (workflowDidSucceed === true && saveDataSuccessExecution === 'none' ||
					workflowDidSucceed === false && saveDataErrorExecution === 'none'
				) {
					await Db.collections.Execution!.delete(executionId);
				}
			} catch (err) {
				// We don't want errors here to crash n8n. Just log and proceed.
				console.log('Error removing saved execution from database. More details: ', err);
			}

			resolve(runData);
		});

		this.activeExecutions.attachWorkflowExecution(executionId, workflowExecution);
		return executionId;
	}
|
|
|
|
|
|
|
|
|
2020-01-17 17:34:31 -08:00
|
|
|
	/**
	 * Run the workflow in a forked child process
	 *
	 * @param {IWorkflowExecutionDataProcess} data
	 * @param {boolean} [loadStaticData] If set will the static data be loaded from
	 *                                   the workflow and added to input data
	 * @returns {Promise<string>} Id of the started execution
	 * @memberof WorkflowRunner
	 */
	async runSubprocess(data: IWorkflowExecutionDataProcess, loadStaticData?: boolean): Promise<string> {
		let startedAt = new Date();
		const subprocess = fork(pathJoin(__dirname, 'WorkflowRunnerProcess.js'));

		if (loadStaticData === true && data.workflowData.id) {
			data.workflowData.staticData = await WorkflowHelpers.getStaticDataById(data.workflowData.id as string);
		}

		// Register the active execution
		const executionId = await this.activeExecutions.add(data, subprocess);

		// Check if workflow contains a "executeWorkflow" Node as in this
		// case we can not know which nodeTypes and credentialTypes will
		// be needed and so have to load all of them in the workflowRunnerProcess
		let loadAllNodeTypes = false;
		for (const node of data.workflowData.nodes) {
			if (node.type === 'n8n-nodes-base.executeWorkflow') {
				loadAllNodeTypes = true;
				break;
			}
		}

		let nodeTypeData: ITransferNodeTypes;
		let credentialTypeData: ICredentialsTypeData;
		let credentialsOverwrites = this.credentialsOverwrites;

		if (loadAllNodeTypes === true) {
			// Supply all nodeTypes and credentialTypes
			nodeTypeData = WorkflowHelpers.getAllNodeTypeData();
			const credentialTypes = CredentialTypes();
			credentialTypeData = credentialTypes.credentialTypes;
		} else {
			// Supply only nodeTypes, credentialTypes and overwrites that the workflow needs
			nodeTypeData = WorkflowHelpers.getNodeTypeData(data.workflowData.nodes);
			credentialTypeData = WorkflowHelpers.getCredentialsData(data.credentials);

			// Only forward the overwrites for credential types the
			// workflow actually uses.
			credentialsOverwrites = {};
			for (const credentialName of Object.keys(credentialTypeData)) {
				if (this.credentialsOverwrites[credentialName] !== undefined) {
					credentialsOverwrites[credentialName] = this.credentialsOverwrites[credentialName];
				}
			}
		}

		// Attach everything the child process needs onto the data object
		// that gets sent over IPC.
		(data as unknown as IWorkflowExecutionDataProcessWithExecution).executionId = executionId;
		(data as unknown as IWorkflowExecutionDataProcessWithExecution).nodeTypeData = nodeTypeData;
		(data as unknown as IWorkflowExecutionDataProcessWithExecution).credentialsOverwrite = credentialsOverwrites;
		(data as unknown as IWorkflowExecutionDataProcessWithExecution).credentialsTypeData = credentialTypeData; // TODO: Still needs correct value

		const workflowHooks = WorkflowExecuteAdditionalData.getWorkflowHooksMain(data, executionId);

		// Send all data to subprocess it needs to run the workflow
		subprocess.send({ type: 'startWorkflow', data } as IProcessMessage);

		// Start timeout for the execution
		let executionTimeout: NodeJS.Timeout;
		let workflowTimeout = config.get('executions.timeout') as number; // initialize with default
		if (data.workflowData.settings && data.workflowData.settings.executionTimeout) {
			workflowTimeout = data.workflowData.settings!.executionTimeout as number; // preference on workflow setting
		}

		// First asks the execution to stop gracefully; if the process is
		// still alive after a grace period it gets killed.
		const processTimeoutFunction = (timeout: number) => {
			this.activeExecutions.stopExecution(executionId, 'timeout');
			executionTimeout = setTimeout(() => subprocess.kill(), Math.max(timeout * 0.2, 5000)); // minimum 5 seconds
		};

		if (workflowTimeout > 0) {
			workflowTimeout = Math.min(workflowTimeout, config.get('executions.maxTimeout') as number) * 1000; // as seconds
			// Start timeout already now but give process at least 5 seconds to start.
			// Without it, it would be possible that the workflow execution times out before it even got started if
			// the timeout time is very short as the process start time can be quite long.
			executionTimeout = setTimeout(processTimeoutFunction, Math.max(5000, workflowTimeout), workflowTimeout);
		}

		// Create a list of child spawned executions
		// If after the child process exits we have
		// outstanding executions, we remove them
		const childExecutionIds: string[] = [];

		// Listen to data from the subprocess
		subprocess.on('message', async (message: IProcessMessage) => {
			if (message.type === 'start') {
				// Now that the execution actually started set the timeout again so that does not time out to early.
				startedAt = new Date();
				if (workflowTimeout > 0) {
					clearTimeout(executionTimeout);
					executionTimeout = setTimeout(processTimeoutFunction, workflowTimeout, workflowTimeout);
				}

			} else if (message.type === 'end') {
				clearTimeout(executionTimeout);
				this.activeExecutions.remove(executionId!, message.data.runData);

			} else if (message.type === 'processError') {
				clearTimeout(executionTimeout);
				const executionError = message.data.executionError as ExecutionError;
				this.processError(executionError, startedAt, data.executionMode, executionId);

			} else if (message.type === 'processHook') {
				this.processHookMessage(workflowHooks, message.data as IProcessMessageDataHook);
			} else if (message.type === 'timeout') {
				// Execution timed out and its process has been terminated
				const timeoutError = new WorkflowOperationError('Workflow execution timed out!');

				this.processError(timeoutError, startedAt, data.executionMode, executionId);
			} else if (message.type === 'startExecution') {
				// The child process started a nested workflow execution;
				// register it here and hand the id back over IPC.
				const executionId = await this.activeExecutions.add(message.data.runData);
				childExecutionIds.push(executionId);
				subprocess.send({ type: 'executionId', data: {executionId} } as IProcessMessage);
			} else if (message.type === 'finishExecution') {
				const executionIdIndex = childExecutionIds.indexOf(message.data.executionId);
				if (executionIdIndex !== -1) {
					childExecutionIds.splice(executionIdIndex, 1);
				}

				await this.activeExecutions.remove(message.data.executionId, message.data.result);
			}
		});

		// Also get informed when the processes does exit especially when it did crash or timed out
		subprocess.on('exit', async (code, signal) => {
			if (signal === 'SIGTERM'){
				// Execution timed out and its process has been terminated
				const timeoutError = new WorkflowOperationError('Workflow execution timed out!');

				this.processError(timeoutError, startedAt, data.executionMode, executionId);
			} else if (code !== 0) {
				// Process did exit with error code, so something went wrong.
				const executionError = new WorkflowOperationError('Workflow execution process did crash for an unknown reason!');

				this.processError(executionError, startedAt, data.executionMode, executionId);
			}

			for(const executionId of childExecutionIds) {
				// When the child process exits, if we still have
				// pending child executions, we mark them as finished
				// They will display as unknown to the user
				// Instead of pending forever as executing when it
				// actually isn't anymore.
				await this.activeExecutions.remove(executionId);
			}

			clearTimeout(executionTimeout);
		});

		return executionId;
	}
|
|
|
|
}
|