diff --git a/.github/workflows/ci-pull-requests.yml b/.github/workflows/ci-pull-requests.yml index 48a34c5eab..1ce7427098 100644 --- a/.github/workflows/ci-pull-requests.yml +++ b/.github/workflows/ci-pull-requests.yml @@ -49,6 +49,9 @@ jobs: with: ref: refs/pull/${{ github.event.pull_request.number }}/merge cacheKey: ${{ github.sha }}-base:build + collectCoverage: true + secrets: + CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} lint: name: Lint diff --git a/package.json b/package.json index bbcfb52778..82130f01b4 100644 --- a/package.json +++ b/package.json @@ -88,8 +88,7 @@ "pyodide@0.23.4": "patches/pyodide@0.23.4.patch", "@types/express-serve-static-core@4.17.43": "patches/@types__express-serve-static-core@4.17.43.patch", "@types/ws@8.5.4": "patches/@types__ws@8.5.4.patch", - "@types/uuencode@0.0.3": "patches/@types__uuencode@0.0.3.patch", - "@langchain/core@0.3.3": "patches/@langchain__core@0.3.3.patch" + "@types/uuencode@0.0.3": "patches/@types__uuencode@0.0.3.patch" } } } diff --git a/packages/@n8n/config/src/configs/pruning.config.ts b/packages/@n8n/config/src/configs/pruning.config.ts new file mode 100644 index 0000000000..109dffc462 --- /dev/null +++ b/packages/@n8n/config/src/configs/pruning.config.ts @@ -0,0 +1,35 @@ +import { Config, Env } from '../decorators'; + +@Config +export class PruningConfig { + /** Whether to delete past executions on a rolling basis. */ + @Env('EXECUTIONS_DATA_PRUNE') + isEnabled: boolean = true; + + /** How old (hours) a finished execution must be to qualify for soft-deletion. */ + @Env('EXECUTIONS_DATA_MAX_AGE') + maxAge: number = 336; + + /** + * Max number of finished executions to keep in database. Does not necessarily + * prune to the exact max number. `0` for unlimited. + */ + @Env('EXECUTIONS_DATA_PRUNE_MAX_COUNT') + maxCount: number = 10_000; + + /** + * How old (hours) a finished execution must be to qualify for hard-deletion. + * This buffer by default excludes recent executions as the user may need + * them while building a workflow. + */ + @Env('EXECUTIONS_DATA_HARD_DELETE_BUFFER') + hardDeleteBuffer: number = 1; + + /** How often (minutes) execution data should be hard-deleted. 
*/ + @Env('EXECUTIONS_DATA_PRUNE_HARD_DELETE_INTERVAL') + hardDeleteInterval: number = 15; + + /** How often (minutes) execution data should be soft-deleted */ + @Env('EXECUTIONS_DATA_PRUNE_SOFT_DELETE_INTERVAL') + softDeleteInterval: number = 60; +} diff --git a/packages/@n8n/config/src/index.ts b/packages/@n8n/config/src/index.ts index c056a1090c..0a89535ee3 100644 --- a/packages/@n8n/config/src/index.ts +++ b/packages/@n8n/config/src/index.ts @@ -10,6 +10,7 @@ import { LicenseConfig } from './configs/license.config'; import { LoggingConfig } from './configs/logging.config'; import { MultiMainSetupConfig } from './configs/multi-main-setup.config'; import { NodesConfig } from './configs/nodes.config'; +import { PruningConfig } from './configs/pruning.config'; import { PublicApiConfig } from './configs/public-api.config'; import { TaskRunnersConfig } from './configs/runners.config'; import { ScalingModeConfig } from './configs/scaling-mode.config'; @@ -24,6 +25,7 @@ import { Config, Env, Nested } from './decorators'; export { Config, Env, Nested } from './decorators'; export { TaskRunnersConfig } from './configs/runners.config'; export { SecurityConfig } from './configs/security.config'; +export { PruningConfig } from './configs/pruning.config'; export { FrontendBetaFeatures, FrontendConfig } from './configs/frontend.config'; export { LOG_SCOPES } from './configs/logging.config'; export type { LogScope } from './configs/logging.config'; @@ -112,4 +114,7 @@ export class GlobalConfig { @Nested security: SecurityConfig; + + @Nested + pruning: PruningConfig; } diff --git a/packages/@n8n/config/test/config.test.ts b/packages/@n8n/config/test/config.test.ts index 07af2c0a0b..bc10028f36 100644 --- a/packages/@n8n/config/test/config.test.ts +++ b/packages/@n8n/config/test/config.test.ts @@ -271,6 +271,14 @@ describe('GlobalConfig', () => { blockFileAccessToN8nFiles: true, daysAbandonedWorkflow: 90, }, + pruning: { + isEnabled: true, + maxAge: 336, + maxCount: 10_000, + hardDeleteBuffer: 1, + hardDeleteInterval: 15, + softDeleteInterval: 60, + }, }; it('should use all default values when no env variables are defined', () => { diff --git a/packages/@n8n/nodes-langchain/package.json b/packages/@n8n/nodes-langchain/package.json index 05bf348851..5910094890 100644 --- a/packages/@n8n/nodes-langchain/package.json +++ b/packages/@n8n/nodes-langchain/package.json @@ -137,22 +137,23 @@ "@google-cloud/resource-manager": "5.3.0", "@google/generative-ai": "0.19.0", "@huggingface/inference": "2.8.0", - "@langchain/anthropic": "0.3.1", - "@langchain/aws": "0.1.0", - "@langchain/cohere": "0.3.0", - "@langchain/community": "0.3.2", + "@langchain/anthropic": "0.3.7", + "@langchain/aws": "0.1.1", + "@langchain/cohere": "0.3.1", + "@langchain/community": "0.3.11", "@langchain/core": "catalog:", - "@langchain/google-genai": "0.1.0", + "@langchain/google-genai": "0.1.2", "@langchain/google-vertexai": "0.1.0", "@langchain/groq": "0.1.2", "@langchain/mistralai": "0.1.1", - "@langchain/ollama": "0.1.0", - "@langchain/openai": "0.3.0", - "@langchain/pinecone": "0.1.0", + "@langchain/ollama": "0.1.1", + "@langchain/openai": "0.3.11", + "@langchain/pinecone": "0.1.1", "@langchain/qdrant": "0.1.0", "@langchain/redis": "0.1.0", "@langchain/textsplitters": "0.1.0", "@mozilla/readability": "0.5.0", + "@n8n/json-schema-to-zod": "workspace:*", "@n8n/typeorm": "0.3.20-12", "@n8n/vm2": "3.9.25", "@pinecone-database/pinecone": "3.0.3", @@ -168,14 +169,13 @@ "generate-schema": "2.6.0", "html-to-text": "9.0.5", "jsdom": "23.0.1", - 
"@n8n/json-schema-to-zod": "workspace:*", - "langchain": "0.3.2", + "langchain": "0.3.5", "lodash": "catalog:", "mammoth": "1.7.2", "mime-types": "2.1.35", "n8n-nodes-base": "workspace:*", "n8n-workflow": "workspace:*", - "openai": "4.63.0", + "openai": "4.69.0", "pdf-parse": "1.1.1", "pg": "8.12.0", "redis": "4.6.12", diff --git a/packages/@n8n/task-runner/package.json b/packages/@n8n/task-runner/package.json index 74c3e323bb..cb47095c15 100644 --- a/packages/@n8n/task-runner/package.json +++ b/packages/@n8n/task-runner/package.json @@ -23,8 +23,10 @@ ], "dependencies": { "@n8n/config": "workspace:*", - "n8n-workflow": "workspace:*", + "acorn": "8.14.0", + "acorn-walk": "8.3.4", "n8n-core": "workspace:*", + "n8n-workflow": "workspace:*", "nanoid": "^3.3.6", "typedi": "catalog:", "ws": "^8.18.0" diff --git a/packages/@n8n/task-runner/src/js-task-runner/__tests__/js-task-runner.test.ts b/packages/@n8n/task-runner/src/js-task-runner/__tests__/js-task-runner.test.ts index 36f3c5afa2..cd0863b13e 100644 --- a/packages/@n8n/task-runner/src/js-task-runner/__tests__/js-task-runner.test.ts +++ b/packages/@n8n/task-runner/src/js-task-runner/__tests__/js-task-runner.test.ts @@ -4,14 +4,11 @@ import fs from 'node:fs'; import { builtinModules } from 'node:module'; import { ValidationError } from '@/js-task-runner/errors/validation-error'; -import { - JsTaskRunner, - type AllCodeTaskData, - type JSExecSettings, -} from '@/js-task-runner/js-task-runner'; +import type { DataRequestResponse, JSExecSettings } from '@/js-task-runner/js-task-runner'; +import { JsTaskRunner } from '@/js-task-runner/js-task-runner'; import type { Task } from '@/task-runner'; -import { newAllCodeTaskData, newTaskWithSettings, withPairedItem, wrapIntoJson } from './test-data'; +import { newCodeTaskData, newTaskWithSettings, withPairedItem, wrapIntoJson } from './test-data'; import type { JsRunnerConfig } from '../../config/js-runner-config'; import { MainConfig } from '../../config/main-config'; import { ExecutionError } from '../errors/execution-error'; @@ -43,7 +40,7 @@ describe('JsTaskRunner', () => { runner = defaultTaskRunner, }: { task: Task; - taskData: AllCodeTaskData; + taskData: DataRequestResponse; runner?: JsTaskRunner; }) => { jest.spyOn(runner, 'requestData').mockResolvedValue(taskData); @@ -71,7 +68,7 @@ describe('JsTaskRunner', () => { nodeMode: 'runOnceForAllItems', ...settings, }), - taskData: newAllCodeTaskData(inputItems.map(wrapIntoJson)), + taskData: newCodeTaskData(inputItems.map(wrapIntoJson)), runner, }); }; @@ -94,7 +91,7 @@ describe('JsTaskRunner', () => { nodeMode: 'runOnceForEachItem', ...settings, }), - taskData: newAllCodeTaskData(inputItems.map(wrapIntoJson)), + taskData: newCodeTaskData(inputItems.map(wrapIntoJson)), runner, }); }; @@ -111,7 +108,7 @@ describe('JsTaskRunner', () => { await execTaskWithParams({ task, - taskData: newAllCodeTaskData([wrapIntoJson({})]), + taskData: newCodeTaskData([wrapIntoJson({})]), }); expect(defaultTaskRunner.makeRpcCall).toHaveBeenCalledWith(task.taskId, 'logNodeOutput', [ @@ -246,7 +243,7 @@ describe('JsTaskRunner', () => { code: 'return { val: $env.VAR1 }', nodeMode: 'runOnceForAllItems', }), - taskData: newAllCodeTaskData(inputItems.map(wrapIntoJson), { + taskData: newCodeTaskData(inputItems.map(wrapIntoJson), { envProviderState: { isEnvAccessBlocked: false, isProcessAvailable: true, @@ -265,7 +262,7 @@ describe('JsTaskRunner', () => { code: 'return { val: $env.VAR1 }', nodeMode: 'runOnceForAllItems', }), - taskData: 
newAllCodeTaskData(inputItems.map(wrapIntoJson), { + taskData: newCodeTaskData(inputItems.map(wrapIntoJson), { envProviderState: { isEnvAccessBlocked: true, isProcessAvailable: true, @@ -282,7 +279,7 @@ describe('JsTaskRunner', () => { code: 'return Object.values($env).concat(Object.keys($env))', nodeMode: 'runOnceForAllItems', }), - taskData: newAllCodeTaskData(inputItems.map(wrapIntoJson), { + taskData: newCodeTaskData(inputItems.map(wrapIntoJson), { envProviderState: { isEnvAccessBlocked: false, isProcessAvailable: true, @@ -301,7 +298,7 @@ describe('JsTaskRunner', () => { code: 'return { val: $env.N8N_RUNNERS_N8N_URI }', nodeMode: 'runOnceForAllItems', }), - taskData: newAllCodeTaskData(inputItems.map(wrapIntoJson), { + taskData: newCodeTaskData(inputItems.map(wrapIntoJson), { envProviderState: undefined, }), }); @@ -316,7 +313,7 @@ describe('JsTaskRunner', () => { code: 'return { val: Buffer.from("test-buffer").toString() }', nodeMode: 'runOnceForAllItems', }), - taskData: newAllCodeTaskData(inputItems.map(wrapIntoJson), { + taskData: newCodeTaskData(inputItems.map(wrapIntoJson), { envProviderState: undefined, }), }); @@ -328,7 +325,7 @@ describe('JsTaskRunner', () => { code: 'return { val: Buffer.from("test-buffer").toString() }', nodeMode: 'runOnceForEachItem', }), - taskData: newAllCodeTaskData(inputItems.map(wrapIntoJson), { + taskData: newCodeTaskData(inputItems.map(wrapIntoJson), { envProviderState: undefined, }), }); @@ -774,7 +771,7 @@ describe('JsTaskRunner', () => { code: 'unknown', nodeMode, }), - taskData: newAllCodeTaskData([wrapIntoJson({ a: 1 })]), + taskData: newCodeTaskData([wrapIntoJson({ a: 1 })]), }), ).rejects.toThrow(ExecutionError); }, @@ -796,7 +793,7 @@ describe('JsTaskRunner', () => { jest.spyOn(runner, 'sendOffers').mockImplementation(() => {}); jest .spyOn(runner, 'requestData') - .mockResolvedValue(newAllCodeTaskData([wrapIntoJson({ a: 1 })])); + .mockResolvedValue(newCodeTaskData([wrapIntoJson({ a: 1 })])); await runner.receivedSettings(taskId, task.settings); diff --git a/packages/@n8n/task-runner/src/js-task-runner/__tests__/test-data.ts b/packages/@n8n/task-runner/src/js-task-runner/__tests__/test-data.ts index b157094619..6de3e6d2b1 100644 --- a/packages/@n8n/task-runner/src/js-task-runner/__tests__/test-data.ts +++ b/packages/@n8n/task-runner/src/js-task-runner/__tests__/test-data.ts @@ -2,7 +2,7 @@ import type { IDataObject, INode, INodeExecutionData, ITaskData } from 'n8n-work import { NodeConnectionType } from 'n8n-workflow'; import { nanoid } from 'nanoid'; -import type { AllCodeTaskData, JSExecSettings } from '@/js-task-runner/js-task-runner'; +import type { DataRequestResponse, JSExecSettings } from '@/js-task-runner/js-task-runner'; import type { Task } from '@/task-runner'; /** @@ -48,10 +48,10 @@ export const newTaskData = (opts: Partial & Pick /** * Creates a new all code task data with the given options */ -export const newAllCodeTaskData = ( +export const newCodeTaskData = ( codeNodeInputData: INodeExecutionData[], - opts: Partial = {}, -): AllCodeTaskData => { + opts: Partial = {}, +): DataRequestResponse => { const codeNode = newNode({ name: 'JsCode', parameters: { diff --git a/packages/@n8n/task-runner/src/js-task-runner/built-ins-parser/__tests__/built-ins-parser-state.test.ts b/packages/@n8n/task-runner/src/js-task-runner/built-ins-parser/__tests__/built-ins-parser-state.test.ts new file mode 100644 index 0000000000..0b75ae563e --- /dev/null +++ 
b/packages/@n8n/task-runner/src/js-task-runner/built-ins-parser/__tests__/built-ins-parser-state.test.ts @@ -0,0 +1,117 @@ +import { BuiltInsParserState } from '../built-ins-parser-state'; + +describe('BuiltInsParserState', () => { + describe('toDataRequestSpecification', () => { + it('should return empty array when no properties are marked as needed', () => { + const state = new BuiltInsParserState(); + + expect(state.toDataRequestParams()).toEqual({ + dataOfNodes: [], + env: false, + input: false, + prevNode: false, + }); + }); + + it('should return all nodes and input when markNeedsAllNodes is called', () => { + const state = new BuiltInsParserState(); + state.markNeedsAllNodes(); + + expect(state.toDataRequestParams()).toEqual({ + dataOfNodes: 'all', + env: false, + input: true, + prevNode: false, + }); + }); + + it('should return specific node names when nodes are marked as needed individually', () => { + const state = new BuiltInsParserState(); + state.markNodeAsNeeded('Node1'); + state.markNodeAsNeeded('Node2'); + + expect(state.toDataRequestParams()).toEqual({ + dataOfNodes: ['Node1', 'Node2'], + env: false, + input: false, + prevNode: false, + }); + }); + + it('should ignore individual nodes when needsAllNodes is marked as true', () => { + const state = new BuiltInsParserState(); + state.markNodeAsNeeded('Node1'); + state.markNeedsAllNodes(); + state.markNodeAsNeeded('Node2'); // should be ignored since all nodes are needed + + expect(state.toDataRequestParams()).toEqual({ + dataOfNodes: 'all', + env: false, + input: true, + prevNode: false, + }); + }); + + it('should mark env as needed when markEnvAsNeeded is called', () => { + const state = new BuiltInsParserState(); + state.markEnvAsNeeded(); + + expect(state.toDataRequestParams()).toEqual({ + dataOfNodes: [], + env: true, + input: false, + prevNode: false, + }); + }); + + it('should mark input as needed when markInputAsNeeded is called', () => { + const state = new BuiltInsParserState(); + state.markInputAsNeeded(); + + expect(state.toDataRequestParams()).toEqual({ + dataOfNodes: [], + env: false, + input: true, + prevNode: false, + }); + }); + + it('should mark prevNode as needed when markPrevNodeAsNeeded is called', () => { + const state = new BuiltInsParserState(); + state.markPrevNodeAsNeeded(); + + expect(state.toDataRequestParams()).toEqual({ + dataOfNodes: [], + env: false, + input: false, + prevNode: true, + }); + }); + + it('should return correct specification when multiple properties are marked as needed', () => { + const state = new BuiltInsParserState(); + state.markNeedsAllNodes(); + state.markEnvAsNeeded(); + state.markInputAsNeeded(); + state.markPrevNodeAsNeeded(); + + expect(state.toDataRequestParams()).toEqual({ + dataOfNodes: 'all', + env: true, + input: true, + prevNode: true, + }); + }); + + it('should return correct specification when all properties are marked as needed', () => { + const state = BuiltInsParserState.newNeedsAllDataState(); + + expect(state.toDataRequestParams()).toEqual({ + dataOfNodes: 'all', + env: true, + input: true, + prevNode: true, + }); + }); + }); +}); diff --git a/packages/@n8n/task-runner/src/js-task-runner/built-ins-parser/__tests__/built-ins-parser.test.ts b/packages/@n8n/task-runner/src/js-task-runner/built-ins-parser/__tests__/built-ins-parser.test.ts new file mode 100644 index 0000000000..399d9e6e2b --- /dev/null +++ b/packages/@n8n/task-runner/src/js-task-runner/built-ins-parser/__tests__/built-ins-parser.test.ts @@ -0,0 +1,251 @@ +import { getAdditionalKeys } from 
'n8n-core'; +import type { IDataObject, INodeType, IWorkflowExecuteAdditionalData } from 'n8n-workflow'; +import { Workflow, WorkflowDataProxy } from 'n8n-workflow'; + +import { newCodeTaskData } from '../../__tests__/test-data'; +import { BuiltInsParser } from '../built-ins-parser'; +import { BuiltInsParserState } from '../built-ins-parser-state'; + +describe('BuiltInsParser', () => { + const parser = new BuiltInsParser(); + + const parseAndExpectOk = (code: string) => { + const result = parser.parseUsedBuiltIns(code); + if (!result.ok) { + fail(result.error); + } + + return result.result; + }; + + describe('Env, input, execution and prevNode', () => { + const cases: Array<[string, BuiltInsParserState]> = [ + ['$env', new BuiltInsParserState({ needs$env: true })], + ['$execution', new BuiltInsParserState({ needs$execution: true })], + ['$prevNode', new BuiltInsParserState({ needs$prevNode: true })], + ]; + + test.each(cases)("should identify built-ins in '%s'", (code, expected) => { + const state = parseAndExpectOk(code); + expect(state).toEqual(expected); + }); + }); + + describe('Input', () => { + it('should mark input as needed when $input is used', () => { + const state = parseAndExpectOk(` + $input.item.json.age = 10 + Math.floor(Math.random() * 30); + $input.item.json.password = $input.item.json.password.split('').map(() => '*').join("") + delete $input.item.json.lastname + const emailParts = $input.item.json.email.split("@") + $input.item.json.emailData = { + user: emailParts[0], + domain: emailParts[1] + } + + return $input.item; + `); + + expect(state).toEqual(new BuiltInsParserState({ needs$input: true })); + }); + + it('should mark input as needed when $json is used', () => { + const state = parseAndExpectOk(` + $json.age = 10 + Math.floor(Math.random() * 30); + return $json; + `); + + expect(state).toEqual(new BuiltInsParserState({ needs$input: true })); + }); + }); + + describe('$(...)', () => { + const cases: Array<[string, BuiltInsParserState]> = [ + [ + '$("nodeName").first()', + new BuiltInsParserState({ neededNodeNames: new Set(['nodeName']) }), + ], + [ + '$("nodeName").all(); $("secondNode").matchingItem()', + new BuiltInsParserState({ neededNodeNames: new Set(['nodeName', 'secondNode']) }), + ], + ]; + + test.each(cases)("should identify nodes in '%s'", (code, expected) => { + const state = parseAndExpectOk(code); + expect(state).toEqual(expected); + }); + + it('should need all nodes when $() is called with a variable', () => { + const state = parseAndExpectOk('var n = "name"; $(n)'); + expect(state).toEqual(new BuiltInsParserState({ needsAllNodes: true, needs$input: true })); + }); + + it('should require all nodes when there are multiple usages of $() and one is with a variable', () => { + const state = parseAndExpectOk(` + $("nodeName"); + $("secondNode"); + var n = "name"; + $(n) + `); + expect(state).toEqual(new BuiltInsParserState({ needsAllNodes: true, needs$input: true })); + }); + + test.each([ + ['without parameters', '$()'], + ['number literal', '$(123)'], + ])('should ignore when $ is called %s', (_, code) => { + const state = parseAndExpectOk(code); + expect(state).toEqual(new BuiltInsParserState()); + }); + + test.each([ + '$("node").item', + '$("node")["item"]', + '$("node").pairedItem()', + '$("node")["pairedItem"]()', + '$("node").itemMatching(0)', + '$("node")["itemMatching"](0)', + '$("node")[variable]', + 'var a = $("node")', + 'let a = $("node")', + 'const a = $("node")', + 'a = $("node")', + ])('should require all nodes if %s is used', (code) => { 
+ const state = parseAndExpectOk(code); + expect(state).toEqual(new BuiltInsParserState({ needsAllNodes: true, needs$input: true })); + }); + + test.each(['$("node").first()', '$("node").last()', '$("node").all()', '$("node").params'])( + 'should require only accessed node if %s is used', + (code) => { + const state = parseAndExpectOk(code); + expect(state).toEqual( + new BuiltInsParserState({ + needsAllNodes: false, + neededNodeNames: new Set(['node']), + }), + ); + }, + ); + }); + + describe('ECMAScript syntax', () => { + describe('ES2020', () => { + it('should parse optional chaining', () => { + parseAndExpectOk(` + const a = { b: { c: 1 } }; + return a.b?.c; + `); + }); + + it('should parse nullish coalescing', () => { + parseAndExpectOk(` + const a = null; + return a ?? 1; + `); + }); + }); + + describe('ES2021', () => { + it('should parse numeric separators', () => { + parseAndExpectOk(` + const a = 1_000_000; + return a; + `); + }); + }); + }); + + describe('WorkflowDataProxy built-ins', () => { + it('should have a known list of built-ins', () => { + const data = newCodeTaskData([]); + const dataProxy = new WorkflowDataProxy( + new Workflow({ + ...data.workflow, + nodeTypes: { + getByName() { + return undefined as unknown as INodeType; + }, + getByNameAndVersion() { + return undefined as unknown as INodeType; + }, + getKnownTypes() { + return undefined as unknown as IDataObject; + }, + }, + }), + data.runExecutionData, + data.runIndex, + 0, + data.activeNodeName, + data.connectionInputData, + data.siblingParameters, + data.mode, + getAdditionalKeys( + data.additionalData as IWorkflowExecuteAdditionalData, + data.mode, + data.runExecutionData, + ), + data.executeData, + data.defaultReturnRunIndex, + data.selfData, + data.contextNodeName, + // Make sure that even if we don't receive the envProviderState for + // whatever reason, we don't expose the task runner's env to the code + data.envProviderState ?? { + env: {}, + isEnvAccessBlocked: false, + isProcessAvailable: true, + }, + ).getDataProxy({ throwOnMissingExecutionData: false }); + + /** + * NOTE! If you are adding new built-ins to the WorkflowDataProxy class + * make sure the built-ins parser and Task Runner handle them properly. 
+ */ + expect(Object.keys(dataProxy)).toStrictEqual([ + '$', + '$input', + '$binary', + '$data', + '$env', + '$evaluateExpression', + '$item', + '$fromAI', + '$fromai', + '$fromAi', + '$items', + '$json', + '$node', + '$self', + '$parameter', + '$prevNode', + '$runIndex', + '$mode', + '$workflow', + '$itemIndex', + '$now', + '$today', + '$jmesPath', + 'DateTime', + 'Interval', + 'Duration', + '$execution', + '$vars', + '$secrets', + '$executionId', + '$resumeWebhookUrl', + '$getPairedItem', + '$jmespath', + '$position', + '$thisItem', + '$thisItemIndex', + '$thisRunIndex', + '$nodeVersion', + '$nodeId', + '$webhookId', + ]); + }); + }); +}); diff --git a/packages/@n8n/task-runner/src/js-task-runner/built-ins-parser/acorn-helpers.ts b/packages/@n8n/task-runner/src/js-task-runner/built-ins-parser/acorn-helpers.ts new file mode 100644 index 0000000000..ccab4c1527 --- /dev/null +++ b/packages/@n8n/task-runner/src/js-task-runner/built-ins-parser/acorn-helpers.ts @@ -0,0 +1,28 @@ +import type { + AssignmentExpression, + Identifier, + Literal, + MemberExpression, + Node, + VariableDeclarator, +} from 'acorn'; + +export function isLiteral(node?: Node): node is Literal { + return node?.type === 'Literal'; +} + +export function isIdentifier(node?: Node): node is Identifier { + return node?.type === 'Identifier'; +} + +export function isMemberExpression(node?: Node): node is MemberExpression { + return node?.type === 'MemberExpression'; +} + +export function isVariableDeclarator(node?: Node): node is VariableDeclarator { + return node?.type === 'VariableDeclarator'; +} + +export function isAssignmentExpression(node?: Node): node is AssignmentExpression { + return node?.type === 'AssignmentExpression'; +} diff --git a/packages/@n8n/task-runner/src/js-task-runner/built-ins-parser/built-ins-parser-state.ts b/packages/@n8n/task-runner/src/js-task-runner/built-ins-parser/built-ins-parser-state.ts new file mode 100644 index 0000000000..112c97ccda --- /dev/null +++ b/packages/@n8n/task-runner/src/js-task-runner/built-ins-parser/built-ins-parser-state.ts @@ -0,0 +1,74 @@ +import type { N8nMessage } from '../../runner-types'; + +/** + * Class to keep track of which built-in variables are accessed in the code + */ +export class BuiltInsParserState { + neededNodeNames: Set = new Set(); + + needsAllNodes = false; + + needs$env = false; + + needs$input = false; + + needs$execution = false; + + needs$prevNode = false; + + constructor(opts: Partial = {}) { + Object.assign(this, opts); + } + + /** + * Marks that all nodes are needed, including input data + */ + markNeedsAllNodes() { + this.needsAllNodes = true; + this.needs$input = true; + this.neededNodeNames = new Set(); + } + + markNodeAsNeeded(nodeName: string) { + if (this.needsAllNodes) { + return; + } + + this.neededNodeNames.add(nodeName); + } + + markEnvAsNeeded() { + this.needs$env = true; + } + + markInputAsNeeded() { + this.needs$input = true; + } + + markExecutionAsNeeded() { + this.needs$execution = true; + } + + markPrevNodeAsNeeded() { + this.needs$prevNode = true; + } + + toDataRequestParams(): N8nMessage.ToRequester.TaskDataRequest['requestParams'] { + return { + dataOfNodes: this.needsAllNodes ? 
'all' : Array.from(this.neededNodeNames), + env: this.needs$env, + input: this.needs$input, + prevNode: this.needs$prevNode, + }; + } + + static newNeedsAllDataState() { + const obj = new BuiltInsParserState(); + obj.markNeedsAllNodes(); + obj.markEnvAsNeeded(); + obj.markInputAsNeeded(); + obj.markExecutionAsNeeded(); + obj.markPrevNodeAsNeeded(); + return obj; + } +} diff --git a/packages/@n8n/task-runner/src/js-task-runner/built-ins-parser/built-ins-parser.ts b/packages/@n8n/task-runner/src/js-task-runner/built-ins-parser/built-ins-parser.ts new file mode 100644 index 0000000000..dd2d849c6a --- /dev/null +++ b/packages/@n8n/task-runner/src/js-task-runner/built-ins-parser/built-ins-parser.ts @@ -0,0 +1,142 @@ +import type { CallExpression, Identifier, Node, Program } from 'acorn'; +import { parse } from 'acorn'; +import { ancestor } from 'acorn-walk'; +import type { Result } from 'n8n-workflow'; +import { toResult } from 'n8n-workflow'; + +import { + isAssignmentExpression, + isIdentifier, + isLiteral, + isMemberExpression, + isVariableDeclarator, +} from './acorn-helpers'; +import { BuiltInsParserState } from './built-ins-parser-state'; + +/** + * Class for parsing Code Node code to identify which built-in variables + * are accessed + */ +export class BuiltInsParser { + /** + * Parses which built-in variables are accessed in the given code + */ + public parseUsedBuiltIns(code: string): Result { + return toResult(() => { + const wrappedCode = `async function VmCodeWrapper() { ${code} }`; + const ast = parse(wrappedCode, { ecmaVersion: 2025, sourceType: 'module' }); + + return this.identifyBuiltInsByWalkingAst(ast); + }); + } + + /** Traverse the AST of the script and mark any data needed for it to run. */ + private identifyBuiltInsByWalkingAst(ast: Program) { + const accessedBuiltIns = new BuiltInsParserState(); + + ancestor( + ast, + { + CallExpression: this.visitCallExpression, + Identifier: this.visitIdentifier, + }, + undefined, + accessedBuiltIns, + ); + + return accessedBuiltIns; + } + + private visitCallExpression = ( + node: CallExpression, + state: BuiltInsParserState, + ancestors: Node[], + ) => { + // $(...) + const isDollar = node.callee.type === 'Identifier' && node.callee.name === '$'; + if (!isDollar) return; + + // $(): This is not valid, ignore + if (node.arguments.length === 0) { + return; + } + + const firstArg = node.arguments[0]; + if (!isLiteral(firstArg)) { + // $(variable): Can't easily determine statically, mark all nodes as needed + state.markNeedsAllNodes(); + return; + } + + if (typeof firstArg.value !== 'string') { + // $(123): Static value, but not a string --> invalid code --> ignore + return; + } + + // $("node"): Static value, mark 'nodeName' as needed + state.markNodeAsNeeded(firstArg.value); + + // Determine how $("node") is used + this.handlePrevNodeCall(node, state, ancestors); + }; + + private handlePrevNodeCall(_node: CallExpression, state: BuiltInsParserState, ancestors: Node[]) { + // $("node").item, .pairedItem or .itemMatching: In a case like this, the execution + // engine will traverse back from current node (i.e. the Code Node) to + // the "node" node and use `pairedItem`s to find which item is linked + // to the current item. So, we need to mark all nodes as needed. + // TODO: We could also mark all the nodes between the current node and + // the "node" node as needed, but that would require more complex logic. 
+ const directParent = ancestors[ancestors.length - 2]; + if (isMemberExpression(directParent)) { + const accessedProperty = directParent.property; + + if (directParent.computed) { + // $("node")["item"], ["pairedItem"] or ["itemMatching"] + if (isLiteral(accessedProperty)) { + if (this.isPairedItemProperty(accessedProperty.value)) { + state.markNeedsAllNodes(); + } + // Else: $("node")[123]: Static value, but not any of the ones above --> ignore + } + // $("node")[variable] + else if (isIdentifier(accessedProperty)) { + state.markNeedsAllNodes(); + } + } + // $("node").item, .pairedItem or .itemMatching + else if (isIdentifier(accessedProperty) && this.isPairedItemProperty(accessedProperty.name)) { + state.markNeedsAllNodes(); + } + } else if (isVariableDeclarator(directParent) || isAssignmentExpression(directParent)) { + // const variable = $("node") or variable = $("node"): + // In this case we would need to track down all the possible use sites + // of 'variable' and determine if `.item` is accessed on it. This is + // more complex and skipped for now. + // TODO: Optimize for this case + state.markNeedsAllNodes(); + } else { + // Something else than the cases above. Mark all nodes as needed as it + // could be a dynamic access. + state.markNeedsAllNodes(); + } + } + + private visitIdentifier = (node: Identifier, state: BuiltInsParserState) => { + if (node.name === '$env') { + state.markEnvAsNeeded(); + } else if (node.name === '$input' || node.name === '$json') { + state.markInputAsNeeded(); + } else if (node.name === '$execution') { + state.markExecutionAsNeeded(); + } else if (node.name === '$prevNode') { + state.markPrevNodeAsNeeded(); + } + }; + + private isPairedItemProperty( + property?: string | boolean | null | number | RegExp | bigint, + ): boolean { + return property === 'item' || property === 'pairedItem' || property === 'itemMatching'; + } +} diff --git a/packages/@n8n/task-runner/src/js-task-runner/js-task-runner.ts b/packages/@n8n/task-runner/src/js-task-runner/js-task-runner.ts index 40ee12af2c..e0bebe0521 100644 --- a/packages/@n8n/task-runner/src/js-task-runner/js-task-runner.ts +++ b/packages/@n8n/task-runner/src/js-task-runner/js-task-runner.ts @@ -24,6 +24,8 @@ import { runInNewContext, type Context } from 'node:vm'; import type { TaskResultData } from '@/runner-types'; import { type Task, TaskRunner } from '@/task-runner'; +import { BuiltInsParser } from './built-ins-parser/built-ins-parser'; +import { BuiltInsParserState } from './built-ins-parser/built-ins-parser-state'; import { isErrorLike } from './errors/error-like'; import { ExecutionError } from './errors/execution-error'; import { makeSerializable } from './errors/serializable-error'; @@ -57,7 +59,7 @@ export interface PartialAdditionalData { variables: IDataObject; } -export interface AllCodeTaskData { +export interface DataRequestResponse { workflow: Omit; inputData: ITaskDataConnections; node: INode; @@ -84,6 +86,8 @@ type CustomConsole = { export class JsTaskRunner extends TaskRunner { private readonly requireResolver: RequireResolver; + private readonly builtInsParser = new BuiltInsParser(); + constructor(config: MainConfig, name = 'JS Task Runner') { super({ taskType: 'javascript', @@ -102,12 +106,20 @@ export class JsTaskRunner extends TaskRunner { } async executeTask(task: Task): Promise { - const allData = await this.requestData(task.taskId, 'all'); - const settings = task.settings; a.ok(settings, 'JS Code not sent to runner'); - const workflowParams = allData.workflow; + const 
neededBuiltInsResult = this.builtInsParser.parseUsedBuiltIns(settings.code); + const neededBuiltIns = neededBuiltInsResult.ok + ? neededBuiltInsResult.result + : BuiltInsParserState.newNeedsAllDataState(); + + const data = await this.requestData( + task.taskId, + neededBuiltIns.toDataRequestParams(), + ); + + const workflowParams = data.workflow; const workflow = new Workflow({ ...workflowParams, nodeTypes: this.nodeTypes, @@ -126,12 +138,12 @@ export class JsTaskRunner extends TaskRunner { const result = settings.nodeMode === 'runOnceForAllItems' - ? await this.runForAllItems(task.taskId, settings, allData, workflow, customConsole) - : await this.runForEachItem(task.taskId, settings, allData, workflow, customConsole); + ? await this.runForAllItems(task.taskId, settings, data, workflow, customConsole) + : await this.runForEachItem(task.taskId, settings, data, workflow, customConsole); return { result, - customData: allData.runExecutionData.resultData.metadata, + customData: data.runExecutionData.resultData.metadata, }; } @@ -165,12 +177,12 @@ export class JsTaskRunner extends TaskRunner { private async runForAllItems( taskId: string, settings: JSExecSettings, - allData: AllCodeTaskData, + data: DataRequestResponse, workflow: Workflow, customConsole: CustomConsole, ): Promise { - const dataProxy = this.createDataProxy(allData, workflow, allData.itemIndex); - const inputItems = allData.connectionInputData; + const dataProxy = this.createDataProxy(data, workflow, data.itemIndex); + const inputItems = data.connectionInputData; const context: Context = { require: this.requireResolver, @@ -212,16 +224,16 @@ export class JsTaskRunner extends TaskRunner { private async runForEachItem( taskId: string, settings: JSExecSettings, - allData: AllCodeTaskData, + data: DataRequestResponse, workflow: Workflow, customConsole: CustomConsole, ): Promise { - const inputItems = allData.connectionInputData; + const inputItems = data.connectionInputData; const returnData: INodeExecutionData[] = []; for (let index = 0; index < inputItems.length; index++) { const item = inputItems[index]; - const dataProxy = this.createDataProxy(allData, workflow, index); + const dataProxy = this.createDataProxy(data, workflow, index); const context: Context = { require: this.requireResolver, module: {}, @@ -279,33 +291,37 @@ export class JsTaskRunner extends TaskRunner { return returnData; } - private createDataProxy(allData: AllCodeTaskData, workflow: Workflow, itemIndex: number) { + private createDataProxy(data: DataRequestResponse, workflow: Workflow, itemIndex: number) { return new WorkflowDataProxy( workflow, - allData.runExecutionData, - allData.runIndex, + data.runExecutionData, + data.runIndex, itemIndex, - allData.activeNodeName, - allData.connectionInputData, - allData.siblingParameters, - allData.mode, + data.activeNodeName, + data.connectionInputData, + data.siblingParameters, + data.mode, getAdditionalKeys( - allData.additionalData as IWorkflowExecuteAdditionalData, - allData.mode, - allData.runExecutionData, + data.additionalData as IWorkflowExecuteAdditionalData, + data.mode, + data.runExecutionData, ), - allData.executeData, - allData.defaultReturnRunIndex, - allData.selfData, - allData.contextNodeName, + data.executeData, + data.defaultReturnRunIndex, + data.selfData, + data.contextNodeName, // Make sure that even if we don't receive the envProviderState for // whatever reason, we don't expose the task runner's env to the code - allData.envProviderState ?? { + data.envProviderState ?? 
{ env: {}, isEnvAccessBlocked: false, isProcessAvailable: true, }, - ).getDataProxy(); + // Because we optimize the needed data, it can be partially available. + // We assign the available built-ins to the execution context, which + // means we run the getter for '$json', and by default $json throws + // if there is no data available. + ).getDataProxy({ throwOnMissingExecutionData: false }); } private toExecutionErrorIfNeeded(error: unknown): Error { diff --git a/packages/@n8n/task-runner/src/runner-types.ts b/packages/@n8n/task-runner/src/runner-types.ts index 1e84843653..898279feac 100644 --- a/packages/@n8n/task-runner/src/runner-types.ts +++ b/packages/@n8n/task-runner/src/runner-types.ts @@ -1,6 +1,11 @@ import type { INodeExecutionData, INodeTypeBaseDescription } from 'n8n-workflow'; -export type DataRequestType = 'input' | 'node' | 'all'; +export interface TaskDataRequestParams { + dataOfNodes: string[] | 'all'; + prevNode: boolean; + input: boolean; + env: boolean; +} export interface TaskResultData { result: INodeExecutionData[]; @@ -89,8 +94,7 @@ export namespace N8nMessage { type: 'broker:taskdatarequest'; taskId: string; requestId: string; - requestType: DataRequestType; - param?: string; + requestParams: TaskDataRequestParams; } export interface RPC { @@ -186,8 +190,7 @@ export namespace RunnerMessage { type: 'runner:taskdatarequest'; taskId: string; requestId: string; - requestType: DataRequestType; - param?: string; + requestParams: TaskDataRequestParams; } export interface RPC { diff --git a/packages/@n8n/task-runner/src/task-runner.ts b/packages/@n8n/task-runner/src/task-runner.ts index 9629cc15d5..b292bd4413 100644 --- a/packages/@n8n/task-runner/src/task-runner.ts +++ b/packages/@n8n/task-runner/src/task-runner.ts @@ -288,8 +288,7 @@ export abstract class TaskRunner { async requestData( taskId: Task['taskId'], - type: RunnerMessage.ToN8n.TaskDataRequest['requestType'], - param?: string, + requestParams: RunnerMessage.ToN8n.TaskDataRequest['requestParams'], ): Promise { const requestId = nanoid(); @@ -305,8 +304,7 @@ export abstract class TaskRunner { type: 'runner:taskdatarequest', taskId, requestId, - requestType: type, - param, + requestParams, }); try { diff --git a/packages/cli/src/__tests__/workflow-runner.test.ts b/packages/cli/src/__tests__/workflow-runner.test.ts index f828880c74..4774746ba7 100644 --- a/packages/cli/src/__tests__/workflow-runner.test.ts +++ b/packages/cli/src/__tests__/workflow-runner.test.ts @@ -4,6 +4,7 @@ import Container from 'typedi'; import { ActiveExecutions } from '@/active-executions'; import config from '@/config'; import type { User } from '@/databases/entities/user'; +import { ExecutionNotFoundError } from '@/errors/execution-not-found-error'; import { Telemetry } from '@/telemetry'; import { WorkflowRunner } from '@/workflow-runner'; import { mockInstance } from '@test/mocking'; @@ -64,6 +65,19 @@ test('processError should return early in Bull stalled edge case', async () => { expect(watchedWorkflowExecuteAfter).toHaveBeenCalledTimes(0); }); +test('processError should return early if the error is `ExecutionNotFoundError`', async () => { + const workflow = await createWorkflow({}, owner); + const execution = await createExecution({ status: 'success', finished: true }, workflow); + await runner.processError( + new ExecutionNotFoundError(execution.id), + new Date(), + 'webhook', + execution.id, + new WorkflowHooks(hookFunctions, 'webhook', execution.id, workflow), + ); + expect(watchedWorkflowExecuteAfter).toHaveBeenCalledTimes(0); 
+}); + test('processError should process error', async () => { const workflow = await createWorkflow({}, owner); const execution = await createExecution( diff --git a/packages/cli/src/active-workflow-manager.ts b/packages/cli/src/active-workflow-manager.ts index 189c446b65..22cc0f5700 100644 --- a/packages/cli/src/active-workflow-manager.ts +++ b/packages/cli/src/active-workflow-manager.ts @@ -1,6 +1,12 @@ /* eslint-disable @typescript-eslint/no-unsafe-member-access */ -import { ActiveWorkflows, InstanceSettings, NodeExecuteFunctions } from 'n8n-core'; +import { + ActiveWorkflows, + InstanceSettings, + NodeExecuteFunctions, + PollContext, + TriggerContext, +} from 'n8n-core'; import type { ExecutionError, IDeferredPromise, @@ -274,18 +280,11 @@ export class ActiveWorkflowManager { activation: WorkflowActivateMode, ): IGetExecutePollFunctions { return (workflow: Workflow, node: INode) => { - const returnFunctions = NodeExecuteFunctions.getExecutePollFunctions( - workflow, - node, - additionalData, - mode, - activation, - ); - returnFunctions.__emit = ( + const __emit = ( data: INodeExecutionData[][], responsePromise?: IDeferredPromise, donePromise?: IDeferredPromise, - ): void => { + ) => { this.logger.debug(`Received event to trigger execution for workflow "${workflow.name}"`); void this.workflowStaticDataService.saveStaticData(workflow); const executePromise = this.workflowExecutionService.runWorkflow( @@ -309,14 +308,15 @@ export class ActiveWorkflowManager { } }; - returnFunctions.__emitError = (error: ExecutionError): void => { + const __emitError = (error: ExecutionError) => { void this.executionService .createErrorExecution(error, node, workflowData, workflow, mode) .then(() => { this.executeErrorWorkflow(error, workflowData, mode); }); }; - return returnFunctions; + + return new PollContext(workflow, node, additionalData, mode, activation, __emit, __emitError); }; } @@ -331,18 +331,11 @@ export class ActiveWorkflowManager { activation: WorkflowActivateMode, ): IGetExecuteTriggerFunctions { return (workflow: Workflow, node: INode) => { - const returnFunctions = NodeExecuteFunctions.getExecuteTriggerFunctions( - workflow, - node, - additionalData, - mode, - activation, - ); - returnFunctions.emit = ( + const emit = ( data: INodeExecutionData[][], responsePromise?: IDeferredPromise, donePromise?: IDeferredPromise, - ): void => { + ) => { this.logger.debug(`Received trigger for workflow "${workflow.name}"`); void this.workflowStaticDataService.saveStaticData(workflow); @@ -366,7 +359,7 @@ export class ActiveWorkflowManager { executePromise.catch((error: Error) => this.logger.error(error.message, { error })); } }; - returnFunctions.emitError = (error: Error): void => { + const emitError = (error: Error): void => { this.logger.info( `The trigger node "${node.name}" of workflow "${workflowData.name}" failed with the error: "${error.message}". 
Will try to reactivate.`, { @@ -391,7 +384,7 @@ export class ActiveWorkflowManager { this.addQueuedWorkflowActivation(activation, workflowData as WorkflowEntity); }; - return returnFunctions; + return new TriggerContext(workflow, node, additionalData, mode, activation, emit, emitError); }; } diff --git a/packages/cli/src/commands/start.ts b/packages/cli/src/commands/start.ts index 70f52f8cb8..041b6a8741 100644 --- a/packages/cli/src/commands/start.ts +++ b/packages/cli/src/commands/start.ts @@ -22,8 +22,6 @@ import { MessageEventBus } from '@/eventbus/message-event-bus/message-event-bus' import { EventService } from '@/events/event.service'; import { ExecutionService } from '@/executions/execution.service'; import { License } from '@/license'; -import { LocalTaskManager } from '@/runners/task-managers/local-task-manager'; -import { TaskManager } from '@/runners/task-managers/task-manager'; import { PubSubHandler } from '@/scaling/pubsub/pubsub-handler'; import { Subscriber } from '@/scaling/pubsub/subscriber.service'; import { Server } from '@/server'; @@ -224,19 +222,9 @@ export class Start extends BaseCommand { const { taskRunners: taskRunnerConfig } = this.globalConfig; if (!taskRunnerConfig.disabled) { - Container.set(TaskManager, new LocalTaskManager()); - const { TaskRunnerServer } = await import('@/runners/task-runner-server'); - const taskRunnerServer = Container.get(TaskRunnerServer); - await taskRunnerServer.start(); - - if ( - taskRunnerConfig.mode === 'internal_childprocess' || - taskRunnerConfig.mode === 'internal_launcher' - ) { - const { TaskRunnerProcess } = await import('@/runners/task-runner-process'); - const runnerProcess = Container.get(TaskRunnerProcess); - await runnerProcess.start(); - } + const { TaskRunnerModule } = await import('@/runners/task-runner-module'); + const taskRunnerModule = Container.get(TaskRunnerModule); + await taskRunnerModule.start(); } } diff --git a/packages/cli/src/commands/worker.ts b/packages/cli/src/commands/worker.ts index 4ba505a9b8..730c6f6e80 100644 --- a/packages/cli/src/commands/worker.ts +++ b/packages/cli/src/commands/worker.ts @@ -8,8 +8,6 @@ import { EventMessageGeneric } from '@/eventbus/event-message-classes/event-mess import { MessageEventBus } from '@/eventbus/message-event-bus/message-event-bus'; import { LogStreamingEventRelay } from '@/events/relays/log-streaming.event-relay'; import { Logger } from '@/logging/logger.service'; -import { LocalTaskManager } from '@/runners/task-managers/local-task-manager'; -import { TaskManager } from '@/runners/task-managers/task-manager'; import { PubSubHandler } from '@/scaling/pubsub/pubsub-handler'; import { Subscriber } from '@/scaling/pubsub/subscriber.service'; import type { ScalingService } from '@/scaling/scaling.service'; @@ -116,19 +114,9 @@ export class Worker extends BaseCommand { const { taskRunners: taskRunnerConfig } = this.globalConfig; if (!taskRunnerConfig.disabled) { - Container.set(TaskManager, new LocalTaskManager()); - const { TaskRunnerServer } = await import('@/runners/task-runner-server'); - const taskRunnerServer = Container.get(TaskRunnerServer); - await taskRunnerServer.start(); - - if ( - taskRunnerConfig.mode === 'internal_childprocess' || - taskRunnerConfig.mode === 'internal_launcher' - ) { - const { TaskRunnerProcess } = await import('@/runners/task-runner-process'); - const runnerProcess = Container.get(TaskRunnerProcess); - await runnerProcess.start(); - } + const { TaskRunnerModule } = await import('@/runners/task-runner-module'); + const 
taskRunnerModule = Container.get(TaskRunnerModule); + await taskRunnerModule.start(); } } diff --git a/packages/cli/src/config/schema.ts b/packages/cli/src/config/schema.ts index 8bece9199a..9f8bc45232 100644 --- a/packages/cli/src/config/schema.ts +++ b/packages/cli/src/config/schema.ts @@ -98,54 +98,6 @@ export const schema = { env: 'EXECUTIONS_DATA_SAVE_MANUAL_EXECUTIONS', }, - // To not exceed the database's capacity and keep its size moderate - // the execution data gets pruned regularly (default: 15 minute interval). - // All saved execution data older than the max age will be deleted. - // Pruning is currently not activated by default, which will change in - // a future version. - pruneData: { - doc: 'Delete data of past executions on a rolling basis', - format: Boolean, - default: true, - env: 'EXECUTIONS_DATA_PRUNE', - }, - pruneDataMaxAge: { - doc: 'How old (hours) the finished execution data has to be to get soft-deleted', - format: Number, - default: 336, - env: 'EXECUTIONS_DATA_MAX_AGE', - }, - pruneDataHardDeleteBuffer: { - doc: 'How old (hours) the finished execution data has to be to get hard-deleted. By default, this buffer excludes recent executions as the user may need them while building a workflow.', - format: Number, - default: 1, - env: 'EXECUTIONS_DATA_HARD_DELETE_BUFFER', - }, - pruneDataIntervals: { - hardDelete: { - doc: 'How often (minutes) execution data should be hard-deleted', - format: Number, - default: 15, - env: 'EXECUTIONS_DATA_PRUNE_HARD_DELETE_INTERVAL', - }, - softDelete: { - doc: 'How often (minutes) execution data should be soft-deleted', - format: Number, - default: 60, - env: 'EXECUTIONS_DATA_PRUNE_SOFT_DELETE_INTERVAL', - }, - }, - - // Additional pruning option to delete executions if total count exceeds the configured max. - // Deletes the oldest entries first - // Set to 0 for No limit - pruneDataMaxCount: { - doc: "Maximum number of finished executions to keep in DB. Doesn't necessarily prune exactly to max number. 
0 = no limit", - format: Number, - default: 10000, - env: 'EXECUTIONS_DATA_PRUNE_MAX_COUNT', - }, - queueRecovery: { interval: { doc: 'How often (minutes) to check for queue recovery', diff --git a/packages/cli/src/databases/repositories/execution.repository.ts b/packages/cli/src/databases/repositories/execution.repository.ts index ce4bedac2a..39f5e92cad 100644 --- a/packages/cli/src/databases/repositories/execution.repository.ts +++ b/packages/cli/src/databases/repositories/execution.repository.ts @@ -35,7 +35,6 @@ import type { } from 'n8n-workflow'; import { Service } from 'typedi'; -import config from '@/config'; import { AnnotationTagEntity } from '@/databases/entities/annotation-tag-entity.ee'; import { AnnotationTagMapping } from '@/databases/entities/annotation-tag-mapping.ee'; import { ExecutionAnnotation } from '@/databases/entities/execution-annotation.ee'; @@ -460,8 +459,7 @@ export class ExecutionRepository extends Repository { } async softDeletePrunableExecutions() { - const maxAge = config.getEnv('executions.pruneDataMaxAge'); // in h - const maxCount = config.getEnv('executions.pruneDataMaxCount'); + const { maxAge, maxCount } = this.globalConfig.pruning; // Sub-query to exclude executions having annotations const annotatedExecutionsSubQuery = this.manager @@ -517,7 +515,7 @@ export class ExecutionRepository extends Repository { async hardDeleteSoftDeletedExecutions() { const date = new Date(); - date.setHours(date.getHours() - config.getEnv('executions.pruneDataHardDeleteBuffer')); + date.setHours(date.getHours() - this.globalConfig.pruning.hardDeleteBuffer); const workflowIdsAndExecutionIds = ( await this.find({ diff --git a/packages/cli/src/events/relays/telemetry.event-relay.ts b/packages/cli/src/events/relays/telemetry.event-relay.ts index fc5cf0a53d..88f954ab93 100644 --- a/packages/cli/src/events/relays/telemetry.event-relay.ts +++ b/packages/cli/src/events/relays/telemetry.event-relay.ts @@ -771,8 +771,8 @@ export class TelemetryEventRelay extends EventRelay { executions_data_save_manual_executions: config.getEnv( 'executions.saveDataManualExecutions', ), - executions_data_prune: config.getEnv('executions.pruneData'), - executions_data_max_age: config.getEnv('executions.pruneDataMaxAge'), + executions_data_prune: this.globalConfig.pruning.isEnabled, + executions_data_max_age: this.globalConfig.pruning.maxAge, }, n8n_deployment_type: config.getEnv('deployment.type'), n8n_binary_data_mode: binaryDataConfig.mode, diff --git a/packages/cli/src/execution-lifecycle-hooks/__tests__/save-execution-progress.test.ts b/packages/cli/src/execution-lifecycle-hooks/__tests__/save-execution-progress.test.ts index c8e6b3e88f..b0db5becac 100644 --- a/packages/cli/src/execution-lifecycle-hooks/__tests__/save-execution-progress.test.ts +++ b/packages/cli/src/execution-lifecycle-hooks/__tests__/save-execution-progress.test.ts @@ -1,4 +1,5 @@ import { + deepCopy, ErrorReporterProxy, type IRunExecutionData, type ITaskData, @@ -57,7 +58,7 @@ test('should ignore on leftover async call', async () => { expect(executionRepository.updateExistingExecution).not.toHaveBeenCalled(); }); -test('should update execution', async () => { +test('should update execution when saving progress is enabled', async () => { jest.spyOn(fnModule, 'toSaveSettings').mockReturnValue({ ...commonSettings, progress: true, @@ -86,6 +87,37 @@ test('should update execution', async () => { expect(reporterSpy).not.toHaveBeenCalled(); }); +test('should update execution when saving progress is disabled, but waitTill is 
defined', async () => { + jest.spyOn(fnModule, 'toSaveSettings').mockReturnValue({ + ...commonSettings, + progress: false, + }); + + const reporterSpy = jest.spyOn(ErrorReporterProxy, 'error'); + + executionRepository.findSingleExecution.mockResolvedValue({} as IExecutionResponse); + + const args = deepCopy(commonArgs); + args[4].waitTill = new Date(); + await saveExecutionProgress(...args); + + expect(executionRepository.updateExistingExecution).toHaveBeenCalledWith('some-execution-id', { + data: { + executionData: undefined, + resultData: { + lastNodeExecuted: 'My Node', + runData: { + 'My Node': [{}], + }, + }, + startData: {}, + }, + status: 'running', + }); + + expect(reporterSpy).not.toHaveBeenCalled(); +}); + test('should report error on failure', async () => { jest.spyOn(fnModule, 'toSaveSettings').mockReturnValue({ ...commonSettings, diff --git a/packages/cli/src/execution-lifecycle-hooks/save-execution-progress.ts b/packages/cli/src/execution-lifecycle-hooks/save-execution-progress.ts index ca9899e1ec..6cd1cfd08f 100644 --- a/packages/cli/src/execution-lifecycle-hooks/save-execution-progress.ts +++ b/packages/cli/src/execution-lifecycle-hooks/save-execution-progress.ts @@ -16,7 +16,7 @@ export async function saveExecutionProgress( ) { const saveSettings = toSaveSettings(workflowData.settings); - if (!saveSettings.progress) return; + if (!saveSettings.progress && !executionData.waitTill) return; const logger = Container.get(Logger); diff --git a/packages/cli/src/runners/__tests__/task-broker.test.ts b/packages/cli/src/runners/__tests__/task-broker.test.ts index a90bf7662c..8787ba5955 100644 --- a/packages/cli/src/runners/__tests__/task-broker.test.ts +++ b/packages/cli/src/runners/__tests__/task-broker.test.ts @@ -494,15 +494,18 @@ describe('TaskBroker', () => { const taskId = 'task1'; const requesterId = 'requester1'; const requestId = 'request1'; - const requestType = 'input'; - const param = 'test_param'; + const requestParams: RunnerMessage.ToN8n.TaskDataRequest['requestParams'] = { + dataOfNodes: 'all', + env: true, + input: true, + prevNode: true, + }; const message: RunnerMessage.ToN8n.TaskDataRequest = { type: 'runner:taskdatarequest', taskId, requestId, - requestType, - param, + requestParams, }; const requesterMessageCallback = jest.fn(); @@ -519,8 +522,7 @@ describe('TaskBroker', () => { type: 'broker:taskdatarequest', taskId, requestId, - requestType, - param, + requestParams, }); }); diff --git a/packages/cli/src/runners/default-task-runner-disconnect-analyzer.ts b/packages/cli/src/runners/default-task-runner-disconnect-analyzer.ts new file mode 100644 index 0000000000..e101c65e28 --- /dev/null +++ b/packages/cli/src/runners/default-task-runner-disconnect-analyzer.ts @@ -0,0 +1,16 @@ +import { Service } from 'typedi'; + +import { TaskRunnerDisconnectedError } from './errors/task-runner-disconnected-error'; +import type { DisconnectAnalyzer } from './runner-types'; +import type { TaskRunner } from './task-broker.service'; + +/** + * Analyzes the disconnect reason of a task runner to provide a more + * meaningful error message to the user. 
+ */ +@Service() +export class DefaultTaskRunnerDisconnectAnalyzer implements DisconnectAnalyzer { + async determineDisconnectReason(runnerId: TaskRunner['id']): Promise { + return new TaskRunnerDisconnectedError(runnerId); + } +} diff --git a/packages/cli/src/runners/task-runner-disconnect-analyzer.ts b/packages/cli/src/runners/internal-task-runner-disconnect-analyzer.ts similarity index 88% rename from packages/cli/src/runners/task-runner-disconnect-analyzer.ts rename to packages/cli/src/runners/internal-task-runner-disconnect-analyzer.ts index d75a1b9aad..e3b9520f77 100644 --- a/packages/cli/src/runners/task-runner-disconnect-analyzer.ts +++ b/packages/cli/src/runners/internal-task-runner-disconnect-analyzer.ts @@ -3,7 +3,7 @@ import { Service } from 'typedi'; import config from '@/config'; -import { TaskRunnerDisconnectedError } from './errors/task-runner-disconnected-error'; +import { DefaultTaskRunnerDisconnectAnalyzer } from './default-task-runner-disconnect-analyzer'; import { TaskRunnerOomError } from './errors/task-runner-oom-error'; import { SlidingWindowSignal } from './sliding-window-signal'; import type { TaskRunner } from './task-broker.service'; @@ -15,13 +15,19 @@ import { TaskRunnerProcess } from './task-runner-process'; * meaningful error message to the user. */ @Service() -export class TaskRunnerDisconnectAnalyzer { +export class InternalTaskRunnerDisconnectAnalyzer extends DefaultTaskRunnerDisconnectAnalyzer { + private get isCloudDeployment() { + return config.get('deployment.type') === 'cloud'; + } + private readonly exitReasonSignal: SlidingWindowSignal; constructor( private readonly runnerConfig: TaskRunnersConfig, private readonly taskRunnerProcess: TaskRunnerProcess, ) { + super(); + // When the task runner process is running as a child process, there's // no determinate time when it exits compared to when the runner disconnects // (i.e. it's a race condition). Hence we use a sliding window to determine @@ -32,17 +38,13 @@ export class TaskRunnerDisconnectAnalyzer { }); } - private get isCloudDeployment() { - return config.get('deployment.type') === 'cloud'; - } - async determineDisconnectReason(runnerId: TaskRunner['id']): Promise { const exitCode = await this.awaitExitSignal(); if (exitCode === 'oom') { return new TaskRunnerOomError(runnerId, this.isCloudDeployment); } - return new TaskRunnerDisconnectedError(runnerId); + return await super.determineDisconnectReason(runnerId); } private async awaitExitSignal(): Promise { diff --git a/packages/cli/src/runners/runner-types.ts b/packages/cli/src/runners/runner-types.ts index 8b205de801..c5d4eb81c3 100644 --- a/packages/cli/src/runners/runner-types.ts +++ b/packages/cli/src/runners/runner-types.ts @@ -5,6 +5,22 @@ import type WebSocket from 'ws'; import type { TaskRunner } from './task-broker.service'; import type { AuthlessRequest } from '../requests'; +/** + * Specifies what data should be included for a task data request. 
+ */ +export interface TaskDataRequestParams { + dataOfNodes: string[] | 'all'; + prevNode: boolean; + /** Whether input data for the node should be included */ + input: boolean; + /** Whether env provider's state should be included */ + env: boolean; +} + +export interface DisconnectAnalyzer { + determineDisconnectReason(runnerId: TaskRunner['id']): Promise; +} + export type DataRequestType = 'input' | 'node' | 'all'; export interface TaskResultData { @@ -101,8 +117,7 @@ export namespace N8nMessage { type: 'broker:taskdatarequest'; taskId: string; requestId: string; - requestType: DataRequestType; - param?: string; + requestParams: TaskDataRequestParams; } export interface RPC { @@ -198,8 +213,7 @@ export namespace RunnerMessage { type: 'runner:taskdatarequest'; taskId: string; requestId: string; - requestType: DataRequestType; - param?: string; + requestParams: TaskDataRequestParams; } export interface RPC { diff --git a/packages/cli/src/runners/runner-ws-server.ts b/packages/cli/src/runners/runner-ws-server.ts index 59bb92ff76..5d4c4e9607 100644 --- a/packages/cli/src/runners/runner-ws-server.ts +++ b/packages/cli/src/runners/runner-ws-server.ts @@ -3,29 +3,38 @@ import type WebSocket from 'ws'; import { Logger } from '@/logging/logger.service'; +import { DefaultTaskRunnerDisconnectAnalyzer } from './default-task-runner-disconnect-analyzer'; import type { RunnerMessage, N8nMessage, TaskRunnerServerInitRequest, TaskRunnerServerInitResponse, + DisconnectAnalyzer, } from './runner-types'; import { TaskBroker, type MessageCallback, type TaskRunner } from './task-broker.service'; -import { TaskRunnerDisconnectAnalyzer } from './task-runner-disconnect-analyzer'; function heartbeat(this: WebSocket) { this.isAlive = true; } @Service() -export class TaskRunnerService { +export class TaskRunnerWsServer { runnerConnections: Map = new Map(); constructor( private readonly logger: Logger, private readonly taskBroker: TaskBroker, - private readonly disconnectAnalyzer: TaskRunnerDisconnectAnalyzer, + private disconnectAnalyzer: DefaultTaskRunnerDisconnectAnalyzer, ) {} + setDisconnectAnalyzer(disconnectAnalyzer: DisconnectAnalyzer) { + this.disconnectAnalyzer = disconnectAnalyzer; + } + + getDisconnectAnalyzer() { + return this.disconnectAnalyzer; + } + sendMessage(id: TaskRunner['id'], message: N8nMessage.ToRunner.All) { this.runnerConnections.get(id)?.send(JSON.stringify(message)); } diff --git a/packages/cli/src/runners/task-broker.service.ts b/packages/cli/src/runners/task-broker.service.ts index d88d677725..754d99ef32 100644 --- a/packages/cli/src/runners/task-broker.service.ts +++ b/packages/cli/src/runners/task-broker.service.ts @@ -178,12 +178,7 @@ export class TaskBroker { await this.taskErrorHandler(message.taskId, message.error); break; case 'runner:taskdatarequest': - await this.handleDataRequest( - message.taskId, - message.requestId, - message.requestType, - message.param, - ); + await this.handleDataRequest(message.taskId, message.requestId, message.requestParams); break; case 'runner:rpc': @@ -233,8 +228,7 @@ export class TaskBroker { async handleDataRequest( taskId: Task['id'], requestId: RunnerMessage.ToN8n.TaskDataRequest['requestId'], - requestType: RunnerMessage.ToN8n.TaskDataRequest['requestType'], - param?: string, + requestParams: RunnerMessage.ToN8n.TaskDataRequest['requestParams'], ) { const task = this.tasks.get(taskId); if (!task) { @@ -244,8 +238,7 @@ export class TaskBroker { type: 'broker:taskdatarequest', taskId, requestId, - requestType, - param, + requestParams, }); } 
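For illustration only (not part of the diff): with the new message shape, a runner asks the broker for exactly the data it needs instead of a coarse requestType/param pair. The taskId and requestId values below are placeholders.

const dataRequest: RunnerMessage.ToN8n.TaskDataRequest = {
	type: 'runner:taskdatarequest',
	taskId: 'task1',
	requestId: 'request1',
	requestParams: {
		dataOfNodes: ['DebugHelper'], // or 'all' for every node's run data
		prevNode: true, // also include the node feeding the current one
		input: true, // include the current node's input items
		env: false, // omit the env provider state
	},
};

The broker forwards requestParams unchanged in its 'broker:taskdatarequest' message, and the TaskManager answers with a DataRequestResponse assembled by DataRequestResponseBuilder, which trims run data, input data, and env provider state down to what was requested.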
diff --git a/packages/cli/src/runners/task-managers/__tests__/data-request-response-builder.test.ts b/packages/cli/src/runners/task-managers/__tests__/data-request-response-builder.test.ts new file mode 100644 index 0000000000..8fc0198488 --- /dev/null +++ b/packages/cli/src/runners/task-managers/__tests__/data-request-response-builder.test.ts @@ -0,0 +1,324 @@ +import { mock } from 'jest-mock-extended'; +import type { IExecuteFunctions, IWorkflowExecuteAdditionalData } from 'n8n-workflow'; +import { type INode, type INodeExecutionData, type Workflow } from 'n8n-workflow'; + +import { DataRequestResponseBuilder } from '../data-request-response-builder'; +import type { TaskData } from '../task-manager'; + +const triggerNode: INode = mock({ + name: 'Trigger', +}); +const debugHelperNode: INode = mock({ + name: 'DebugHelper', +}); +const codeNode: INode = mock({ + name: 'Code', +}); +const workflow: TaskData['workflow'] = mock(); +const debugHelperNodeOutItems: INodeExecutionData[] = [ + { + json: { + uid: 'abb74fd4-bef2-4fae-9d53-ea24e9eb3032', + email: 'Dan.Schmidt31@yahoo.com', + firstname: 'Toni', + lastname: 'Schuster', + password: 'Q!D6C2', + }, + pairedItem: { + item: 0, + }, + }, +]; +const codeNodeInputItems: INodeExecutionData[] = debugHelperNodeOutItems; +const connectionInputData: TaskData['connectionInputData'] = codeNodeInputItems; +const envProviderState: TaskData['envProviderState'] = mock({ + env: {}, + isEnvAccessBlocked: false, + isProcessAvailable: true, +}); +const additionalData = mock({ + formWaitingBaseUrl: 'http://localhost:5678/form-waiting', + instanceBaseUrl: 'http://localhost:5678/', + restApiUrl: 'http://localhost:5678/rest', + variables: {}, + webhookBaseUrl: 'http://localhost:5678/webhook', + webhookTestBaseUrl: 'http://localhost:5678/webhook-test', + webhookWaitingBaseUrl: 'http://localhost:5678/webhook-waiting', + executionId: '45844', + userId: '114984bc-44b3-4dd4-9b54-a4a8d34d51d5', + currentNodeParameters: undefined, + executionTimeoutTimestamp: undefined, + restartExecutionId: undefined, +}); +const executeFunctions = mock(); + +/** + * Drawn with https://asciiflow.com/#/ + * Task data for an execution of the following WF: + * where ►► denotes the currently being executing node. 
+ * ►► + * ┌───────────┐ ┌─────────────┐ ┌────────┐ + * │ Trigger ├──►│ DebugHelper ├───►│ Code │ + * └───────────┘ └─────────────┘ └────────┘ + */ +const taskData: TaskData = { + executeFunctions, + workflow, + connectionInputData, + inputData: { + main: [codeNodeInputItems], + }, + itemIndex: 0, + activeNodeName: codeNode.name, + contextNodeName: codeNode.name, + defaultReturnRunIndex: -1, + mode: 'manual', + envProviderState, + node: codeNode, + runExecutionData: { + startData: { + destinationNode: codeNode.name, + runNodeFilter: [triggerNode.name, debugHelperNode.name, codeNode.name], + }, + resultData: { + runData: { + [triggerNode.name]: [ + { + hints: [], + startTime: 1730313407328, + executionTime: 1, + source: [], + executionStatus: 'success', + data: { + main: [[]], + }, + }, + ], + [debugHelperNode.name]: [ + { + hints: [], + startTime: 1730313407330, + executionTime: 1, + source: [ + { + previousNode: triggerNode.name, + }, + ], + executionStatus: 'success', + data: { + main: [debugHelperNodeOutItems], + }, + }, + ], + }, + pinData: {}, + }, + executionData: { + contextData: {}, + nodeExecutionStack: [], + metadata: {}, + waitingExecution: { + [codeNode.name]: { + '0': { + main: [codeNodeInputItems], + }, + }, + }, + waitingExecutionSource: { + [codeNode.name]: { + '0': { + main: [ + { + previousNode: debugHelperNode.name, + }, + ], + }, + }, + }, + }, + }, + runIndex: 0, + selfData: {}, + siblingParameters: {}, + executeData: { + node: codeNode, + data: { + main: [codeNodeInputItems], + }, + source: { + main: [ + { + previousNode: debugHelperNode.name, + previousNodeOutput: 0, + }, + ], + }, + }, + additionalData, +} as const; + +describe('DataRequestResponseBuilder', () => { + const allDataParam: DataRequestResponseBuilder['requestParams'] = { + dataOfNodes: 'all', + env: true, + input: true, + prevNode: true, + }; + + const newRequestParam = (opts: Partial) => ({ + ...allDataParam, + ...opts, + }); + + describe('all data', () => { + it('should build the runExecutionData as is when everything is requested', () => { + const dataRequestResponseBuilder = new DataRequestResponseBuilder(taskData, allDataParam); + + const { runExecutionData } = dataRequestResponseBuilder.build(); + + expect(runExecutionData).toStrictEqual(taskData.runExecutionData); + }); + }); + + describe('envProviderState', () => { + it("should filter out envProviderState when it's not requested", () => { + const dataRequestResponseBuilder = new DataRequestResponseBuilder( + taskData, + newRequestParam({ + env: false, + }), + ); + + const result = dataRequestResponseBuilder.build(); + + expect(result.envProviderState).toStrictEqual({ + env: {}, + isEnvAccessBlocked: false, + isProcessAvailable: true, + }); + }); + }); + + describe('additionalData', () => { + it('picks only specific properties for additional data', () => { + const dataRequestResponseBuilder = new DataRequestResponseBuilder(taskData, allDataParam); + + const result = dataRequestResponseBuilder.build(); + + expect(result.additionalData).toStrictEqual({ + formWaitingBaseUrl: 'http://localhost:5678/form-waiting', + instanceBaseUrl: 'http://localhost:5678/', + restApiUrl: 'http://localhost:5678/rest', + webhookBaseUrl: 'http://localhost:5678/webhook', + webhookTestBaseUrl: 'http://localhost:5678/webhook-test', + webhookWaitingBaseUrl: 'http://localhost:5678/webhook-waiting', + executionId: '45844', + userId: '114984bc-44b3-4dd4-9b54-a4a8d34d51d5', + currentNodeParameters: undefined, + executionTimeoutTimestamp: undefined, + restartExecutionId: 
undefined, + variables: additionalData.variables, + }); + }); + }); + + describe('input data', () => { + const allExceptInputParam = newRequestParam({ + input: false, + }); + + it('drops input data from executeData', () => { + const result = new DataRequestResponseBuilder(taskData, allExceptInputParam).build(); + + expect(result.executeData).toStrictEqual({ + node: taskData.executeData!.node, + source: taskData.executeData!.source, + data: {}, + }); + }); + + it('drops input data from result', () => { + const result = new DataRequestResponseBuilder(taskData, allExceptInputParam).build(); + + expect(result.inputData).toStrictEqual({}); + }); + + it('drops input data from result', () => { + const result = new DataRequestResponseBuilder(taskData, allExceptInputParam).build(); + + expect(result.inputData).toStrictEqual({}); + }); + + it('drops input data from connectionInputData', () => { + const result = new DataRequestResponseBuilder(taskData, allExceptInputParam).build(); + + expect(result.connectionInputData).toStrictEqual([]); + }); + }); + + describe('nodes', () => { + it('should return empty run data when only Code node is requested', () => { + const result = new DataRequestResponseBuilder( + taskData, + newRequestParam({ dataOfNodes: ['Code'], prevNode: false }), + ).build(); + + expect(result.runExecutionData.resultData.runData).toStrictEqual({}); + expect(result.runExecutionData.resultData.pinData).toStrictEqual({}); + // executionData & startData contain only metadata --> returned as is + expect(result.runExecutionData.startData).toStrictEqual(taskData.runExecutionData.startData); + expect(result.runExecutionData.executionData).toStrictEqual( + taskData.runExecutionData.executionData, + ); + }); + + it('should return empty run data when only Code node is requested', () => { + const result = new DataRequestResponseBuilder( + taskData, + newRequestParam({ dataOfNodes: [codeNode.name], prevNode: false }), + ).build(); + + expect(result.runExecutionData.resultData.runData).toStrictEqual({}); + expect(result.runExecutionData.resultData.pinData).toStrictEqual({}); + // executionData & startData contain only metadata --> returned as is + expect(result.runExecutionData.startData).toStrictEqual(taskData.runExecutionData.startData); + expect(result.runExecutionData.executionData).toStrictEqual( + taskData.runExecutionData.executionData, + ); + }); + + it("should return only DebugHelper's data when only DebugHelper node is requested", () => { + const result = new DataRequestResponseBuilder( + taskData, + newRequestParam({ dataOfNodes: [debugHelperNode.name], prevNode: false }), + ).build(); + + expect(result.runExecutionData.resultData.runData).toStrictEqual({ + [debugHelperNode.name]: taskData.runExecutionData.resultData.runData[debugHelperNode.name], + }); + expect(result.runExecutionData.resultData.pinData).toStrictEqual({}); + // executionData & startData contain only metadata --> returned as is + expect(result.runExecutionData.startData).toStrictEqual(taskData.runExecutionData.startData); + expect(result.runExecutionData.executionData).toStrictEqual( + taskData.runExecutionData.executionData, + ); + }); + + it("should return DebugHelper's data when only prevNode node is requested", () => { + const result = new DataRequestResponseBuilder( + taskData, + newRequestParam({ dataOfNodes: [], prevNode: true }), + ).build(); + + expect(result.runExecutionData.resultData.runData).toStrictEqual({ + [debugHelperNode.name]: taskData.runExecutionData.resultData.runData[debugHelperNode.name], + }); + 
expect(result.runExecutionData.resultData.pinData).toStrictEqual({}); + // executionData & startData contain only metadata --> returned as is + expect(result.runExecutionData.startData).toStrictEqual(taskData.runExecutionData.startData); + expect(result.runExecutionData.executionData).toStrictEqual( + taskData.runExecutionData.executionData, + ); + }); + }); +}); diff --git a/packages/cli/src/runners/task-managers/data-request-response-builder.ts b/packages/cli/src/runners/task-managers/data-request-response-builder.ts new file mode 100644 index 0000000000..6f49743aeb --- /dev/null +++ b/packages/cli/src/runners/task-managers/data-request-response-builder.ts @@ -0,0 +1,205 @@ +import type { + EnvProviderState, + IExecuteData, + INodeExecutionData, + IPinData, + IRunData, + IRunExecutionData, + ITaskDataConnections, + IWorkflowExecuteAdditionalData, + Workflow, + WorkflowParameters, +} from 'n8n-workflow'; + +import type { DataRequestResponse, PartialAdditionalData, TaskData } from './task-manager'; +import type { N8nMessage } from '../runner-types'; + +/** + * Builds the response to a data request coming from a Task Runner. Tries to minimize + * the amount of data that is sent to the runner by only providing what is requested. + */ +export class DataRequestResponseBuilder { + private requestedNodeNames = new Set(); + + constructor( + private readonly taskData: TaskData, + private readonly requestParams: N8nMessage.ToRequester.TaskDataRequest['requestParams'], + ) { + this.requestedNodeNames = new Set(requestParams.dataOfNodes); + + if (this.requestParams.prevNode && this.requestParams.dataOfNodes !== 'all') { + this.requestedNodeNames.add(this.determinePrevNodeName()); + } + } + + /** + * Builds a response to the data request + */ + build(): DataRequestResponse { + const { taskData: td } = this; + + return { + workflow: this.buildWorkflow(td.workflow), + connectionInputData: this.buildConnectionInputData(td.connectionInputData), + inputData: this.buildInputData(td.inputData), + itemIndex: td.itemIndex, + activeNodeName: td.activeNodeName, + contextNodeName: td.contextNodeName, + defaultReturnRunIndex: td.defaultReturnRunIndex, + mode: td.mode, + envProviderState: this.buildEnvProviderState(td.envProviderState), + node: td.node, // The current node being executed + runExecutionData: this.buildRunExecutionData(td.runExecutionData), + runIndex: td.runIndex, + selfData: td.selfData, + siblingParameters: td.siblingParameters, + executeData: this.buildExecuteData(td.executeData), + additionalData: this.buildAdditionalData(td.additionalData), + }; + } + + private buildAdditionalData( + additionalData: IWorkflowExecuteAdditionalData, + ): PartialAdditionalData { + return { + formWaitingBaseUrl: additionalData.formWaitingBaseUrl, + instanceBaseUrl: additionalData.instanceBaseUrl, + restApiUrl: additionalData.restApiUrl, + variables: additionalData.variables, + webhookBaseUrl: additionalData.webhookBaseUrl, + webhookTestBaseUrl: additionalData.webhookTestBaseUrl, + webhookWaitingBaseUrl: additionalData.webhookWaitingBaseUrl, + currentNodeParameters: additionalData.currentNodeParameters, + executionId: additionalData.executionId, + executionTimeoutTimestamp: additionalData.executionTimeoutTimestamp, + restartExecutionId: additionalData.restartExecutionId, + userId: additionalData.userId, + }; + } + + private buildExecuteData(executeData: IExecuteData | undefined): IExecuteData | undefined { + if (executeData === undefined) { + return undefined; + } + + return { + node: executeData.node, // The 
current node being executed + data: this.requestParams.input ? executeData.data : {}, + source: executeData.source, + }; + } + + private buildRunExecutionData(runExecutionData: IRunExecutionData): IRunExecutionData { + if (this.requestParams.dataOfNodes === 'all') { + return runExecutionData; + } + + return { + startData: runExecutionData.startData, + resultData: { + error: runExecutionData.resultData.error, + lastNodeExecuted: runExecutionData.resultData.lastNodeExecuted, + metadata: runExecutionData.resultData.metadata, + runData: this.buildRunData(runExecutionData.resultData.runData), + pinData: this.buildPinData(runExecutionData.resultData.pinData), + }, + executionData: runExecutionData.executionData + ? { + // TODO: Figure out what these two are and can they be filtered + contextData: runExecutionData.executionData?.contextData, + nodeExecutionStack: runExecutionData.executionData.nodeExecutionStack, + + metadata: runExecutionData.executionData.metadata, + waitingExecution: runExecutionData.executionData.waitingExecution, + waitingExecutionSource: runExecutionData.executionData.waitingExecutionSource, + } + : undefined, + }; + } + + private buildRunData(runData: IRunData): IRunData { + return this.filterObjectByNodeNames(runData); + } + + private buildPinData(pinData: IPinData | undefined): IPinData | undefined { + return pinData ? this.filterObjectByNodeNames(pinData) : undefined; + } + + private buildEnvProviderState(envProviderState: EnvProviderState): EnvProviderState { + if (this.requestParams.env) { + // In case `isEnvAccessBlocked` = true, the provider state has already sanitized + // the environment variables and we can return it as is. + return envProviderState; + } + + return { + env: {}, + isEnvAccessBlocked: envProviderState.isEnvAccessBlocked, + isProcessAvailable: envProviderState.isProcessAvailable, + }; + } + + private buildInputData(inputData: ITaskDataConnections): ITaskDataConnections { + if (this.requestParams.input) { + return inputData; + } + + return {}; + } + + private buildConnectionInputData( + connectionInputData: INodeExecutionData[], + ): INodeExecutionData[] { + if (this.requestParams.input) { + return connectionInputData; + } + + return []; + } + + private buildWorkflow(workflow: Workflow): Omit { + return { + id: workflow.id, + name: workflow.name, + active: workflow.active, + connections: workflow.connectionsBySourceNode, + nodes: Object.values(workflow.nodes), + pinData: workflow.pinData, + settings: workflow.settings, + staticData: workflow.staticData, + }; + } + + /** + * Assuming the given `obj` is an object where the keys are node names, + * filters the object to only include the node names that are requested. 
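+ *
+ * For example, if only 'DebugHelper' was requested, a run-data object with
+ * entries for 'Trigger' and 'DebugHelper' is narrowed down to just the
+ * 'DebugHelper' entry.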
+ */ + private filterObjectByNodeNames>(obj: T): T { + if (this.requestParams.dataOfNodes === 'all') { + return obj; + } + + const filteredObj: T = {} as T; + + for (const nodeName in obj) { + if (!Object.prototype.hasOwnProperty.call(obj, nodeName)) { + continue; + } + + if (this.requestedNodeNames.has(nodeName)) { + filteredObj[nodeName] = obj[nodeName]; + } + } + + return filteredObj; + } + + private determinePrevNodeName(): string { + const sourceData = this.taskData.executeData?.source?.main?.[0]; + if (!sourceData) { + return ''; + } + + return sourceData.previousNode; + } +} diff --git a/packages/cli/src/runners/task-managers/task-manager.ts b/packages/cli/src/runners/task-managers/task-manager.ts index 58d8ade906..d1ef30665a 100644 --- a/packages/cli/src/runners/task-managers/task-manager.ts +++ b/packages/cli/src/runners/task-managers/task-manager.ts @@ -18,6 +18,7 @@ import { } from 'n8n-workflow'; import { nanoid } from 'nanoid'; +import { DataRequestResponseBuilder } from './data-request-response-builder'; import { RPC_ALLOW_LIST, type TaskResultData, @@ -67,7 +68,7 @@ export interface PartialAdditionalData { variables: IDataObject; } -export interface AllCodeTaskData { +export interface DataRequestResponse { workflow: Omit; inputData: ITaskDataConnections; node: INode; @@ -104,19 +105,6 @@ interface ExecuteFunctionObject { [name: string]: ((...args: unknown[]) => unknown) | ExecuteFunctionObject; } -const workflowToParameters = (workflow: Workflow): Omit => { - return { - id: workflow.id, - name: workflow.name, - active: workflow.active, - connections: workflow.connectionsBySourceNode, - nodes: Object.values(workflow.nodes), - pinData: workflow.pinData, - settings: workflow.settings, - staticData: workflow.staticData, - }; -}; - export class TaskManager { requestAcceptRejects: Map = new Map(); @@ -245,7 +233,7 @@ export class TaskManager { this.taskError(message.taskId, message.error); break; case 'broker:taskdatarequest': - this.sendTaskData(message.taskId, message.requestId, message.requestType); + this.sendTaskData(message.taskId, message.requestId, message.requestParams); break; case 'broker:rpc': void this.handleRpc(message.taskId, message.callId, message.name, message.params); @@ -294,54 +282,23 @@ export class TaskManager { sendTaskData( taskId: string, requestId: string, - requestType: N8nMessage.ToRequester.TaskDataRequest['requestType'], + requestParams: N8nMessage.ToRequester.TaskDataRequest['requestParams'], ) { const job = this.tasks.get(taskId); if (!job) { // TODO: logging return; } - if (requestType === 'all') { - const jd = job.data; - const ad = jd.additionalData; - const data: AllCodeTaskData = { - workflow: workflowToParameters(jd.workflow), - connectionInputData: jd.connectionInputData, - inputData: jd.inputData, - itemIndex: jd.itemIndex, - activeNodeName: jd.activeNodeName, - contextNodeName: jd.contextNodeName, - defaultReturnRunIndex: jd.defaultReturnRunIndex, - mode: jd.mode, - envProviderState: jd.envProviderState, - node: jd.node, - runExecutionData: jd.runExecutionData, - runIndex: jd.runIndex, - selfData: jd.selfData, - siblingParameters: jd.siblingParameters, - executeData: jd.executeData, - additionalData: { - formWaitingBaseUrl: ad.formWaitingBaseUrl, - instanceBaseUrl: ad.instanceBaseUrl, - restApiUrl: ad.restApiUrl, - variables: ad.variables, - webhookBaseUrl: ad.webhookBaseUrl, - webhookTestBaseUrl: ad.webhookTestBaseUrl, - webhookWaitingBaseUrl: ad.webhookWaitingBaseUrl, - currentNodeParameters: ad.currentNodeParameters, - executionId: 
ad.executionId, - executionTimeoutTimestamp: ad.executionTimeoutTimestamp, - restartExecutionId: ad.restartExecutionId, - userId: ad.userId, - }, - }; - this.sendMessage({ - type: 'requester:taskdataresponse', - taskId, - requestId, - data, - }); - } + + const dataRequestResponseBuilder = new DataRequestResponseBuilder(job.data, requestParams); + const requestedData = dataRequestResponseBuilder.build(); + + this.sendMessage({ + type: 'requester:taskdataresponse', + taskId, + requestId, + data: requestedData, + }); } async handleRpc( diff --git a/packages/cli/src/runners/task-runner-module.ts b/packages/cli/src/runners/task-runner-module.ts new file mode 100644 index 0000000000..13521c599b --- /dev/null +++ b/packages/cli/src/runners/task-runner-module.ts @@ -0,0 +1,85 @@ +import { TaskRunnersConfig } from '@n8n/config'; +import * as a from 'node:assert/strict'; +import Container, { Service } from 'typedi'; + +import type { TaskRunnerProcess } from '@/runners/task-runner-process'; + +import { TaskRunnerWsServer } from './runner-ws-server'; +import type { LocalTaskManager } from './task-managers/local-task-manager'; +import type { TaskRunnerServer } from './task-runner-server'; + +/** + * Module responsible for loading and starting task runner. Task runner can be + * run either internally (=launched by n8n as a child process) or externally + * (=launched by some other orchestrator) + */ +@Service() +export class TaskRunnerModule { + private taskRunnerHttpServer: TaskRunnerServer | undefined; + + private taskRunnerWsServer: TaskRunnerWsServer | undefined; + + private taskManager: LocalTaskManager | undefined; + + private taskRunnerProcess: TaskRunnerProcess | undefined; + + constructor(private readonly runnerConfig: TaskRunnersConfig) {} + + async start() { + a.ok(!this.runnerConfig.disabled, 'Task runner is disabled'); + + await this.loadTaskManager(); + await this.loadTaskRunnerServer(); + + if ( + this.runnerConfig.mode === 'internal_childprocess' || + this.runnerConfig.mode === 'internal_launcher' + ) { + await this.startInternalTaskRunner(); + } + } + + async stop() { + if (this.taskRunnerProcess) { + await this.taskRunnerProcess.stop(); + this.taskRunnerProcess = undefined; + } + + if (this.taskRunnerHttpServer) { + await this.taskRunnerHttpServer.stop(); + this.taskRunnerHttpServer = undefined; + } + } + + private async loadTaskManager() { + const { TaskManager } = await import('@/runners/task-managers/task-manager'); + const { LocalTaskManager } = await import('@/runners/task-managers/local-task-manager'); + this.taskManager = new LocalTaskManager(); + Container.set(TaskManager, this.taskManager); + } + + private async loadTaskRunnerServer() { + // These are imported dynamically because we need to set the task manager + // instance before importing them + const { TaskRunnerServer } = await import('@/runners/task-runner-server'); + this.taskRunnerHttpServer = Container.get(TaskRunnerServer); + this.taskRunnerWsServer = Container.get(TaskRunnerWsServer); + + await this.taskRunnerHttpServer.start(); + } + + private async startInternalTaskRunner() { + a.ok(this.taskRunnerWsServer, 'Task Runner WS Server not loaded'); + + const { TaskRunnerProcess } = await import('@/runners/task-runner-process'); + this.taskRunnerProcess = Container.get(TaskRunnerProcess); + await this.taskRunnerProcess.start(); + + const { InternalTaskRunnerDisconnectAnalyzer } = await import( + '@/runners/internal-task-runner-disconnect-analyzer' + ); + this.taskRunnerWsServer.setDisconnectAnalyzer( + 
Container.get(InternalTaskRunnerDisconnectAnalyzer), + ); + } +} diff --git a/packages/cli/src/runners/task-runner-process.ts b/packages/cli/src/runners/task-runner-process.ts index 5b31a96ba3..2eaa62621c 100644 --- a/packages/cli/src/runners/task-runner-process.ts +++ b/packages/cli/src/runners/task-runner-process.ts @@ -69,8 +69,8 @@ export class TaskRunnerProcess extends TypedEmitter { super(); a.ok( - this.runnerConfig.mode === 'internal_childprocess' || - this.runnerConfig.mode === 'internal_launcher', + this.runnerConfig.mode !== 'external', + 'Task Runner Process cannot be used in external mode', ); this.logger = logger.scoped('task-runner'); diff --git a/packages/cli/src/runners/task-runner-server.ts b/packages/cli/src/runners/task-runner-server.ts index 2199e70b38..6dd0fd5919 100644 --- a/packages/cli/src/runners/task-runner-server.ts +++ b/packages/cli/src/runners/task-runner-server.ts @@ -19,7 +19,7 @@ import type { TaskRunnerServerInitRequest, TaskRunnerServerInitResponse, } from '@/runners/runner-types'; -import { TaskRunnerService } from '@/runners/runner-ws-server'; +import { TaskRunnerWsServer } from '@/runners/runner-ws-server'; /** * Task Runner HTTP & WS server @@ -44,7 +44,7 @@ export class TaskRunnerServer { private readonly logger: Logger, private readonly globalConfig: GlobalConfig, private readonly taskRunnerAuthController: TaskRunnerAuthController, - private readonly taskRunnerService: TaskRunnerService, + private readonly taskRunnerService: TaskRunnerWsServer, ) { this.app = express(); this.app.disable('x-powered-by'); diff --git a/packages/cli/src/services/dynamic-node-parameters.service.ts b/packages/cli/src/services/dynamic-node-parameters.service.ts index 7a641e84a6..dd60c85444 100644 --- a/packages/cli/src/services/dynamic-node-parameters.service.ts +++ b/packages/cli/src/services/dynamic-node-parameters.service.ts @@ -1,4 +1,4 @@ -import { NodeExecuteFunctions } from 'n8n-core'; +import { LoadOptionsContext, NodeExecuteFunctions } from 'n8n-core'; import type { ILoadOptions, ILoadOptionsFunctions, @@ -253,6 +253,6 @@ export class DynamicNodeParametersService { workflow: Workflow, ) { const node = workflow.nodes['Temp-Node']; - return NodeExecuteFunctions.getLoadOptionsFunctions(workflow, node, path, additionalData); + return new LoadOptionsContext(workflow, node, additionalData, path); } } diff --git a/packages/cli/src/services/frontend.service.ts b/packages/cli/src/services/frontend.service.ts index 6cad4a4f24..7d9fe7d2b4 100644 --- a/packages/cli/src/services/frontend.service.ts +++ b/packages/cli/src/services/frontend.service.ts @@ -222,9 +222,9 @@ export class FrontendService { licensePruneTime: -1, }, pruning: { - isEnabled: config.getEnv('executions.pruneData'), - maxAge: config.getEnv('executions.pruneDataMaxAge'), - maxCount: config.getEnv('executions.pruneDataMaxCount'), + isEnabled: this.globalConfig.pruning.isEnabled, + maxAge: this.globalConfig.pruning.maxAge, + maxCount: this.globalConfig.pruning.maxCount, }, security: { blockFileAccessToN8nFiles: this.securityConfig.blockFileAccessToN8nFiles, diff --git a/packages/cli/src/services/pruning.service.ts b/packages/cli/src/services/pruning.service.ts index 0859dddd39..943f8d30ca 100644 --- a/packages/cli/src/services/pruning.service.ts +++ b/packages/cli/src/services/pruning.service.ts @@ -3,7 +3,6 @@ import { BinaryDataService, InstanceSettings } from 'n8n-core'; import { jsonStringify } from 'n8n-workflow'; import { Service } from 'typedi'; -import config from '@/config'; import { inTest, TIME } 
from '@/constants'; import { ExecutionRepository } from '@/databases/repositories/execution.repository'; import { OnShutdown } from '@/decorators/on-shutdown'; @@ -16,8 +15,8 @@ export class PruningService { private hardDeletionBatchSize = 100; private rates: Record = { - softDeletion: config.getEnv('executions.pruneDataIntervals.softDelete') * TIME.MINUTE, - hardDeletion: config.getEnv('executions.pruneDataIntervals.hardDelete') * TIME.MINUTE, + softDeletion: this.globalConfig.pruning.softDeleteInterval * TIME.MINUTE, + hardDeletion: this.globalConfig.pruning.hardDeleteInterval * TIME.MINUTE, }; public softDeletionInterval: NodeJS.Timer | undefined; @@ -52,7 +51,7 @@ export class PruningService { private isPruningEnabled() { const { instanceType, isFollower } = this.instanceSettings; - if (!config.getEnv('executions.pruneData') || inTest || instanceType !== 'main') { + if (!this.globalConfig.pruning.isEnabled || inTest || instanceType !== 'main') { return false; } diff --git a/packages/cli/src/workflow-execute-additional-data.ts b/packages/cli/src/workflow-execute-additional-data.ts index 2deae842fc..351cd6f059 100644 --- a/packages/cli/src/workflow-execute-additional-data.ts +++ b/packages/cli/src/workflow-execute-additional-data.ts @@ -468,7 +468,7 @@ function hookFunctionsSave(): IWorkflowExecuteHooks { (executionStatus === 'success' && !saveSettings.success) || (executionStatus !== 'success' && !saveSettings.error); - if (shouldNotSave) { + if (shouldNotSave && !fullRunData.waitTill) { if (!fullRunData.waitTill && !isManualMode) { executeErrorWorkflow( this.workflowData, diff --git a/packages/cli/src/workflow-runner.ts b/packages/cli/src/workflow-runner.ts index 02e0b94afd..38ead8896e 100644 --- a/packages/cli/src/workflow-runner.ts +++ b/packages/cli/src/workflow-runner.ts @@ -35,6 +35,7 @@ import * as WorkflowHelpers from '@/workflow-helpers'; import { generateFailedExecutionFromError } from '@/workflow-helpers'; import { WorkflowStaticDataService } from '@/workflows/workflow-static-data.service'; +import { ExecutionNotFoundError } from './errors/execution-not-found-error'; import { EventService } from './events/event.service'; @Service() @@ -57,12 +58,21 @@ export class WorkflowRunner { /** The process did error */ async processError( - error: ExecutionError, + error: ExecutionError | ExecutionNotFoundError, startedAt: Date, executionMode: WorkflowExecuteMode, executionId: string, hooks?: WorkflowHooks, ) { + // This means the execution was probably cancelled and has already + // been cleaned up. + // + // FIXME: This is a quick fix. The proper fix would be to not remove + // the execution from the active executions while it's still running. 
+ if (error instanceof ExecutionNotFoundError) { + return; + } + ErrorReporter.error(error, { executionId }); const isQueueMode = config.getEnv('executions.mode') === 'queue'; diff --git a/packages/cli/test/integration/pruning.service.test.ts b/packages/cli/test/integration/pruning.service.test.ts index 5a53d70315..7ade1d3fe5 100644 --- a/packages/cli/test/integration/pruning.service.test.ts +++ b/packages/cli/test/integration/pruning.service.test.ts @@ -1,9 +1,9 @@ +import { GlobalConfig } from '@n8n/config'; import { mock } from 'jest-mock-extended'; import { BinaryDataService, InstanceSettings } from 'n8n-core'; import type { ExecutionStatus } from 'n8n-workflow'; import Container from 'typedi'; -import config from '@/config'; import { TIME } from '@/constants'; import type { ExecutionEntity } from '@/databases/entities/execution-entity'; import type { WorkflowEntity } from '@/databases/entities/workflow-entity'; @@ -28,17 +28,19 @@ describe('softDeleteOnPruningCycle()', () => { const now = new Date(); const yesterday = new Date(Date.now() - TIME.DAY); let workflow: WorkflowEntity; + let globalConfig: GlobalConfig; beforeAll(async () => { await testDb.init(); + globalConfig = Container.get(GlobalConfig); pruningService = new PruningService( mockInstance(Logger), instanceSettings, Container.get(ExecutionRepository), mockInstance(BinaryDataService), mock(), - mock(), + globalConfig, ); workflow = await createWorkflow(); @@ -52,10 +54,6 @@ describe('softDeleteOnPruningCycle()', () => { await testDb.terminate(); }); - afterEach(() => { - config.load(config.default); - }); - async function findAllExecutions() { return await Container.get(ExecutionRepository).find({ order: { id: 'asc' }, @@ -64,9 +62,9 @@ describe('softDeleteOnPruningCycle()', () => { } describe('when EXECUTIONS_DATA_PRUNE_MAX_COUNT is set', () => { - beforeEach(() => { - config.set('executions.pruneDataMaxCount', 1); - config.set('executions.pruneDataMaxAge', 336); + beforeAll(() => { + globalConfig.pruning.maxAge = 336; + globalConfig.pruning.maxCount = 1; }); test('should mark as deleted based on EXECUTIONS_DATA_PRUNE_MAX_COUNT', async () => { @@ -165,9 +163,9 @@ describe('softDeleteOnPruningCycle()', () => { }); describe('when EXECUTIONS_DATA_MAX_AGE is set', () => { - beforeEach(() => { - config.set('executions.pruneDataMaxAge', 1); // 1h - config.set('executions.pruneDataMaxCount', 0); + beforeAll(() => { + globalConfig.pruning.maxAge = 1; + globalConfig.pruning.maxCount = 0; }); test('should mark as deleted based on EXECUTIONS_DATA_MAX_AGE', async () => { diff --git a/packages/cli/test/integration/runners/task-runner-module.external.test.ts b/packages/cli/test/integration/runners/task-runner-module.external.test.ts new file mode 100644 index 0000000000..e8a7e54f1a --- /dev/null +++ b/packages/cli/test/integration/runners/task-runner-module.external.test.ts @@ -0,0 +1,40 @@ +import { TaskRunnersConfig } from '@n8n/config'; +import Container from 'typedi'; + +import { TaskRunnerModule } from '@/runners/task-runner-module'; + +import { DefaultTaskRunnerDisconnectAnalyzer } from '../../../src/runners/default-task-runner-disconnect-analyzer'; +import { TaskRunnerWsServer } from '../../../src/runners/runner-ws-server'; + +describe('TaskRunnerModule in external mode', () => { + const runnerConfig = Container.get(TaskRunnersConfig); + runnerConfig.mode = 'external'; + runnerConfig.port = 0; + const module = Container.get(TaskRunnerModule); + + afterEach(async () => { + await module.stop(); + }); + + describe('start', () => { + 
it('should throw if the task runner is disabled', async () => { + runnerConfig.disabled = true; + + // Act + await expect(module.start()).rejects.toThrow('Task runner is disabled'); + }); + + it('should start the task runner', async () => { + runnerConfig.disabled = false; + + // Act + await module.start(); + }); + + it('should use DefaultTaskRunnerDisconnectAnalyzer', () => { + const wsServer = Container.get(TaskRunnerWsServer); + + expect(wsServer.getDisconnectAnalyzer()).toBeInstanceOf(DefaultTaskRunnerDisconnectAnalyzer); + }); + }); +}); diff --git a/packages/cli/test/integration/runners/task-runner-module.internal.test.ts b/packages/cli/test/integration/runners/task-runner-module.internal.test.ts new file mode 100644 index 0000000000..f53adde5e7 --- /dev/null +++ b/packages/cli/test/integration/runners/task-runner-module.internal.test.ts @@ -0,0 +1,39 @@ +import { TaskRunnersConfig } from '@n8n/config'; +import Container from 'typedi'; + +import { TaskRunnerModule } from '@/runners/task-runner-module'; + +import { InternalTaskRunnerDisconnectAnalyzer } from '../../../src/runners/internal-task-runner-disconnect-analyzer'; +import { TaskRunnerWsServer } from '../../../src/runners/runner-ws-server'; + +describe('TaskRunnerModule in internal_childprocess mode', () => { + const runnerConfig = Container.get(TaskRunnersConfig); + runnerConfig.mode = 'internal_childprocess'; + const module = Container.get(TaskRunnerModule); + + afterEach(async () => { + await module.stop(); + }); + + describe('start', () => { + it('should throw if the task runner is disabled', async () => { + runnerConfig.disabled = true; + + // Act + await expect(module.start()).rejects.toThrow('Task runner is disabled'); + }); + + it('should start the task runner', async () => { + runnerConfig.disabled = false; + + // Act + await module.start(); + }); + + it('should use InternalTaskRunnerDisconnectAnalyzer', () => { + const wsServer = Container.get(TaskRunnerWsServer); + + expect(wsServer.getDisconnectAnalyzer()).toBeInstanceOf(InternalTaskRunnerDisconnectAnalyzer); + }); + }); +}); diff --git a/packages/cli/test/integration/runners/task-runner-process.test.ts b/packages/cli/test/integration/runners/task-runner-process.test.ts index c893add440..219fbc8813 100644 --- a/packages/cli/test/integration/runners/task-runner-process.test.ts +++ b/packages/cli/test/integration/runners/task-runner-process.test.ts @@ -1,7 +1,7 @@ import { TaskRunnersConfig } from '@n8n/config'; import Container from 'typedi'; -import { TaskRunnerService } from '@/runners/runner-ws-server'; +import { TaskRunnerWsServer } from '@/runners/runner-ws-server'; import { TaskBroker } from '@/runners/task-broker.service'; import { TaskRunnerProcess } from '@/runners/task-runner-process'; import { TaskRunnerServer } from '@/runners/task-runner-server'; @@ -18,7 +18,7 @@ describe('TaskRunnerProcess', () => { const runnerProcess = Container.get(TaskRunnerProcess); const taskBroker = Container.get(TaskBroker); - const taskRunnerService = Container.get(TaskRunnerService); + const taskRunnerService = Container.get(TaskRunnerWsServer); const startLauncherSpy = jest.spyOn(runnerProcess, 'startLauncher'); const startNodeSpy = jest.spyOn(runnerProcess, 'startNode'); diff --git a/packages/core/package.json b/packages/core/package.json index f9a9d90856..32fa66a297 100644 --- a/packages/core/package.json +++ b/packages/core/package.json @@ -13,13 +13,13 @@ "scripts": { "clean": "rimraf dist .turbo", "typecheck": "tsc --noEmit", - "build": "tsc -p tsconfig.build.json", + 
"build": "tsc -p tsconfig.build.json && tsc-alias -p tsconfig.build.json", "dev": "pnpm watch", "format": "biome format --write .", "format:check": "biome ci .", "lint": "eslint . --quiet", "lintfix": "eslint . --fix", - "watch": "tsc -p tsconfig.build.json --watch", + "watch": "tsc-watch -p tsconfig.build.json --onCompilationComplete \"tsc-alias -p tsconfig.build.json\"", "test": "jest" }, "files": [ diff --git a/packages/core/src/NodeExecuteFunctions.ts b/packages/core/src/NodeExecuteFunctions.ts index 10c44efced..e50d049f59 100644 --- a/packages/core/src/NodeExecuteFunctions.ts +++ b/packages/core/src/NodeExecuteFunctions.ts @@ -23,12 +23,11 @@ import type { } from 'axios'; import axios from 'axios'; import crypto, { createHmac } from 'crypto'; -import type { Request, Response } from 'express'; import FileType from 'file-type'; import FormData from 'form-data'; import { createReadStream } from 'fs'; import { access as fsAccess, writeFile as fsWriteFile } from 'fs/promises'; -import { IncomingMessage, type IncomingHttpHeaders } from 'http'; +import { IncomingMessage } from 'http'; import { Agent, type AgentOptions } from 'https'; import get from 'lodash/get'; import isEmpty from 'lodash/isEmpty'; @@ -60,7 +59,6 @@ import type { IGetNodeParameterOptions, IHookFunctions, IHttpRequestOptions, - ILoadOptionsFunctions, IN8nHttpFullResponse, IN8nHttpResponse, INode, @@ -101,7 +99,6 @@ import type { INodeParameters, EnsureTypeOptions, SSHTunnelFunctions, - SchedulingFunctions, DeduplicationHelperFunctions, IDeduplicationOutput, IDeduplicationOutputItems, @@ -111,6 +108,7 @@ import type { ICheckProcessedContextData, AiEvent, ISupplyDataFunctions, + WebhookType, } from 'n8n-workflow'; import { NodeConnectionType, @@ -167,7 +165,8 @@ import { import { extractValue } from './ExtractValue'; import { InstanceSettings } from './InstanceSettings'; import type { ExtendedValidationResult, IResponseError } from './Interfaces'; -import { ScheduledTaskManager } from './ScheduledTaskManager'; +// eslint-disable-next-line import/no-cycle +import { HookContext, PollContext, TriggerContext, WebhookContext } from './node-execution-context'; import { getSecretsProxy } from './Secrets'; import { SSHClientsManager } from './SSHClientsManager'; @@ -215,7 +214,7 @@ const createFormDataObject = (data: Record) => { return formData; }; -const validateUrl = (url?: string): boolean => { +export const validateUrl = (url?: string): boolean => { if (!url) return false; try { @@ -776,7 +775,7 @@ export function parseIncomingMessage(message: IncomingMessage) { } } -async function binaryToString(body: Buffer | Readable, encoding?: BufferEncoding) { +export async function binaryToString(body: Buffer | Readable, encoding?: BufferEncoding) { const buffer = await binaryToBuffer(body); if (!encoding && body instanceof IncomingMessage) { parseIncomingMessage(body); @@ -1010,7 +1009,7 @@ export const removeEmptyBody = (requestOptions: IHttpRequestOptions | IRequestOp } }; -async function httpRequest( +export async function httpRequest( requestOptions: IHttpRequestOptions, ): Promise { removeEmptyBody(requestOptions); @@ -1205,7 +1204,7 @@ export async function copyBinaryFile( * base64 and adds metadata. 
*/ // eslint-disable-next-line complexity -async function prepareBinaryData( +export async function prepareBinaryData( binaryData: Buffer | Readable, executionId: string, workflowId: string, @@ -1348,6 +1347,7 @@ export async function clearAllProcessedItems( options, ); } + export async function getProcessedDataCount( scope: DeduplicationScope, contextData: ICheckProcessedContextData, @@ -1359,7 +1359,8 @@ export async function getProcessedDataCount( options, ); } -function applyPaginationRequestData( + +export function applyPaginationRequestData( requestData: IRequestOptions, paginationRequestData: PaginationOptions['request'], ): IRequestOptions { @@ -2628,7 +2629,7 @@ export function continueOnFail(node: INode): boolean { * */ export function getNodeWebhookUrl( - name: string, + name: WebhookType, workflow: Workflow, node: INode, additionalData: IWorkflowExecuteAdditionalData, @@ -2673,7 +2674,7 @@ export function getNodeWebhookUrl( * */ export function getWebhookDescription( - name: string, + name: WebhookType, workflow: Workflow, node: INode, ): IWebhookDescription | undefined { @@ -2798,7 +2799,7 @@ const addExecutionDataFunctions = async ( } }; -async function getInputConnectionData( +export async function getInputConnectionData( this: IAllExecuteFunctions, workflow: Workflow, runExecutionData: IRunExecutionData, @@ -3342,14 +3343,6 @@ const getSSHTunnelFunctions = (): SSHTunnelFunctions => ({ await Container.get(SSHClientsManager).getClient(credentials), }); -const getSchedulingFunctions = (workflow: Workflow): SchedulingFunctions => { - const scheduledTaskManager = Container.get(ScheduledTaskManager); - return { - registerCron: (cronExpression, onTick) => - scheduledTaskManager.registerCron(workflow, cronExpression, onTick), - }; -}; - const getAllowedPaths = () => { const restrictFileAccessTo = process.env[RESTRICT_FILE_ACCESS_TO]; if (!restrictFileAccessTo) { @@ -3553,57 +3546,7 @@ export function getExecutePollFunctions( mode: WorkflowExecuteMode, activation: WorkflowActivateMode, ): IPollFunctions { - return ((workflow: Workflow, node: INode) => { - return { - ...getCommonWorkflowFunctions(workflow, node, additionalData), - __emit: (): void => { - throw new ApplicationError( - 'Overwrite NodeExecuteFunctions.getExecutePollFunctions.__emit function', - ); - }, - __emitError() { - throw new ApplicationError( - 'Overwrite NodeExecuteFunctions.getExecutePollFunctions.__emitError function', - ); - }, - getMode: () => mode, - getActivationMode: () => activation, - getCredentials: async (type) => - await getCredentials(workflow, node, type, additionalData, mode), - getNodeParameter: ( - parameterName: string, - fallbackValue?: any, - options?: IGetNodeParameterOptions, - ): NodeParameterValueType | object => { - const runExecutionData: IRunExecutionData | null = null; - const itemIndex = 0; - const runIndex = 0; - const connectionInputData: INodeExecutionData[] = []; - - return getNodeParameter( - workflow, - runExecutionData, - runIndex, - connectionInputData, - node, - parameterName, - itemIndex, - mode, - getAdditionalKeys(additionalData, mode, runExecutionData), - undefined, - fallbackValue, - options, - ); - }, - helpers: { - createDeferredPromise, - ...getRequestHelperFunctions(workflow, node, additionalData), - ...getBinaryHelperFunctions(additionalData, workflow.id), - ...getSchedulingFunctions(workflow), - returnJsonArray, - }, - }; - })(workflow, node); + return new PollContext(workflow, node, additionalData, mode, activation); } /** @@ -3617,58 +3560,7 @@ export function 
getExecuteTriggerFunctions( mode: WorkflowExecuteMode, activation: WorkflowActivateMode, ): ITriggerFunctions { - return ((workflow: Workflow, node: INode) => { - return { - ...getCommonWorkflowFunctions(workflow, node, additionalData), - emit: (): void => { - throw new ApplicationError( - 'Overwrite NodeExecuteFunctions.getExecuteTriggerFunctions.emit function', - ); - }, - emitError: (): void => { - throw new ApplicationError( - 'Overwrite NodeExecuteFunctions.getExecuteTriggerFunctions.emit function', - ); - }, - getMode: () => mode, - getActivationMode: () => activation, - getCredentials: async (type) => - await getCredentials(workflow, node, type, additionalData, mode), - getNodeParameter: ( - parameterName: string, - fallbackValue?: any, - options?: IGetNodeParameterOptions, - ): NodeParameterValueType | object => { - const runExecutionData: IRunExecutionData | null = null; - const itemIndex = 0; - const runIndex = 0; - const connectionInputData: INodeExecutionData[] = []; - - return getNodeParameter( - workflow, - runExecutionData, - runIndex, - connectionInputData, - node, - parameterName, - itemIndex, - mode, - getAdditionalKeys(additionalData, mode, runExecutionData), - undefined, - fallbackValue, - options, - ); - }, - helpers: { - createDeferredPromise, - ...getSSHTunnelFunctions(), - ...getRequestHelperFunctions(workflow, node, additionalData), - ...getBinaryHelperFunctions(additionalData, workflow.id), - ...getSchedulingFunctions(workflow), - returnJsonArray, - }, - }; - })(workflow, node); + return new TriggerContext(workflow, node, additionalData, mode, activation); } /** @@ -4400,6 +4292,7 @@ export function getExecuteSingleFunctions( }, helpers: { createDeferredPromise, + returnJsonArray, ...getRequestHelperFunctions( workflow, node, @@ -4439,85 +4332,6 @@ export function getCredentialTestFunctions(): ICredentialTestFunctions { }; } -/** - * Returns the execute functions regular nodes have access to in load-options-function. 
- */ -export function getLoadOptionsFunctions( - workflow: Workflow, - node: INode, - path: string, - additionalData: IWorkflowExecuteAdditionalData, -): ILoadOptionsFunctions { - return ((workflow: Workflow, node: INode, path: string) => { - return { - ...getCommonWorkflowFunctions(workflow, node, additionalData), - getCredentials: async (type) => - await getCredentials(workflow, node, type, additionalData, 'internal'), - getCurrentNodeParameter: ( - parameterPath: string, - options?: IGetNodeParameterOptions, - ): NodeParameterValueType | object | undefined => { - const nodeParameters = additionalData.currentNodeParameters; - - if (parameterPath.charAt(0) === '&') { - parameterPath = `${path.split('.').slice(1, -1).join('.')}.${parameterPath.slice(1)}`; - } - - let returnData = get(nodeParameters, parameterPath); - - // This is outside the try/catch because it throws errors with proper messages - if (options?.extractValue) { - const nodeType = workflow.nodeTypes.getByNameAndVersion(node.type, node.typeVersion); - if (nodeType === undefined) { - throw new ApplicationError('Node type is not known so cannot return parameter value', { - tags: { nodeType: node.type }, - }); - } - returnData = extractValue( - returnData, - parameterPath, - node, - nodeType, - ) as NodeParameterValueType; - } - - return returnData; - }, - getCurrentNodeParameters: () => additionalData.currentNodeParameters, - getNodeParameter: ( - parameterName: string, - fallbackValue?: any, - options?: IGetNodeParameterOptions, - ): NodeParameterValueType | object => { - const runExecutionData: IRunExecutionData | null = null; - const itemIndex = 0; - const runIndex = 0; - const mode = 'internal' as WorkflowExecuteMode; - const connectionInputData: INodeExecutionData[] = []; - - return getNodeParameter( - workflow, - runExecutionData, - runIndex, - connectionInputData, - node, - parameterName, - itemIndex, - mode, - getAdditionalKeys(additionalData, mode, runExecutionData), - undefined, - fallbackValue, - options, - ); - }, - helpers: { - ...getSSHTunnelFunctions(), - ...getRequestHelperFunctions(workflow, node, additionalData), - }, - }; - })(workflow, node, path); -} - /** * Returns the execute functions regular nodes have access to in hook-function. 
*/ @@ -4529,59 +4343,7 @@ export function getExecuteHookFunctions( activation: WorkflowActivateMode, webhookData?: IWebhookData, ): IHookFunctions { - return ((workflow: Workflow, node: INode) => { - return { - ...getCommonWorkflowFunctions(workflow, node, additionalData), - getCredentials: async (type) => - await getCredentials(workflow, node, type, additionalData, mode), - getMode: () => mode, - getActivationMode: () => activation, - getNodeParameter: ( - parameterName: string, - fallbackValue?: any, - options?: IGetNodeParameterOptions, - ): NodeParameterValueType | object => { - const runExecutionData: IRunExecutionData | null = null; - const itemIndex = 0; - const runIndex = 0; - const connectionInputData: INodeExecutionData[] = []; - - return getNodeParameter( - workflow, - runExecutionData, - runIndex, - connectionInputData, - node, - parameterName, - itemIndex, - mode, - getAdditionalKeys(additionalData, mode, runExecutionData), - undefined, - fallbackValue, - options, - ); - }, - getNodeWebhookUrl: (name: string): string | undefined => { - return getNodeWebhookUrl( - name, - workflow, - node, - additionalData, - mode, - getAdditionalKeys(additionalData, mode, null), - webhookData?.isTest, - ); - }, - getWebhookName(): string { - if (webhookData === undefined) { - throw new ApplicationError('Only supported in webhook functions'); - } - return webhookData.webhookDescription.name; - }, - getWebhookDescription: (name) => getWebhookDescription(name, workflow, node), - helpers: getRequestHelperFunctions(workflow, node, additionalData), - }; - })(workflow, node); + return new HookContext(workflow, node, additionalData, mode, activation, webhookData); } /** @@ -4597,170 +4359,13 @@ export function getExecuteWebhookFunctions( closeFunctions: CloseFunction[], runExecutionData: IRunExecutionData | null, ): IWebhookFunctions { - return ((workflow: Workflow, node: INode, runExecutionData: IRunExecutionData | null) => { - return { - ...getCommonWorkflowFunctions(workflow, node, additionalData), - getBodyData(): IDataObject { - if (additionalData.httpRequest === undefined) { - throw new ApplicationError('Request is missing'); - } - return additionalData.httpRequest.body; - }, - getCredentials: async (type) => - await getCredentials(workflow, node, type, additionalData, mode), - getHeaderData(): IncomingHttpHeaders { - if (additionalData.httpRequest === undefined) { - throw new ApplicationError('Request is missing'); - } - return additionalData.httpRequest.headers; - }, - async getInputConnectionData( - inputName: NodeConnectionType, - itemIndex: number, - ): Promise { - // To be able to use expressions like "$json.sessionId" set the - // body data the webhook received to what is normally used for - // incoming node data. - const connectionInputData: INodeExecutionData[] = [ - { json: additionalData.httpRequest?.body || {} }, - ]; - const runExecutionData: IRunExecutionData = { - resultData: { - runData: {}, - }, - }; - const executeData: IExecuteData = { - data: { - main: [connectionInputData], - }, - node, - source: null, - }; - const runIndex = 0; - - return await getInputConnectionData.call( - this, - workflow, - runExecutionData, - runIndex, - connectionInputData, - {} as ITaskDataConnections, - additionalData, - executeData, - mode, - closeFunctions, - inputName, - itemIndex, - ); - }, - getMode: () => mode, - evaluateExpression: (expression: string, evaluateItemIndex?: number) => { - const itemIndex = evaluateItemIndex === undefined ? 
0 : evaluateItemIndex; - const runIndex = 0; - - let connectionInputData: INodeExecutionData[] = []; - let executionData: IExecuteData | undefined; - - if (runExecutionData?.executionData !== undefined) { - executionData = runExecutionData.executionData.nodeExecutionStack[0]; - - if (executionData !== undefined) { - connectionInputData = executionData.data.main[0]!; - } - } - - const additionalKeys = getAdditionalKeys(additionalData, mode, runExecutionData); - - return workflow.expression.resolveSimpleParameterValue( - `=${expression}`, - {}, - runExecutionData, - runIndex, - itemIndex, - node.name, - connectionInputData, - mode, - additionalKeys, - executionData, - ); - }, - getNodeParameter: ( - parameterName: string, - fallbackValue?: any, - options?: IGetNodeParameterOptions, - ): NodeParameterValueType | object => { - const itemIndex = 0; - const runIndex = 0; - - let connectionInputData: INodeExecutionData[] = []; - let executionData: IExecuteData | undefined; - - if (runExecutionData?.executionData !== undefined) { - executionData = runExecutionData.executionData.nodeExecutionStack[0]; - - if (executionData !== undefined) { - connectionInputData = executionData.data.main[0]!; - } - } - - const additionalKeys = getAdditionalKeys(additionalData, mode, runExecutionData); - - return getNodeParameter( - workflow, - runExecutionData, - runIndex, - connectionInputData, - node, - parameterName, - itemIndex, - mode, - additionalKeys, - executionData, - fallbackValue, - options, - ); - }, - getParamsData(): object { - if (additionalData.httpRequest === undefined) { - throw new ApplicationError('Request is missing'); - } - return additionalData.httpRequest.params; - }, - getQueryData(): object { - if (additionalData.httpRequest === undefined) { - throw new ApplicationError('Request is missing'); - } - return additionalData.httpRequest.query; - }, - getRequestObject(): Request { - if (additionalData.httpRequest === undefined) { - throw new ApplicationError('Request is missing'); - } - return additionalData.httpRequest; - }, - getResponseObject(): Response { - if (additionalData.httpResponse === undefined) { - throw new ApplicationError('Response is missing'); - } - return additionalData.httpResponse; - }, - getNodeWebhookUrl: (name: string): string | undefined => - getNodeWebhookUrl( - name, - workflow, - node, - additionalData, - mode, - getAdditionalKeys(additionalData, mode, null), - ), - getWebhookName: () => webhookData.webhookDescription.name, - helpers: { - createDeferredPromise, - ...getRequestHelperFunctions(workflow, node, additionalData), - ...getBinaryHelperFunctions(additionalData, workflow.id), - returnJsonArray, - }, - nodeHelpers: getNodeHelperFunctions(additionalData, workflow.id), - }; - })(workflow, node, runExecutionData); + return new WebhookContext( + workflow, + node, + additionalData, + mode, + webhookData, + closeFunctions, + runExecutionData, + ); } diff --git a/packages/core/src/WorkflowExecute.ts b/packages/core/src/WorkflowExecute.ts index 23b88abd5b..f8c32351bb 100644 --- a/packages/core/src/WorkflowExecute.ts +++ b/packages/core/src/WorkflowExecute.ts @@ -45,6 +45,7 @@ import { NodeExecutionOutput, sleep, ErrorReporterProxy, + ExecutionCancelledError, } from 'n8n-workflow'; import PCancelable from 'p-cancelable'; @@ -154,10 +155,6 @@ export class WorkflowExecute { return this.processRunExecutionData(workflow); } - static isAbortError(e?: ExecutionBaseError) { - return e?.message === 'AbortError'; - } - forceInputNodeExecution(workflow: Workflow): boolean { return 
workflow.settings.executionOrder !== 'v1'; } @@ -1479,7 +1476,7 @@ export class WorkflowExecute { // Add the execution data again so that it can get restarted this.runExecutionData.executionData!.nodeExecutionStack.unshift(executionData); // Only execute the nodeExecuteAfter hook if the node did not get aborted - if (!WorkflowExecute.isAbortError(executionError)) { + if (!this.isCancelled) { await this.executeHook('nodeExecuteAfter', [ executionNode.name, taskData, @@ -1827,7 +1824,7 @@ export class WorkflowExecute { return await this.processSuccessExecution( startedAt, workflow, - new WorkflowOperationError('Workflow has been canceled or timed out'), + new ExecutionCancelledError(this.additionalData.executionId ?? 'unknown'), closeFunction, ); } @@ -1928,7 +1925,7 @@ export class WorkflowExecute { this.moveNodeMetadata(); // Prevent from running the hook if the error is an abort error as it was already handled - if (!WorkflowExecute.isAbortError(executionError)) { + if (!this.isCancelled) { await this.executeHook('workflowExecuteAfter', [fullRunData, newStaticData]); } @@ -1959,4 +1956,8 @@ export class WorkflowExecute { return fullRunData; } + + private get isCancelled() { + return this.abortController.signal.aborted; + } } diff --git a/packages/core/src/index.ts b/packages/core/src/index.ts index ebe240b51e..cfdae07dc6 100644 --- a/packages/core/src/index.ts +++ b/packages/core/src/index.ts @@ -20,3 +20,4 @@ export { ObjectStoreService } from './ObjectStore/ObjectStore.service.ee'; export { BinaryData } from './BinaryData/types'; export { isStoredMode as isValidNonDefaultMode } from './BinaryData/utils'; export * from './ExecutionMetadata'; +export * from './node-execution-context'; diff --git a/packages/core/src/node-execution-context/__tests__/base-context.test.ts b/packages/core/src/node-execution-context/__tests__/base-context.test.ts new file mode 100644 index 0000000000..aadc630a38 --- /dev/null +++ b/packages/core/src/node-execution-context/__tests__/base-context.test.ts @@ -0,0 +1,168 @@ +import { mock } from 'jest-mock-extended'; +import type { + INode, + INodeExecutionData, + IWorkflowExecuteAdditionalData, + Workflow, + WorkflowExecuteMode, +} from 'n8n-workflow'; +import { Container } from 'typedi'; + +import { InstanceSettings } from '@/InstanceSettings'; + +import { NodeExecutionContext } from '../node-execution-context'; + +class TestContext extends NodeExecutionContext {} + +describe('BaseContext', () => { + const instanceSettings = mock({ instanceId: 'abc123' }); + Container.set(InstanceSettings, instanceSettings); + + const workflow = mock({ + id: '123', + name: 'Test Workflow', + active: true, + nodeTypes: mock(), + timezone: 'UTC', + }); + const node = mock(); + let additionalData = mock({ + credentialsHelper: mock(), + }); + + const mode: WorkflowExecuteMode = 'manual'; + const testContext = new TestContext(workflow, node, additionalData, mode); + + beforeEach(() => { + jest.clearAllMocks(); + }); + + describe('getNode', () => { + it('should return a deep copy of the node', () => { + const result = testContext.getNode(); + expect(result).not.toBe(node); + expect(JSON.stringify(result)).toEqual(JSON.stringify(node)); + }); + }); + + describe('getWorkflow', () => { + it('should return the id, name, and active properties of the workflow', () => { + const result = testContext.getWorkflow(); + + expect(result).toEqual({ id: '123', name: 'Test Workflow', active: true }); + }); + }); + + describe('getMode', () => { + it('should return the mode property', () => { + const 
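The WorkflowExecute changes above replace string-matching on an 'AbortError' message with a getter over the instance's AbortController, and cancellations are now reported as a typed ExecutionCancelledError. A reduced sketch of the same pattern; the class and method names here are illustrative, not the real WorkflowExecute members:

import { ExecutionCancelledError } from 'n8n-workflow';

class CancellableRun {
  private readonly abortController = new AbortController();

  // Mirrors WorkflowExecute.isCancelled: state comes from the abort signal,
  // not from inspecting a caught error's message.
  private get isCancelled() {
    return this.abortController.signal.aborted;
  }

  cancel() {
    this.abortController.abort();
  }

  async finish(executionId?: string) {
    if (this.isCancelled) {
      // Typed error replaces the former generic WorkflowOperationError message.
      throw new ExecutionCancelledError(executionId ?? 'unknown');
    }
    // ...post-execution hooks would run here when not cancelled...
  }
}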
result = testContext.getMode(); + expect(result).toBe(mode); + }); + }); + + describe('getWorkflowStaticData', () => { + it('should call getStaticData method of workflow', () => { + testContext.getWorkflowStaticData('testType'); + expect(workflow.getStaticData).toHaveBeenCalledWith('testType', node); + }); + }); + + describe('getChildNodes', () => { + it('should return an array of NodeTypeAndVersion objects for the child nodes of the given node', () => { + const childNode1 = mock({ name: 'Child Node 1', type: 'testType1', typeVersion: 1 }); + const childNode2 = mock({ name: 'Child Node 2', type: 'testType2', typeVersion: 2 }); + workflow.getChildNodes.mockReturnValue(['Child Node 1', 'Child Node 2']); + workflow.nodes = { + 'Child Node 1': childNode1, + 'Child Node 2': childNode2, + }; + + const result = testContext.getChildNodes('Test Node'); + + expect(result).toEqual([ + { name: 'Child Node 1', type: 'testType1', typeVersion: 1 }, + { name: 'Child Node 2', type: 'testType2', typeVersion: 2 }, + ]); + }); + }); + + describe('getParentNodes', () => { + it('should return an array of NodeTypeAndVersion objects for the parent nodes of the given node', () => { + const parentNode1 = mock({ name: 'Parent Node 1', type: 'testType1', typeVersion: 1 }); + const parentNode2 = mock({ name: 'Parent Node 2', type: 'testType2', typeVersion: 2 }); + workflow.getParentNodes.mockReturnValue(['Parent Node 1', 'Parent Node 2']); + workflow.nodes = { + 'Parent Node 1': parentNode1, + 'Parent Node 2': parentNode2, + }; + + const result = testContext.getParentNodes('Test Node'); + + expect(result).toEqual([ + { name: 'Parent Node 1', type: 'testType1', typeVersion: 1 }, + { name: 'Parent Node 2', type: 'testType2', typeVersion: 2 }, + ]); + }); + }); + + describe('getKnownNodeTypes', () => { + it('should call getKnownTypes method of workflow.nodeTypes', () => { + testContext.getKnownNodeTypes(); + expect(workflow.nodeTypes.getKnownTypes).toHaveBeenCalled(); + }); + }); + + describe('getRestApiUrl', () => { + it('should return the restApiUrl property of additionalData', () => { + additionalData.restApiUrl = 'https://example.com/api'; + + const result = testContext.getRestApiUrl(); + + expect(result).toBe('https://example.com/api'); + }); + }); + + describe('getInstanceBaseUrl', () => { + it('should return the instanceBaseUrl property of additionalData', () => { + additionalData.instanceBaseUrl = 'https://example.com'; + + const result = testContext.getInstanceBaseUrl(); + + expect(result).toBe('https://example.com'); + }); + }); + + describe('getInstanceId', () => { + it('should return the instanceId property of instanceSettings', () => { + const result = testContext.getInstanceId(); + + expect(result).toBe('abc123'); + }); + }); + + describe('getTimezone', () => { + it('should return the timezone property of workflow', () => { + const result = testContext.getTimezone(); + expect(result).toBe('UTC'); + }); + }); + + describe('getCredentialsProperties', () => { + it('should call getCredentialsProperties method of additionalData.credentialsHelper', () => { + testContext.getCredentialsProperties('testType'); + expect(additionalData.credentialsHelper.getCredentialsProperties).toHaveBeenCalledWith( + 'testType', + ); + }); + }); + + describe('prepareOutputData', () => { + it('should return the input array wrapped in another array', async () => { + const outputData = [mock(), mock()]; + + const result = await testContext.prepareOutputData(outputData); + + expect(result).toEqual([outputData]); + }); + }); +}); diff 
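The spec above drives the abstract base class through a bare TestContext subclass; the same four-argument constructor is what every concrete context in this PR passes up via super(). A hedged sketch of a custom subclass reusing the shared getters — the AuditContext name is hypothetical, and the relative import mirrors the spec because the base class is not re-exported from the package index in this diff:

import type { INode, IWorkflowExecuteAdditionalData, Workflow } from 'n8n-workflow';

import { NodeExecutionContext } from '../node-execution-context';

class AuditContext extends NodeExecutionContext {
  constructor(workflow: Workflow, node: INode, additionalData: IWorkflowExecuteAdditionalData) {
    // Same base arguments as every other context; 'internal' is the mode LoadOptionsContext uses.
    super(workflow, node, additionalData, 'internal');
  }

  describeSelf() {
    // Everything used here is inherited from NodeExecutionContext.
    const { id, name, active } = this.getWorkflow();
    return `${this.getNode().name} in workflow ${name} (${id}, active=${active}) on instance ${this.getInstanceId()}`;
  }
}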
--git a/packages/core/src/node-execution-context/__tests__/hook-context.test.ts b/packages/core/src/node-execution-context/__tests__/hook-context.test.ts new file mode 100644 index 0000000000..1092198a23 --- /dev/null +++ b/packages/core/src/node-execution-context/__tests__/hook-context.test.ts @@ -0,0 +1,147 @@ +import { mock } from 'jest-mock-extended'; +import type { + Expression, + ICredentialDataDecryptedObject, + ICredentialsHelper, + INode, + INodeType, + INodeTypes, + IWebhookDescription, + IWebhookData, + IWorkflowExecuteAdditionalData, + Workflow, + WorkflowActivateMode, + WorkflowExecuteMode, +} from 'n8n-workflow'; +import { ApplicationError } from 'n8n-workflow'; + +import { HookContext } from '../hook-context'; + +describe('HookContext', () => { + const testCredentialType = 'testCredential'; + const webhookDescription: IWebhookDescription = { + name: 'default', + httpMethod: 'GET', + responseMode: 'onReceived', + path: 'testPath', + }; + const nodeType = mock({ + description: { + credentials: [ + { + name: testCredentialType, + required: true, + }, + ], + properties: [ + { + name: 'testParameter', + required: true, + }, + ], + }, + }); + nodeType.description.webhooks = [webhookDescription]; + const nodeTypes = mock(); + const expression = mock(); + const workflow = mock({ expression, nodeTypes }); + const node = mock({ + credentials: { + [testCredentialType]: { + id: 'testCredentialId', + }, + }, + }); + node.parameters = { + testParameter: 'testValue', + }; + const credentialsHelper = mock(); + const additionalData = mock({ credentialsHelper }); + const mode: WorkflowExecuteMode = 'manual'; + const activation: WorkflowActivateMode = 'init'; + const webhookData = mock({ + webhookDescription: { + name: 'default', + isFullPath: true, + }, + }); + + const hookContext = new HookContext( + workflow, + node, + additionalData, + mode, + activation, + webhookData, + ); + + beforeEach(() => { + jest.clearAllMocks(); + nodeTypes.getByNameAndVersion.mockReturnValue(nodeType); + expression.getParameterValue.mockImplementation((value) => value); + expression.getSimpleParameterValue.mockImplementation((_, value) => value); + }); + + describe('getActivationMode', () => { + it('should return the activation property', () => { + const result = hookContext.getActivationMode(); + expect(result).toBe(activation); + }); + }); + + describe('getCredentials', () => { + it('should get decrypted credentials', async () => { + nodeTypes.getByNameAndVersion.mockReturnValue(nodeType); + credentialsHelper.getDecrypted.mockResolvedValue({ secret: 'token' }); + + const credentials = + await hookContext.getCredentials(testCredentialType); + + expect(credentials).toEqual({ secret: 'token' }); + }); + }); + + describe('getNodeParameter', () => { + it('should return parameter value when it exists', () => { + const parameter = hookContext.getNodeParameter('testParameter'); + + expect(parameter).toBe('testValue'); + }); + }); + + describe('getNodeWebhookUrl', () => { + it('should return node webhook url', () => { + const url = hookContext.getNodeWebhookUrl('default'); + + expect(url).toContain('testPath'); + }); + }); + + describe('getWebhookName', () => { + it('should return webhook name', () => { + const name = hookContext.getWebhookName(); + + expect(name).toBe('default'); + }); + + it('should throw an error if webhookData is undefined', () => { + const hookContextWithoutWebhookData = new HookContext( + workflow, + node, + additionalData, + mode, + activation, + ); + + expect(() => 
hookContextWithoutWebhookData.getWebhookName()).toThrow(ApplicationError); + }); + }); + + describe('getWebhookDescription', () => { + it('should return webhook description', () => { + const description = hookContext.getWebhookDescription('default'); + + expect(description).toEqual(webhookDescription); + }); + }); +}); diff --git a/packages/core/src/node-execution-context/__tests__/load-options-context.test.ts b/packages/core/src/node-execution-context/__tests__/load-options-context.test.ts new file mode 100644 index 0000000000..5ab03e105d --- /dev/null +++ b/packages/core/src/node-execution-context/__tests__/load-options-context.test.ts @@ -0,0 +1,102 @@ +import { mock } from 'jest-mock-extended'; +import type { + Expression, + ICredentialDataDecryptedObject, + ICredentialsHelper, + INode, + INodeType, + INodeTypes, + IWorkflowExecuteAdditionalData, + Workflow, +} from 'n8n-workflow'; + +import { LoadOptionsContext } from '../load-options-context'; + +describe('LoadOptionsContext', () => { + const testCredentialType = 'testCredential'; + const nodeType = mock({ + description: { + credentials: [ + { + name: testCredentialType, + required: true, + }, + ], + properties: [ + { + name: 'testParameter', + required: true, + }, + ], + }, + }); + const nodeTypes = mock(); + const expression = mock(); + const workflow = mock({ expression, nodeTypes }); + const node = mock({ + credentials: { + [testCredentialType]: { + id: 'testCredentialId', + }, + }, + }); + node.parameters = { + testParameter: 'testValue', + }; + const credentialsHelper = mock(); + const additionalData = mock({ credentialsHelper }); + const path = 'testPath'; + + const loadOptionsContext = new LoadOptionsContext(workflow, node, additionalData, path); + + beforeEach(() => { + jest.clearAllMocks(); + }); + + describe('getCredentials', () => { + it('should get decrypted credentials', async () => { + nodeTypes.getByNameAndVersion.mockReturnValue(nodeType); + credentialsHelper.getDecrypted.mockResolvedValue({ secret: 'token' }); + + const credentials = + await loadOptionsContext.getCredentials(testCredentialType); + + expect(credentials).toEqual({ secret: 'token' }); + }); + }); + + describe('getCurrentNodeParameter', () => { + beforeEach(() => { + nodeTypes.getByNameAndVersion.mockReturnValue(nodeType); + }); + + it('should return parameter value when it exists', () => { + additionalData.currentNodeParameters = { + testParameter: 'testValue', + }; + + const parameter = loadOptionsContext.getCurrentNodeParameter('testParameter'); + + expect(parameter).toBe('testValue'); + }); + }); + + describe('getNodeParameter', () => { + beforeEach(() => { + nodeTypes.getByNameAndVersion.mockReturnValue(nodeType); + expression.getParameterValue.mockImplementation((value) => value); + }); + + it('should return parameter value when it exists', () => { + const parameter = loadOptionsContext.getNodeParameter('testParameter'); + + expect(parameter).toBe('testValue'); + }); + + it('should return the fallback value when the parameter does not exist', () => { + const parameter = loadOptionsContext.getNodeParameter('otherParameter', 'fallback'); + + expect(parameter).toBe('fallback'); + }); + }); +}); diff --git a/packages/core/src/node-execution-context/__tests__/poll-context.test.ts b/packages/core/src/node-execution-context/__tests__/poll-context.test.ts new file mode 100644 index 0000000000..f2ca37998b --- /dev/null +++ b/packages/core/src/node-execution-context/__tests__/poll-context.test.ts @@ -0,0 +1,96 @@ +import { mock } from 'jest-mock-extended'; 
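LoadOptionsContext, exercised by the spec above, is constructed with the parameter path the options request came from and reads current values from additionalData.currentNodeParameters. A small usage sketch in the same mock style as the spec; all values are placeholders:

import { mock } from 'jest-mock-extended';
import type { INode, IWorkflowExecuteAdditionalData, Workflow } from 'n8n-workflow';

import { LoadOptionsContext } from '../load-options-context';

const workflow = mock<Workflow>();
const node = mock<INode>();
const additionalData = mock<IWorkflowExecuteAdditionalData>();
additionalData.currentNodeParameters = { resource: 'contact' };

// The fourth argument is the path of the parameter whose options are being loaded.
const context = new LoadOptionsContext(workflow, node, additionalData, 'parameters.resource');

// Resolves against additionalData.currentNodeParameters, as the spec asserts.
console.log(context.getCurrentNodeParameter('resource')); // 'contact'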
+import type { + Expression, + ICredentialDataDecryptedObject, + ICredentialsHelper, + INode, + INodeType, + INodeTypes, + IWorkflowExecuteAdditionalData, + Workflow, + WorkflowActivateMode, + WorkflowExecuteMode, +} from 'n8n-workflow'; + +import { PollContext } from '../poll-context'; + +describe('PollContext', () => { + const testCredentialType = 'testCredential'; + const nodeType = mock({ + description: { + credentials: [ + { + name: testCredentialType, + required: true, + }, + ], + properties: [ + { + name: 'testParameter', + required: true, + }, + ], + }, + }); + const nodeTypes = mock(); + const expression = mock(); + const workflow = mock({ expression, nodeTypes }); + const node = mock({ + credentials: { + [testCredentialType]: { + id: 'testCredentialId', + }, + }, + }); + node.parameters = { + testParameter: 'testValue', + }; + const credentialsHelper = mock(); + const additionalData = mock({ credentialsHelper }); + const mode: WorkflowExecuteMode = 'manual'; + const activation: WorkflowActivateMode = 'init'; + + const pollContext = new PollContext(workflow, node, additionalData, mode, activation); + + beforeEach(() => { + jest.clearAllMocks(); + }); + + describe('getActivationMode', () => { + it('should return the activation property', () => { + const result = pollContext.getActivationMode(); + expect(result).toBe(activation); + }); + }); + + describe('getCredentials', () => { + it('should get decrypted credentials', async () => { + nodeTypes.getByNameAndVersion.mockReturnValue(nodeType); + credentialsHelper.getDecrypted.mockResolvedValue({ secret: 'token' }); + + const credentials = + await pollContext.getCredentials(testCredentialType); + + expect(credentials).toEqual({ secret: 'token' }); + }); + }); + + describe('getNodeParameter', () => { + beforeEach(() => { + nodeTypes.getByNameAndVersion.mockReturnValue(nodeType); + expression.getParameterValue.mockImplementation((value) => value); + }); + + it('should return parameter value when it exists', () => { + const parameter = pollContext.getNodeParameter('testParameter'); + + expect(parameter).toBe('testValue'); + }); + + it('should return the fallback value when the parameter does not exist', () => { + const parameter = pollContext.getNodeParameter('otherParameter', 'fallback'); + + expect(parameter).toBe('fallback'); + }); + }); +}); diff --git a/packages/core/src/node-execution-context/__tests__/trigger-context.test.ts b/packages/core/src/node-execution-context/__tests__/trigger-context.test.ts new file mode 100644 index 0000000000..3c91d22e6d --- /dev/null +++ b/packages/core/src/node-execution-context/__tests__/trigger-context.test.ts @@ -0,0 +1,96 @@ +import { mock } from 'jest-mock-extended'; +import type { + Expression, + ICredentialDataDecryptedObject, + ICredentialsHelper, + INode, + INodeType, + INodeTypes, + IWorkflowExecuteAdditionalData, + Workflow, + WorkflowActivateMode, + WorkflowExecuteMode, +} from 'n8n-workflow'; + +import { TriggerContext } from '../trigger-context'; + +describe('TriggerContext', () => { + const testCredentialType = 'testCredential'; + const nodeType = mock({ + description: { + credentials: [ + { + name: testCredentialType, + required: true, + }, + ], + properties: [ + { + name: 'testParameter', + required: true, + }, + ], + }, + }); + const nodeTypes = mock(); + const expression = mock(); + const workflow = mock({ expression, nodeTypes }); + const node = mock({ + credentials: { + [testCredentialType]: { + id: 'testCredentialId', + }, + }, + }); + node.parameters = { + testParameter: 
'testValue', + }; + const credentialsHelper = mock(); + const additionalData = mock({ credentialsHelper }); + const mode: WorkflowExecuteMode = 'manual'; + const activation: WorkflowActivateMode = 'init'; + + const triggerContext = new TriggerContext(workflow, node, additionalData, mode, activation); + + beforeEach(() => { + jest.clearAllMocks(); + }); + + describe('getActivationMode', () => { + it('should return the activation property', () => { + const result = triggerContext.getActivationMode(); + expect(result).toBe(activation); + }); + }); + + describe('getCredentials', () => { + it('should get decrypted credentials', async () => { + nodeTypes.getByNameAndVersion.mockReturnValue(nodeType); + credentialsHelper.getDecrypted.mockResolvedValue({ secret: 'token' }); + + const credentials = + await triggerContext.getCredentials(testCredentialType); + + expect(credentials).toEqual({ secret: 'token' }); + }); + }); + + describe('getNodeParameter', () => { + beforeEach(() => { + nodeTypes.getByNameAndVersion.mockReturnValue(nodeType); + expression.getParameterValue.mockImplementation((value) => value); + }); + + it('should return parameter value when it exists', () => { + const parameter = triggerContext.getNodeParameter('testParameter'); + + expect(parameter).toBe('testValue'); + }); + + it('should return the fallback value when the parameter does not exist', () => { + const parameter = triggerContext.getNodeParameter('otherParameter', 'fallback'); + + expect(parameter).toBe('fallback'); + }); + }); +}); diff --git a/packages/core/src/node-execution-context/__tests__/webhook-context.test.ts b/packages/core/src/node-execution-context/__tests__/webhook-context.test.ts new file mode 100644 index 0000000000..15d425c988 --- /dev/null +++ b/packages/core/src/node-execution-context/__tests__/webhook-context.test.ts @@ -0,0 +1,161 @@ +import type { Request, Response } from 'express'; +import { mock } from 'jest-mock-extended'; +import type { + Expression, + ICredentialDataDecryptedObject, + ICredentialsHelper, + INode, + INodeType, + INodeTypes, + IWebhookData, + IWorkflowExecuteAdditionalData, + Workflow, + WorkflowExecuteMode, +} from 'n8n-workflow'; + +import { WebhookContext } from '../webhook-context'; + +describe('WebhookContext', () => { + const testCredentialType = 'testCredential'; + const nodeType = mock({ + description: { + credentials: [ + { + name: testCredentialType, + required: true, + }, + ], + properties: [ + { + name: 'testParameter', + required: true, + }, + ], + }, + }); + const nodeTypes = mock(); + const expression = mock(); + const workflow = mock({ expression, nodeTypes }); + const node = mock({ + credentials: { + [testCredentialType]: { + id: 'testCredentialId', + }, + }, + }); + node.parameters = { + testParameter: 'testValue', + }; + const credentialsHelper = mock(); + const additionalData = mock({ + credentialsHelper, + }); + additionalData.httpRequest = { + body: { test: 'body' }, + headers: { test: 'header' }, + params: { test: 'param' }, + query: { test: 'query' }, + } as unknown as Request; + additionalData.httpResponse = mock(); + const mode: WorkflowExecuteMode = 'manual'; + const webhookData = mock({ + webhookDescription: { + name: 'default', + }, + }); + const runExecutionData = null; + + const webhookContext = new WebhookContext( + workflow, + node, + additionalData, + mode, + webhookData, + [], + runExecutionData, + ); + + beforeEach(() => { + jest.clearAllMocks(); + }); + + describe('getCredentials', () => { + it('should get decrypted credentials', async () => { 
+ nodeTypes.getByNameAndVersion.mockReturnValue(nodeType); + credentialsHelper.getDecrypted.mockResolvedValue({ secret: 'token' }); + + const credentials = + await webhookContext.getCredentials(testCredentialType); + + expect(credentials).toEqual({ secret: 'token' }); + }); + }); + + describe('getBodyData', () => { + it('should return the body data of the request', () => { + const bodyData = webhookContext.getBodyData(); + expect(bodyData).toEqual({ test: 'body' }); + }); + }); + + describe('getHeaderData', () => { + it('should return the header data of the request', () => { + const headerData = webhookContext.getHeaderData(); + expect(headerData).toEqual({ test: 'header' }); + }); + }); + + describe('getParamsData', () => { + it('should return the params data of the request', () => { + const paramsData = webhookContext.getParamsData(); + expect(paramsData).toEqual({ test: 'param' }); + }); + }); + + describe('getQueryData', () => { + it('should return the query data of the request', () => { + const queryData = webhookContext.getQueryData(); + expect(queryData).toEqual({ test: 'query' }); + }); + }); + + describe('getRequestObject', () => { + it('should return the request object', () => { + const request = webhookContext.getRequestObject(); + expect(request).toBe(additionalData.httpRequest); + }); + }); + + describe('getResponseObject', () => { + it('should return the response object', () => { + const response = webhookContext.getResponseObject(); + expect(response).toBe(additionalData.httpResponse); + }); + }); + + describe('getWebhookName', () => { + it('should return the name of the webhook', () => { + const webhookName = webhookContext.getWebhookName(); + expect(webhookName).toBe('default'); + }); + }); + + describe('getNodeParameter', () => { + beforeEach(() => { + nodeTypes.getByNameAndVersion.mockReturnValue(nodeType); + expression.getParameterValue.mockImplementation((value) => value); + }); + + it('should return parameter value when it exists', () => { + const parameter = webhookContext.getNodeParameter('testParameter'); + + expect(parameter).toBe('testValue'); + }); + + it('should return the fallback value when the parameter does not exist', () => { + const parameter = webhookContext.getNodeParameter('otherParameter', 'fallback'); + + expect(parameter).toBe('fallback'); + }); + }); +}); diff --git a/packages/core/src/node-execution-context/helpers/__tests__/binary-helpers.test.ts b/packages/core/src/node-execution-context/helpers/__tests__/binary-helpers.test.ts new file mode 100644 index 0000000000..302713954f --- /dev/null +++ b/packages/core/src/node-execution-context/helpers/__tests__/binary-helpers.test.ts @@ -0,0 +1,136 @@ +import FileType from 'file-type'; +import { IncomingMessage, type ClientRequest } from 'http'; +import { mock } from 'jest-mock-extended'; +import type { Workflow, IWorkflowExecuteAdditionalData, IBinaryData } from 'n8n-workflow'; +import type { Socket } from 'net'; +import { Container } from 'typedi'; + +import { BinaryDataService } from '@/BinaryData/BinaryData.service'; + +import { BinaryHelpers } from '../binary-helpers'; + +jest.mock('file-type'); + +describe('BinaryHelpers', () => { + let binaryDataService = mock(); + Container.set(BinaryDataService, binaryDataService); + const workflow = mock({ id: '123' }); + const additionalData = mock({ executionId: '456' }); + const binaryHelpers = new BinaryHelpers(workflow, additionalData); + + beforeEach(() => { + jest.clearAllMocks(); + + binaryDataService.store.mockImplementation( + async (_workflowId, 
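The accessors covered by the webhook-context spec above are what a node's webhook handler sees at runtime: WebhookContext implements IWebhookFunctions, so body, query, header, and params data all come from the stored additionalData.httpRequest. A hedged sketch of a handler using that surface; the handler name and response shape are placeholders:

import type { IWebhookFunctions, IWebhookResponseData } from 'n8n-workflow';

async function handleWebhook(this: IWebhookFunctions): Promise<IWebhookResponseData> {
  const body = this.getBodyData(); // request body
  const query = this.getQueryData(); // query-string parameters
  const headers = this.getHeaderData(); // incoming headers

  // Helpers such as returnJsonArray are exposed on the context, as before the refactor.
  return {
    workflowData: [this.helpers.returnJsonArray([{ body, query, headers }])],
  };
}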
_executionId, _buffer, value) => value, + ); + }); + + describe('getBinaryPath', () => { + it('should call getPath method of BinaryDataService', () => { + binaryHelpers.getBinaryPath('mock-binary-data-id'); + expect(binaryDataService.getPath).toHaveBeenCalledWith('mock-binary-data-id'); + }); + }); + + describe('getBinaryMetadata', () => { + it('should call getMetadata method of BinaryDataService', async () => { + await binaryHelpers.getBinaryMetadata('mock-binary-data-id'); + expect(binaryDataService.getMetadata).toHaveBeenCalledWith('mock-binary-data-id'); + }); + }); + + describe('getBinaryStream', () => { + it('should call getStream method of BinaryDataService', async () => { + await binaryHelpers.getBinaryStream('mock-binary-data-id'); + expect(binaryDataService.getAsStream).toHaveBeenCalledWith('mock-binary-data-id', undefined); + }); + }); + + describe('prepareBinaryData', () => { + it('should guess the mime type and file extension if not provided', async () => { + const buffer = Buffer.from('test'); + const fileTypeData = { mime: 'application/pdf', ext: 'pdf' }; + (FileType.fromBuffer as jest.Mock).mockResolvedValue(fileTypeData); + + const binaryData = await binaryHelpers.prepareBinaryData(buffer); + + expect(binaryData.mimeType).toEqual('application/pdf'); + expect(binaryData.fileExtension).toEqual('pdf'); + expect(binaryData.fileType).toEqual('pdf'); + expect(binaryData.fileName).toBeUndefined(); + expect(binaryData.directory).toBeUndefined(); + expect(binaryDataService.store).toHaveBeenCalledWith( + workflow.id, + additionalData.executionId!, + buffer, + binaryData, + ); + }); + + it('should use the provided mime type and file extension if provided', async () => { + const buffer = Buffer.from('test'); + const mimeType = 'application/octet-stream'; + + const binaryData = await binaryHelpers.prepareBinaryData(buffer, undefined, mimeType); + + expect(binaryData.mimeType).toEqual(mimeType); + expect(binaryData.fileExtension).toEqual('bin'); + expect(binaryData.fileType).toBeUndefined(); + expect(binaryData.fileName).toBeUndefined(); + expect(binaryData.directory).toBeUndefined(); + expect(binaryDataService.store).toHaveBeenCalledWith( + workflow.id, + additionalData.executionId!, + buffer, + binaryData, + ); + }); + + const mockSocket = mock({ readableHighWaterMark: 0 }); + + it('should use the contentDisposition.filename, responseUrl, and contentType properties to set the fileName, directory, and mimeType properties of the binaryData object', async () => { + const incomingMessage = new IncomingMessage(mockSocket); + incomingMessage.contentDisposition = { filename: 'test.txt', type: 'attachment' }; + incomingMessage.contentType = 'text/plain'; + incomingMessage.responseUrl = 'https://example.com/test.txt'; + + const binaryData = await binaryHelpers.prepareBinaryData(incomingMessage); + + expect(binaryData.fileName).toEqual('test.txt'); + expect(binaryData.fileType).toEqual('text'); + expect(binaryData.directory).toBeUndefined(); + expect(binaryData.mimeType).toEqual('text/plain'); + expect(binaryData.fileExtension).toEqual('txt'); + }); + + it('should use the req.path property to set the fileName property of the binaryData object if contentDisposition.filename and responseUrl are not provided', async () => { + const incomingMessage = new IncomingMessage(mockSocket); + incomingMessage.contentType = 'text/plain'; + incomingMessage.req = mock({ path: '/test.txt' }); + + const binaryData = await binaryHelpers.prepareBinaryData(incomingMessage); + + 
expect(binaryData.fileName).toEqual('test.txt'); + expect(binaryData.directory).toBeUndefined(); + expect(binaryData.mimeType).toEqual('text/plain'); + expect(binaryData.fileExtension).toEqual('txt'); + }); + }); + + describe('setBinaryDataBuffer', () => { + it('should call store method of BinaryDataService', async () => { + const binaryData = mock(); + const bufferOrStream = mock(); + + await binaryHelpers.setBinaryDataBuffer(binaryData, bufferOrStream); + + expect(binaryDataService.store).toHaveBeenCalledWith( + workflow.id, + additionalData.executionId, + bufferOrStream, + binaryData, + ); + }); + }); +}); diff --git a/packages/core/src/node-execution-context/helpers/__tests__/scheduling-helpers.test.ts b/packages/core/src/node-execution-context/helpers/__tests__/scheduling-helpers.test.ts new file mode 100644 index 0000000000..06abae8204 --- /dev/null +++ b/packages/core/src/node-execution-context/helpers/__tests__/scheduling-helpers.test.ts @@ -0,0 +1,33 @@ +import { mock } from 'jest-mock-extended'; +import type { Workflow } from 'n8n-workflow'; +import { Container } from 'typedi'; + +import { ScheduledTaskManager } from '@/ScheduledTaskManager'; + +import { SchedulingHelpers } from '../scheduling-helpers'; + +describe('SchedulingHelpers', () => { + const scheduledTaskManager = mock(); + Container.set(ScheduledTaskManager, scheduledTaskManager); + const workflow = mock(); + const schedulingHelpers = new SchedulingHelpers(workflow); + + beforeEach(() => { + jest.clearAllMocks(); + }); + + describe('registerCron', () => { + it('should call registerCron method of ScheduledTaskManager', () => { + const cronExpression = '* * * * * *'; + const onTick = jest.fn(); + + schedulingHelpers.registerCron(cronExpression, onTick); + + expect(scheduledTaskManager.registerCron).toHaveBeenCalledWith( + workflow, + cronExpression, + onTick, + ); + }); + }); +}); diff --git a/packages/core/src/node-execution-context/helpers/__tests__/ssh-tunnel-helpers.test.ts b/packages/core/src/node-execution-context/helpers/__tests__/ssh-tunnel-helpers.test.ts new file mode 100644 index 0000000000..cbe6916eea --- /dev/null +++ b/packages/core/src/node-execution-context/helpers/__tests__/ssh-tunnel-helpers.test.ts @@ -0,0 +1,32 @@ +import { mock } from 'jest-mock-extended'; +import type { SSHCredentials } from 'n8n-workflow'; +import type { Client } from 'ssh2'; +import { Container } from 'typedi'; + +import { SSHClientsManager } from '@/SSHClientsManager'; + +import { SSHTunnelHelpers } from '../ssh-tunnel-helpers'; + +describe('SSHTunnelHelpers', () => { + const sshClientsManager = mock(); + Container.set(SSHClientsManager, sshClientsManager); + const sshTunnelHelpers = new SSHTunnelHelpers(); + + beforeEach(() => { + jest.clearAllMocks(); + }); + + describe('getSSHClient', () => { + const credentials = mock(); + + it('should call SSHClientsManager.getClient with the given credentials', async () => { + const mockClient = mock(); + sshClientsManager.getClient.mockResolvedValue(mockClient); + + const client = await sshTunnelHelpers.getSSHClient(credentials); + + expect(sshClientsManager.getClient).toHaveBeenCalledWith(credentials); + expect(client).toBe(mockClient); + }); + }); +}); diff --git a/packages/core/src/node-execution-context/helpers/binary-helpers.ts b/packages/core/src/node-execution-context/helpers/binary-helpers.ts new file mode 100644 index 0000000000..a15c59139b --- /dev/null +++ b/packages/core/src/node-execution-context/helpers/binary-helpers.ts @@ -0,0 +1,148 @@ +import FileType from 'file-type'; 
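The two small specs above show the shared pattern of this PR's helper classes: each exposes an `exported` getter with its methods pre-bound, so contexts can compose their `helpers` object by spreading. A sketch of that composition; the function name is illustrative:

import type { SchedulingFunctions, SSHTunnelFunctions, Workflow } from 'n8n-workflow';

import { SchedulingHelpers } from '../scheduling-helpers';
import { SSHTunnelHelpers } from '../ssh-tunnel-helpers';

function buildTriggerHelpers(workflow: Workflow): SchedulingFunctions & SSHTunnelFunctions {
  // Spreading `.exported` hands out only bound functions, never the helper instances,
  // which is how PollContext and LoadOptionsContext assemble their helpers objects.
  return {
    ...new SchedulingHelpers(workflow).exported,
    ...new SSHTunnelHelpers().exported,
  };
}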
+import { IncomingMessage } from 'http'; +import MimeTypes from 'mime-types'; +import { ApplicationError, fileTypeFromMimeType } from 'n8n-workflow'; +import type { + BinaryHelperFunctions, + IWorkflowExecuteAdditionalData, + Workflow, + IBinaryData, +} from 'n8n-workflow'; +import path from 'path'; +import type { Readable } from 'stream'; +import Container from 'typedi'; + +import { BinaryDataService } from '@/BinaryData/BinaryData.service'; +import { binaryToBuffer } from '@/BinaryData/utils'; +// eslint-disable-next-line import/no-cycle +import { binaryToString } from '@/NodeExecuteFunctions'; + +export class BinaryHelpers { + private readonly binaryDataService = Container.get(BinaryDataService); + + constructor( + private readonly workflow: Workflow, + private readonly additionalData: IWorkflowExecuteAdditionalData, + ) {} + + get exported(): BinaryHelperFunctions { + return { + getBinaryPath: this.getBinaryPath.bind(this), + getBinaryMetadata: this.getBinaryMetadata.bind(this), + getBinaryStream: this.getBinaryStream.bind(this), + binaryToBuffer, + binaryToString, + prepareBinaryData: this.prepareBinaryData.bind(this), + setBinaryDataBuffer: this.setBinaryDataBuffer.bind(this), + copyBinaryFile: this.copyBinaryFile.bind(this), + }; + } + + getBinaryPath(binaryDataId: string) { + return this.binaryDataService.getPath(binaryDataId); + } + + async getBinaryMetadata(binaryDataId: string) { + return await this.binaryDataService.getMetadata(binaryDataId); + } + + async getBinaryStream(binaryDataId: string, chunkSize?: number) { + return await this.binaryDataService.getAsStream(binaryDataId, chunkSize); + } + + // eslint-disable-next-line complexity + async prepareBinaryData(binaryData: Buffer | Readable, filePath?: string, mimeType?: string) { + let fileExtension: string | undefined; + if (binaryData instanceof IncomingMessage) { + if (!filePath) { + try { + const { responseUrl } = binaryData; + filePath = + binaryData.contentDisposition?.filename ?? + ((responseUrl && new URL(responseUrl).pathname) ?? 
binaryData.req?.path)?.slice(1); + } catch {} + } + if (!mimeType) { + mimeType = binaryData.contentType; + } + } + + if (!mimeType) { + // If no mime type is given figure it out + + if (filePath) { + // Use file path to guess mime type + const mimeTypeLookup = MimeTypes.lookup(filePath); + if (mimeTypeLookup) { + mimeType = mimeTypeLookup; + } + } + + if (!mimeType) { + if (Buffer.isBuffer(binaryData)) { + // Use buffer to guess mime type + const fileTypeData = await FileType.fromBuffer(binaryData); + if (fileTypeData) { + mimeType = fileTypeData.mime; + fileExtension = fileTypeData.ext; + } + } else if (binaryData instanceof IncomingMessage) { + mimeType = binaryData.headers['content-type']; + } else { + // TODO: detect filetype from other kind of streams + } + } + } + + if (!fileExtension && mimeType) { + fileExtension = MimeTypes.extension(mimeType) || undefined; + } + + if (!mimeType) { + // Fall back to text + mimeType = 'text/plain'; + } + + const returnData: IBinaryData = { + mimeType, + fileType: fileTypeFromMimeType(mimeType), + fileExtension, + data: '', + }; + + if (filePath) { + if (filePath.includes('?')) { + // Remove maybe present query parameters + filePath = filePath.split('?').shift(); + } + + const filePathParts = path.parse(filePath as string); + + if (filePathParts.dir !== '') { + returnData.directory = filePathParts.dir; + } + returnData.fileName = filePathParts.base; + + // Remove the dot + const extractedFileExtension = filePathParts.ext.slice(1); + if (extractedFileExtension) { + returnData.fileExtension = extractedFileExtension; + } + } + + return await this.setBinaryDataBuffer(returnData, binaryData); + } + + async setBinaryDataBuffer(binaryData: IBinaryData, bufferOrStream: Buffer | Readable) { + return await this.binaryDataService.store( + this.workflow.id, + this.additionalData.executionId!, + bufferOrStream, + binaryData, + ); + } + + async copyBinaryFile(): Promise { + throw new ApplicationError('`copyBinaryFile` has been removed. 
Please upgrade this node.'); + } +} diff --git a/packages/core/src/node-execution-context/helpers/request-helpers.ts b/packages/core/src/node-execution-context/helpers/request-helpers.ts new file mode 100644 index 0000000000..2c5eb19290 --- /dev/null +++ b/packages/core/src/node-execution-context/helpers/request-helpers.ts @@ -0,0 +1,381 @@ +import { createHash } from 'crypto'; +import { pick } from 'lodash'; +import { jsonParse, NodeOperationError, sleep } from 'n8n-workflow'; +import type { + RequestHelperFunctions, + IAdditionalCredentialOptions, + IAllExecuteFunctions, + IExecuteData, + IHttpRequestOptions, + IN8nHttpFullResponse, + IN8nHttpResponse, + INode, + INodeExecutionData, + IOAuth2Options, + IRequestOptions, + IRunExecutionData, + IWorkflowDataProxyAdditionalKeys, + IWorkflowExecuteAdditionalData, + NodeParameterValueType, + PaginationOptions, + Workflow, + WorkflowExecuteMode, +} from 'n8n-workflow'; +import { Readable } from 'stream'; + +// eslint-disable-next-line import/no-cycle +import { + applyPaginationRequestData, + binaryToString, + httpRequest, + httpRequestWithAuthentication, + proxyRequestToAxios, + requestOAuth1, + requestOAuth2, + requestWithAuthentication, + validateUrl, +} from '@/NodeExecuteFunctions'; + +export class RequestHelpers { + constructor( + private readonly context: IAllExecuteFunctions, + private readonly workflow: Workflow, + private readonly node: INode, + private readonly additionalData: IWorkflowExecuteAdditionalData, + private readonly runExecutionData: IRunExecutionData | null = null, + private readonly connectionInputData: INodeExecutionData[] = [], + ) {} + + get exported(): RequestHelperFunctions { + return { + httpRequest, + httpRequestWithAuthentication: this.httpRequestWithAuthentication.bind(this), + requestWithAuthenticationPaginated: this.requestWithAuthenticationPaginated.bind(this), + request: this.request.bind(this), + requestWithAuthentication: this.requestWithAuthentication.bind(this), + requestOAuth1: this.requestOAuth1.bind(this), + requestOAuth2: this.requestOAuth2.bind(this), + }; + } + + get httpRequest() { + return httpRequest; + } + + async httpRequestWithAuthentication( + credentialsType: string, + requestOptions: IHttpRequestOptions, + additionalCredentialOptions?: IAdditionalCredentialOptions, + ) { + // eslint-disable-next-line @typescript-eslint/no-unsafe-return + return await httpRequestWithAuthentication.call( + this.context, + credentialsType, + requestOptions, + this.workflow, + this.node, + this.additionalData, + additionalCredentialOptions, + ); + } + + // eslint-disable-next-line complexity + async requestWithAuthenticationPaginated( + requestOptions: IRequestOptions, + itemIndex: number, + paginationOptions: PaginationOptions, + credentialsType?: string, + additionalCredentialOptions?: IAdditionalCredentialOptions, + ): Promise { + const responseData = []; + if (!requestOptions.qs) { + requestOptions.qs = {}; + } + requestOptions.resolveWithFullResponse = true; + requestOptions.simple = false; + + let tempResponseData: IN8nHttpFullResponse; + let makeAdditionalRequest: boolean; + let paginateRequestData: PaginationOptions['request']; + + const runIndex = 0; + + const additionalKeys = { + $request: requestOptions, + $response: {} as IN8nHttpFullResponse, + $version: this.node.typeVersion, + $pageCount: 0, + }; + + const executeData: IExecuteData = { + data: {}, + node: this.node, + source: null, + }; + + const hashData = { + identicalCount: 0, + previousLength: 0, + previousHash: '', + }; + + do { + 
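prepareBinaryData above infers the mime type and file extension from the file path, then from the buffer contents, and finally falls back to text/plain before handing storage off to BinaryDataService. A spec-style sketch of that behaviour, with the storage backend mocked out as in the binary-helpers spec; the ids and file names are placeholders:

import { mock } from 'jest-mock-extended';
import type { IWorkflowExecuteAdditionalData, Workflow } from 'n8n-workflow';
import { Container } from 'typedi';

import { BinaryDataService } from '@/BinaryData/BinaryData.service';

import { BinaryHelpers } from '../binary-helpers';

async function demo() {
  // Echo the prepared metadata back instead of writing anywhere, as the spec does.
  const binaryDataService = mock<BinaryDataService>();
  binaryDataService.store.mockImplementation(async (_workflowId, _executionId, _buffer, value) => value);
  Container.set(BinaryDataService, binaryDataService);

  const helpers = new BinaryHelpers(
    mock<Workflow>({ id: 'wf-1' }),
    mock<IWorkflowExecuteAdditionalData>({ executionId: 'exec-1' }),
  );

  // With a file path, the mime type and extension are looked up from 'report.pdf'.
  const fromPath = await helpers.prepareBinaryData(Buffer.from('%PDF-1.7'), 'report.pdf');

  // With neither path nor mime type, the buffer is sniffed, then text/plain is the fallback.
  const fallback = await helpers.prepareBinaryData(Buffer.from('no recognizable signature'));

  return { fromPath: fromPath.mimeType, fallback: fallback.mimeType };
}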
paginateRequestData = this.getResolvedValue( + paginationOptions.request as unknown as NodeParameterValueType, + itemIndex, + runIndex, + executeData, + additionalKeys, + false, + ) as object as PaginationOptions['request']; + + const tempRequestOptions = applyPaginationRequestData(requestOptions, paginateRequestData); + + if (!validateUrl(tempRequestOptions.uri as string)) { + throw new NodeOperationError( + this.node, + `'${paginateRequestData.url}' is not a valid URL.`, + { + itemIndex, + runIndex, + type: 'invalid_url', + }, + ); + } + + if (credentialsType) { + // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment + tempResponseData = await this.requestWithAuthentication( + credentialsType, + tempRequestOptions, + additionalCredentialOptions, + ); + } else { + // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment + tempResponseData = await this.request(tempRequestOptions); + } + + const newResponse: IN8nHttpFullResponse = Object.assign( + { + body: {}, + headers: {}, + statusCode: 0, + }, + pick(tempResponseData, ['body', 'headers', 'statusCode']), + ); + + let contentBody: Exclude; + + if (newResponse.body instanceof Readable && paginationOptions.binaryResult !== true) { + // Keep the original string version that we can use it to hash if needed + contentBody = await binaryToString(newResponse.body as Buffer | Readable); + + const responseContentType = newResponse.headers['content-type']?.toString() ?? ''; + if (responseContentType.includes('application/json')) { + newResponse.body = jsonParse(contentBody, { fallbackValue: {} }); + } else { + newResponse.body = contentBody; + } + tempResponseData.__bodyResolved = true; + tempResponseData.body = newResponse.body; + } else { + contentBody = newResponse.body; + } + + if (paginationOptions.binaryResult !== true || tempResponseData.headers.etag) { + // If the data is not binary (and so not a stream), or an etag is present, + // we check via etag or hash if identical data is received + + let contentLength = 0; + if ('content-length' in tempResponseData.headers) { + contentLength = parseInt(tempResponseData.headers['content-length'] as string) || 0; + } + + if (hashData.previousLength === contentLength) { + let hash: string; + if (tempResponseData.headers.etag) { + // If an etag is provided, we use it as "hash" + hash = tempResponseData.headers.etag as string; + } else { + // If there is no etag, we calculate a hash from the data in the body + if (typeof contentBody !== 'string') { + contentBody = JSON.stringify(contentBody); + } + hash = createHash('md5').update(contentBody).digest('base64'); + } + + if (hashData.previousHash === hash) { + hashData.identicalCount += 1; + if (hashData.identicalCount > 2) { + // Length was identical 5x and hash 3x + throw new NodeOperationError( + this.node, + 'The returned response was identical 5x, so requests got stopped', + { + itemIndex, + description: + 'Check if "Pagination Completed When" has been configured correctly.', + }, + ); + } + } else { + hashData.identicalCount = 0; + } + hashData.previousHash = hash; + } else { + hashData.identicalCount = 0; + } + hashData.previousLength = contentLength; + } + + responseData.push(tempResponseData); + + additionalKeys.$response = newResponse; + additionalKeys.$pageCount = additionalKeys.$pageCount + 1; + + const maxRequests = this.getResolvedValue( + paginationOptions.maxRequests, + itemIndex, + runIndex, + executeData, + additionalKeys, + false, + ) as number; + + if (maxRequests && additionalKeys.$pageCount >= maxRequests) { + 
break; + } + + makeAdditionalRequest = this.getResolvedValue( + paginationOptions.continue, + itemIndex, + runIndex, + executeData, + additionalKeys, + false, + ) as boolean; + + if (makeAdditionalRequest) { + if (paginationOptions.requestInterval) { + const requestInterval = this.getResolvedValue( + paginationOptions.requestInterval, + itemIndex, + runIndex, + executeData, + additionalKeys, + false, + ) as number; + + await sleep(requestInterval); + } + if (tempResponseData.statusCode < 200 || tempResponseData.statusCode >= 300) { + // We have it configured to let all requests pass no matter the response code + // via "requestOptions.simple = false" to not by default fail if it is for example + // configured to stop on 404 response codes. For that reason we have to throw here + // now an error manually if the response code is not a success one. + let data = tempResponseData.body; + if (data instanceof Readable && paginationOptions.binaryResult !== true) { + data = await binaryToString(data as Buffer | Readable); + } else if (typeof data === 'object') { + data = JSON.stringify(data); + } + + throw Object.assign(new Error(`${tempResponseData.statusCode} - "${data?.toString()}"`), { + statusCode: tempResponseData.statusCode, + error: data, + isAxiosError: true, + response: { + headers: tempResponseData.headers, + status: tempResponseData.statusCode, + statusText: tempResponseData.statusMessage, + }, + }); + } + } + } while (makeAdditionalRequest); + + return responseData; + } + + async request(uriOrObject: string | IRequestOptions, options?: IRequestOptions) { + // eslint-disable-next-line @typescript-eslint/no-unsafe-return + return await proxyRequestToAxios( + this.workflow, + this.additionalData, + this.node, + uriOrObject, + options, + ); + } + + async requestWithAuthentication( + credentialsType: string, + requestOptions: IRequestOptions, + additionalCredentialOptions?: IAdditionalCredentialOptions, + itemIndex?: number, + ) { + // eslint-disable-next-line @typescript-eslint/no-unsafe-return + return await requestWithAuthentication.call( + this.context, + credentialsType, + requestOptions, + this.workflow, + this.node, + this.additionalData, + additionalCredentialOptions, + itemIndex, + ); + } + + async requestOAuth1(credentialsType: string, requestOptions: IRequestOptions) { + // eslint-disable-next-line @typescript-eslint/no-unsafe-return + return await requestOAuth1.call(this.context, credentialsType, requestOptions); + } + + async requestOAuth2( + credentialsType: string, + requestOptions: IRequestOptions, + oAuth2Options?: IOAuth2Options, + ) { + // eslint-disable-next-line @typescript-eslint/no-unsafe-return + return await requestOAuth2.call( + this.context, + credentialsType, + requestOptions, + this.node, + this.additionalData, + oAuth2Options, + ); + } + + private getResolvedValue( + parameterValue: NodeParameterValueType, + itemIndex: number, + runIndex: number, + executeData: IExecuteData, + additionalKeys?: IWorkflowDataProxyAdditionalKeys, + returnObjectAsString = false, + ): NodeParameterValueType { + const mode: WorkflowExecuteMode = 'internal'; + + if ( + typeof parameterValue === 'object' || + (typeof parameterValue === 'string' && parameterValue.charAt(0) === '=') + ) { + return this.workflow.expression.getParameterValue( + parameterValue, + this.runExecutionData, + runIndex, + itemIndex, + this.node.name, + this.connectionInputData, + mode, + additionalKeys ?? 
{}, + executeData, + returnObjectAsString, + ); + } + + return parameterValue; + } +} diff --git a/packages/core/src/node-execution-context/helpers/scheduling-helpers.ts b/packages/core/src/node-execution-context/helpers/scheduling-helpers.ts new file mode 100644 index 0000000000..e193f2beaf --- /dev/null +++ b/packages/core/src/node-execution-context/helpers/scheduling-helpers.ts @@ -0,0 +1,20 @@ +import type { CronExpression, Workflow, SchedulingFunctions } from 'n8n-workflow'; +import { Container } from 'typedi'; + +import { ScheduledTaskManager } from '@/ScheduledTaskManager'; + +export class SchedulingHelpers { + private readonly scheduledTaskManager = Container.get(ScheduledTaskManager); + + constructor(private readonly workflow: Workflow) {} + + get exported(): SchedulingFunctions { + return { + registerCron: this.registerCron.bind(this), + }; + } + + registerCron(cronExpression: CronExpression, onTick: () => void) { + this.scheduledTaskManager.registerCron(this.workflow, cronExpression, onTick); + } +} diff --git a/packages/core/src/node-execution-context/helpers/ssh-tunnel-helpers.ts b/packages/core/src/node-execution-context/helpers/ssh-tunnel-helpers.ts new file mode 100644 index 0000000000..f44df0e166 --- /dev/null +++ b/packages/core/src/node-execution-context/helpers/ssh-tunnel-helpers.ts @@ -0,0 +1,18 @@ +import type { SSHCredentials, SSHTunnelFunctions } from 'n8n-workflow'; +import { Container } from 'typedi'; + +import { SSHClientsManager } from '@/SSHClientsManager'; + +export class SSHTunnelHelpers { + private readonly sshClientsManager = Container.get(SSHClientsManager); + + get exported(): SSHTunnelFunctions { + return { + getSSHClient: this.getSSHClient.bind(this), + }; + } + + async getSSHClient(credentials: SSHCredentials) { + return await this.sshClientsManager.getClient(credentials); + } +} diff --git a/packages/core/src/node-execution-context/hook-context.ts b/packages/core/src/node-execution-context/hook-context.ts new file mode 100644 index 0000000000..7cc6567779 --- /dev/null +++ b/packages/core/src/node-execution-context/hook-context.ts @@ -0,0 +1,103 @@ +import type { + ICredentialDataDecryptedObject, + IGetNodeParameterOptions, + INode, + INodeExecutionData, + IHookFunctions, + IRunExecutionData, + IWorkflowExecuteAdditionalData, + NodeParameterValueType, + Workflow, + WorkflowActivateMode, + WorkflowExecuteMode, + IWebhookData, + WebhookType, +} from 'n8n-workflow'; +import { ApplicationError } from 'n8n-workflow'; + +// eslint-disable-next-line import/no-cycle +import { + getAdditionalKeys, + getCredentials, + getNodeParameter, + getNodeWebhookUrl, + getWebhookDescription, +} from '@/NodeExecuteFunctions'; + +import { RequestHelpers } from './helpers/request-helpers'; +import { NodeExecutionContext } from './node-execution-context'; + +export class HookContext extends NodeExecutionContext implements IHookFunctions { + readonly helpers: IHookFunctions['helpers']; + + constructor( + workflow: Workflow, + node: INode, + additionalData: IWorkflowExecuteAdditionalData, + mode: WorkflowExecuteMode, + private readonly activation: WorkflowActivateMode, + private readonly webhookData?: IWebhookData, + ) { + super(workflow, node, additionalData, mode); + + this.helpers = new RequestHelpers(this, workflow, node, additionalData); + } + + getActivationMode() { + return this.activation; + } + + async getCredentials(type: string) { + return await getCredentials(this.workflow, this.node, type, this.additionalData, this.mode); + } + + getNodeParameter( + parameterName: 
string, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + fallbackValue?: any, + options?: IGetNodeParameterOptions, + ): NodeParameterValueType | object { + const runExecutionData: IRunExecutionData | null = null; + const itemIndex = 0; + const runIndex = 0; + const connectionInputData: INodeExecutionData[] = []; + + return getNodeParameter( + this.workflow, + runExecutionData, + runIndex, + connectionInputData, + this.node, + parameterName, + itemIndex, + this.mode, + getAdditionalKeys(this.additionalData, this.mode, runExecutionData), + undefined, + fallbackValue, + options, + ); + } + + getNodeWebhookUrl(name: WebhookType): string | undefined { + return getNodeWebhookUrl( + name, + this.workflow, + this.node, + this.additionalData, + this.mode, + getAdditionalKeys(this.additionalData, this.mode, null), + this.webhookData?.isTest, + ); + } + + getWebhookName(): string { + if (this.webhookData === undefined) { + throw new ApplicationError('Only supported in webhook functions'); + } + return this.webhookData.webhookDescription.name; + } + + getWebhookDescription(name: WebhookType) { + return getWebhookDescription(name, this.workflow, this.node); + } +} diff --git a/packages/core/src/node-execution-context/index.ts b/packages/core/src/node-execution-context/index.ts new file mode 100644 index 0000000000..a6397c60ce --- /dev/null +++ b/packages/core/src/node-execution-context/index.ts @@ -0,0 +1,6 @@ +// eslint-disable-next-line import/no-cycle +export { HookContext } from './hook-context'; +export { LoadOptionsContext } from './load-options-context'; +export { PollContext } from './poll-context'; +export { TriggerContext } from './trigger-context'; +export { WebhookContext } from './webhook-context'; diff --git a/packages/core/src/node-execution-context/load-options-context.ts b/packages/core/src/node-execution-context/load-options-context.ts new file mode 100644 index 0000000000..98dd58210b --- /dev/null +++ b/packages/core/src/node-execution-context/load-options-context.ts @@ -0,0 +1,102 @@ +import { get } from 'lodash'; +import type { + ICredentialDataDecryptedObject, + IGetNodeParameterOptions, + INode, + INodeExecutionData, + ILoadOptionsFunctions, + IRunExecutionData, + IWorkflowExecuteAdditionalData, + NodeParameterValueType, + Workflow, +} from 'n8n-workflow'; + +import { extractValue } from '@/ExtractValue'; +// eslint-disable-next-line import/no-cycle +import { getAdditionalKeys, getCredentials, getNodeParameter } from '@/NodeExecuteFunctions'; + +import { RequestHelpers } from './helpers/request-helpers'; +import { SSHTunnelHelpers } from './helpers/ssh-tunnel-helpers'; +import { NodeExecutionContext } from './node-execution-context'; + +export class LoadOptionsContext extends NodeExecutionContext implements ILoadOptionsFunctions { + readonly helpers: ILoadOptionsFunctions['helpers']; + + constructor( + workflow: Workflow, + node: INode, + additionalData: IWorkflowExecuteAdditionalData, + private readonly path: string, + ) { + super(workflow, node, additionalData, 'internal'); + + this.helpers = { + ...new RequestHelpers(this, workflow, node, additionalData).exported, + ...new SSHTunnelHelpers().exported, + }; + } + + async getCredentials(type: string) { + return await getCredentials(this.workflow, this.node, type, this.additionalData, this.mode); + } + + getCurrentNodeParameter( + parameterPath: string, + options?: IGetNodeParameterOptions, + ): NodeParameterValueType | object | undefined { + const nodeParameters = this.additionalData.currentNodeParameters; + + 
+		if (parameterPath.charAt(0) === '&') {
+			parameterPath = `${this.path.split('.').slice(1, -1).join('.')}.${parameterPath.slice(1)}`;
+		}
+
+		let returnData = get(nodeParameters, parameterPath);
+
+		// This is outside the try/catch because it throws errors with proper messages
+		if (options?.extractValue) {
+			const nodeType = this.workflow.nodeTypes.getByNameAndVersion(
+				this.node.type,
+				this.node.typeVersion,
+			);
+			returnData = extractValue(
+				returnData,
+				parameterPath,
+				this.node,
+				nodeType,
+			) as NodeParameterValueType;
+		}
+
+		return returnData;
+	}
+
+	getCurrentNodeParameters() {
+		return this.additionalData.currentNodeParameters;
+	}
+
+	getNodeParameter(
+		parameterName: string,
+		// eslint-disable-next-line @typescript-eslint/no-explicit-any
+		fallbackValue?: any,
+		options?: IGetNodeParameterOptions,
+	): NodeParameterValueType | object {
+		const runExecutionData: IRunExecutionData | null = null;
+		const itemIndex = 0;
+		const runIndex = 0;
+		const connectionInputData: INodeExecutionData[] = [];
+
+		return getNodeParameter(
+			this.workflow,
+			runExecutionData,
+			runIndex,
+			connectionInputData,
+			this.node,
+			parameterName,
+			itemIndex,
+			this.mode,
+			getAdditionalKeys(this.additionalData, this.mode, runExecutionData),
+			undefined,
+			fallbackValue,
+			options,
+		);
+	}
+}
diff --git a/packages/core/src/node-execution-context/node-execution-context.ts b/packages/core/src/node-execution-context/node-execution-context.ts new file mode 100644 index 0000000000..09c21b63a4 --- /dev/null +++ b/packages/core/src/node-execution-context/node-execution-context.ts @@ -0,0 +1,107 @@
+import type {
+	FunctionsBase,
+	INode,
+	INodeExecutionData,
+	IWorkflowExecuteAdditionalData,
+	NodeTypeAndVersion,
+	Workflow,
+	WorkflowExecuteMode,
+} from 'n8n-workflow';
+import { deepCopy, LoggerProxy } from 'n8n-workflow';
+import { Container } from 'typedi';
+
+import { InstanceSettings } from '@/InstanceSettings';
+
+export abstract class NodeExecutionContext implements Omit<FunctionsBase, 'getCredentials'> {
+	protected readonly instanceSettings = Container.get(InstanceSettings);
+
+	constructor(
+		protected readonly workflow: Workflow,
+		protected readonly node: INode,
+		protected readonly additionalData: IWorkflowExecuteAdditionalData,
+		protected readonly mode: WorkflowExecuteMode,
+	) {}
+
+	get logger() {
+		return LoggerProxy;
+	}
+
+	getExecutionId() {
+		return this.additionalData.executionId!;
+	}
+
+	getNode(): INode {
+		return deepCopy(this.node);
+	}
+
+	getWorkflow() {
+		const { id, name, active } = this.workflow;
+		return { id, name, active };
+	}
+
+	getMode() {
+		return this.mode;
+	}
+
+	getWorkflowStaticData(type: string) {
+		return this.workflow.getStaticData(type, this.node);
+	}
+
+	getChildNodes(nodeName: string) {
+		const output: NodeTypeAndVersion[] = [];
+		const nodeNames = this.workflow.getChildNodes(nodeName);
+
+		for (const n of nodeNames) {
+			const node = this.workflow.nodes[n];
+			output.push({
+				name: node.name,
+				type: node.type,
+				typeVersion: node.typeVersion,
+			});
+		}
+		return output;
+	}
+
+	getParentNodes(nodeName: string) {
+		const output: NodeTypeAndVersion[] = [];
+		const nodeNames = this.workflow.getParentNodes(nodeName);
+
+		for (const n of nodeNames) {
+			const node = this.workflow.nodes[n];
+			output.push({
+				name: node.name,
+				type: node.type,
+				typeVersion: node.typeVersion,
+			});
+		}
+		return output;
+	}
+
+	getKnownNodeTypes() {
+		return this.workflow.nodeTypes.getKnownTypes();
+	}
+
+	getRestApiUrl() {
+		return this.additionalData.restApiUrl;
+	}
+
+	getInstanceBaseUrl() {
+		return this.additionalData.instanceBaseUrl;
+	}
+
+	getInstanceId() {
+		return this.instanceSettings.instanceId;
+	}
+
+	getTimezone() {
+		return this.workflow.timezone;
+	}
+
+	getCredentialsProperties(type: string) {
+		return this.additionalData.credentialsHelper.getCredentialsProperties(type);
+	}
+
+	async prepareOutputData(outputData: INodeExecutionData[]) {
+		return [outputData];
+	}
+}
diff --git a/packages/core/src/node-execution-context/poll-context.ts b/packages/core/src/node-execution-context/poll-context.ts new file mode 100644 index 0000000000..88e8caafc8 --- /dev/null +++ b/packages/core/src/node-execution-context/poll-context.ts @@ -0,0 +1,94 @@
+import type {
+	ICredentialDataDecryptedObject,
+	IGetNodeParameterOptions,
+	INode,
+	INodeExecutionData,
+	IPollFunctions,
+	IRunExecutionData,
+	IWorkflowExecuteAdditionalData,
+	NodeParameterValueType,
+	Workflow,
+	WorkflowActivateMode,
+	WorkflowExecuteMode,
+} from 'n8n-workflow';
+import { ApplicationError, createDeferredPromise } from 'n8n-workflow';
+
+// eslint-disable-next-line import/no-cycle
+import {
+	getAdditionalKeys,
+	getCredentials,
+	getNodeParameter,
+	returnJsonArray,
+} from '@/NodeExecuteFunctions';
+
+import { BinaryHelpers } from './helpers/binary-helpers';
+import { RequestHelpers } from './helpers/request-helpers';
+import { SchedulingHelpers } from './helpers/scheduling-helpers';
+import { NodeExecutionContext } from './node-execution-context';
+
+const throwOnEmit = () => {
+	throw new ApplicationError('Overwrite PollContext.__emit function');
+};
+
+const throwOnEmitError = () => {
+	throw new ApplicationError('Overwrite PollContext.__emitError function');
+};
+
+export class PollContext extends NodeExecutionContext implements IPollFunctions {
+	readonly helpers: IPollFunctions['helpers'];
+
+	constructor(
+		workflow: Workflow,
+		node: INode,
+		additionalData: IWorkflowExecuteAdditionalData,
+		mode: WorkflowExecuteMode,
+		private readonly activation: WorkflowActivateMode,
+		readonly __emit: IPollFunctions['__emit'] = throwOnEmit,
+		readonly __emitError: IPollFunctions['__emitError'] = throwOnEmitError,
+	) {
+		super(workflow, node, additionalData, mode);
+
+		this.helpers = {
+			createDeferredPromise,
+			returnJsonArray,
+			...new BinaryHelpers(workflow, additionalData).exported,
+			...new RequestHelpers(this, workflow, node, additionalData).exported,
+			...new SchedulingHelpers(workflow).exported,
+		};
+	}
+
+	getActivationMode() {
+		return this.activation;
+	}
+
+	async getCredentials<T extends object = ICredentialDataDecryptedObject>(type: string) {
+		return await getCredentials<T>(this.workflow, this.node, type, this.additionalData, this.mode);
+	}
+
+	getNodeParameter(
+		parameterName: string,
+		// eslint-disable-next-line @typescript-eslint/no-explicit-any
+		fallbackValue?: any,
+		options?: IGetNodeParameterOptions,
+	): NodeParameterValueType | object {
+		const runExecutionData: IRunExecutionData | null = null;
+		const itemIndex = 0;
+		const runIndex = 0;
+		const connectionInputData: INodeExecutionData[] = [];
+
+		return getNodeParameter(
+			this.workflow,
+			runExecutionData,
+			runIndex,
+			connectionInputData,
+			this.node,
+			parameterName,
+			itemIndex,
+			this.mode,
+			getAdditionalKeys(this.additionalData, this.mode, runExecutionData),
+			undefined,
+			fallbackValue,
+			options,
+		);
+	}
+}
diff --git a/packages/core/src/node-execution-context/trigger-context.ts b/packages/core/src/node-execution-context/trigger-context.ts new file mode 100644 index 0000000000..8535ccfe6c --- /dev/null +++ b/packages/core/src/node-execution-context/trigger-context.ts @@ -0,0 +1,96 @@
+import type {
+	ICredentialDataDecryptedObject,
+	IGetNodeParameterOptions,
+	INode,
+	INodeExecutionData,
+	IRunExecutionData,
+	ITriggerFunctions,
+	IWorkflowExecuteAdditionalData,
+	NodeParameterValueType,
+	Workflow,
+	WorkflowActivateMode,
+	WorkflowExecuteMode,
+} from 'n8n-workflow';
+import { ApplicationError, createDeferredPromise } from 'n8n-workflow';
+
+// eslint-disable-next-line import/no-cycle
+import {
+	getAdditionalKeys,
+	getCredentials,
+	getNodeParameter,
+	returnJsonArray,
+} from '@/NodeExecuteFunctions';
+
+import { BinaryHelpers } from './helpers/binary-helpers';
+import { RequestHelpers } from './helpers/request-helpers';
+import { SchedulingHelpers } from './helpers/scheduling-helpers';
+import { SSHTunnelHelpers } from './helpers/ssh-tunnel-helpers';
+import { NodeExecutionContext } from './node-execution-context';
+
+const throwOnEmit = () => {
+	throw new ApplicationError('Overwrite TriggerContext.emit function');
+};
+
+const throwOnEmitError = () => {
+	throw new ApplicationError('Overwrite TriggerContext.emitError function');
+};
+
+export class TriggerContext extends NodeExecutionContext implements ITriggerFunctions {
+	readonly helpers: ITriggerFunctions['helpers'];
+
+	constructor(
+		workflow: Workflow,
+		node: INode,
+		additionalData: IWorkflowExecuteAdditionalData,
+		mode: WorkflowExecuteMode,
+		private readonly activation: WorkflowActivateMode,
+		readonly emit: ITriggerFunctions['emit'] = throwOnEmit,
+		readonly emitError: ITriggerFunctions['emitError'] = throwOnEmitError,
+	) {
+		super(workflow, node, additionalData, mode);
+
+		this.helpers = {
+			createDeferredPromise,
+			returnJsonArray,
+			...new BinaryHelpers(workflow, additionalData).exported,
+			...new RequestHelpers(this, workflow, node, additionalData).exported,
+			...new SchedulingHelpers(workflow).exported,
+			...new SSHTunnelHelpers().exported,
+		};
+	}
+
+	getActivationMode() {
+		return this.activation;
+	}
+
+	async getCredentials<T extends object = ICredentialDataDecryptedObject>(type: string) {
+		return await getCredentials<T>(this.workflow, this.node, type, this.additionalData, this.mode);
+	}
+
+	getNodeParameter(
+		parameterName: string,
+		// eslint-disable-next-line @typescript-eslint/no-explicit-any
+		fallbackValue?: any,
+		options?: IGetNodeParameterOptions,
+	): NodeParameterValueType | object {
+		const runExecutionData: IRunExecutionData | null = null;
+		const itemIndex = 0;
+		const runIndex = 0;
+		const connectionInputData: INodeExecutionData[] = [];
+
+		return getNodeParameter(
+			this.workflow,
+			runExecutionData,
+			runIndex,
+			connectionInputData,
+			this.node,
+			parameterName,
+			itemIndex,
+			this.mode,
+			getAdditionalKeys(this.additionalData, this.mode, runExecutionData),
+			undefined,
+			fallbackValue,
+			options,
+		);
+	}
+}
diff --git a/packages/core/src/node-execution-context/webhook-context.ts b/packages/core/src/node-execution-context/webhook-context.ts new file mode 100644 index 0000000000..a7fa7203c8 --- /dev/null +++ b/packages/core/src/node-execution-context/webhook-context.ts @@ -0,0 +1,233 @@
+import type { Request, Response } from 'express';
+import type {
+	CloseFunction,
+	ICredentialDataDecryptedObject,
+	IDataObject,
+	IExecuteData,
+	IGetNodeParameterOptions,
+	INode,
+	INodeExecutionData,
+	IRunExecutionData,
+	ITaskDataConnections,
+	IWebhookData,
+	IWebhookFunctions,
+	IWorkflowExecuteAdditionalData,
+	NodeConnectionType,
+	NodeParameterValueType,
+	WebhookType,
+	Workflow,
+	WorkflowExecuteMode,
+} from 'n8n-workflow';
+import { ApplicationError, createDeferredPromise } from 'n8n-workflow';
+
+// eslint-disable-next-line import/no-cycle
+import {
+	copyBinaryFile,
+	getAdditionalKeys,
+	getCredentials,
+	getInputConnectionData,
+	getNodeParameter,
+	getNodeWebhookUrl,
+	returnJsonArray,
+} from '@/NodeExecuteFunctions';
+
+import { BinaryHelpers } from './helpers/binary-helpers';
+import { RequestHelpers } from './helpers/request-helpers';
+import { NodeExecutionContext } from './node-execution-context';
+
+export class WebhookContext extends NodeExecutionContext implements IWebhookFunctions {
+	readonly helpers: IWebhookFunctions['helpers'];
+
+	readonly nodeHelpers: IWebhookFunctions['nodeHelpers'];
+
+	constructor(
+		workflow: Workflow,
+		node: INode,
+		additionalData: IWorkflowExecuteAdditionalData,
+		mode: WorkflowExecuteMode,
+		private readonly webhookData: IWebhookData,
+		private readonly closeFunctions: CloseFunction[],
+		private readonly runExecutionData: IRunExecutionData | null,
+	) {
+		super(workflow, node, additionalData, mode);
+
+		this.helpers = {
+			createDeferredPromise,
+			returnJsonArray,
+			...new BinaryHelpers(workflow, additionalData).exported,
+			...new RequestHelpers(this, workflow, node, additionalData).exported,
+		};
+
+		this.nodeHelpers = {
+			copyBinaryFile: async (filePath, fileName, mimeType) =>
+				await copyBinaryFile(
+					this.workflow.id,
+					this.additionalData.executionId!,
+					filePath,
+					fileName,
+					mimeType,
+				),
+		};
+	}
+
+	async getCredentials<T extends object = ICredentialDataDecryptedObject>(type: string) {
+		return await getCredentials<T>(this.workflow, this.node, type, this.additionalData, this.mode);
+	}
+
+	getBodyData() {
+		return this.assertHttpRequest().body as IDataObject;
+	}
+
+	getHeaderData() {
+		return this.assertHttpRequest().headers;
+	}
+
+	getParamsData(): object {
+		return this.assertHttpRequest().params;
+	}
+
+	getQueryData(): object {
+		return this.assertHttpRequest().query;
+	}
+
+	getRequestObject(): Request {
+		return this.assertHttpRequest();
+	}
+
+	getResponseObject(): Response {
+		if (this.additionalData.httpResponse === undefined) {
+			throw new ApplicationError('Response is missing');
+		}
+		return this.additionalData.httpResponse;
+	}
+
+	private assertHttpRequest() {
+		const { httpRequest } = this.additionalData;
+		if (httpRequest === undefined) {
+			throw new ApplicationError('Request is missing');
+		}
+		return httpRequest;
+	}
+
+	getNodeWebhookUrl(name: WebhookType): string | undefined {
+		return getNodeWebhookUrl(
+			name,
+			this.workflow,
+			this.node,
+			this.additionalData,
+			this.mode,
+			getAdditionalKeys(this.additionalData, this.mode, null),
+		);
+	}
+
+	getWebhookName() {
+		return this.webhookData.webhookDescription.name;
+	}
+
+	async getInputConnectionData(inputName: NodeConnectionType, itemIndex: number): Promise<unknown> {
+		// To be able to use expressions like "$json.sessionId" set the
+		// body data the webhook received to what is normally used for
+		// incoming node data.
+		const connectionInputData: INodeExecutionData[] = [
+			// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
+			{ json: this.additionalData.httpRequest?.body || {} },
+		];
+		const runExecutionData: IRunExecutionData = {
+			resultData: {
+				runData: {},
+			},
+		};
+		const executeData: IExecuteData = {
+			data: {
+				main: [connectionInputData],
+			},
+			node: this.node,
+			source: null,
+		};
+		const runIndex = 0;
+
+		return await getInputConnectionData.call(
+			this,
+			this.workflow,
+			runExecutionData,
+			runIndex,
+			connectionInputData,
+			{} as ITaskDataConnections,
+			this.additionalData,
+			executeData,
+			this.mode,
+			this.closeFunctions,
+			inputName,
+			itemIndex,
+		);
+	}
+
+	evaluateExpression(expression: string, evaluateItemIndex?: number) {
+		const itemIndex = evaluateItemIndex ?? 0;
+		const runIndex = 0;
+
+		let connectionInputData: INodeExecutionData[] = [];
+		let executionData: IExecuteData | undefined;
+
+		if (this.runExecutionData?.executionData !== undefined) {
+			executionData = this.runExecutionData.executionData.nodeExecutionStack[0];
+
+			if (executionData !== undefined) {
+				connectionInputData = executionData.data.main[0]!;
+			}
+		}
+
+		const additionalKeys = getAdditionalKeys(this.additionalData, this.mode, this.runExecutionData);
+
+		return this.workflow.expression.resolveSimpleParameterValue(
+			`=${expression}`,
+			{},
+			this.runExecutionData,
+			runIndex,
+			itemIndex,
+			this.node.name,
+			connectionInputData,
+			this.mode,
+			additionalKeys,
+			executionData,
+		);
+	}
+
+	getNodeParameter(
+		parameterName: string,
+		// eslint-disable-next-line @typescript-eslint/no-explicit-any
+		fallbackValue?: any,
+		options?: IGetNodeParameterOptions,
+	): NodeParameterValueType | object {
+		const itemIndex = 0;
+		const runIndex = 0;
+
+		let connectionInputData: INodeExecutionData[] = [];
+		let executionData: IExecuteData | undefined;
+
+		if (this.runExecutionData?.executionData !== undefined) {
+			executionData = this.runExecutionData.executionData.nodeExecutionStack[0];
+
+			if (executionData !== undefined) {
+				connectionInputData = executionData.data.main[0]!;
+			}
+		}
+
+		const additionalKeys = getAdditionalKeys(this.additionalData, this.mode, this.runExecutionData);
+
+		return getNodeParameter(
+			this.workflow,
+			this.runExecutionData,
+			runIndex,
+			connectionInputData,
+			this.node,
+			parameterName,
+			itemIndex,
+			this.mode,
+			additionalKeys,
+			executionData,
+			fallbackValue,
+			options,
+		);
+	}
+}
diff --git a/packages/editor-ui/index.html b/packages/editor-ui/index.html index 8176735fdd..25bbde030f 100644 --- a/packages/editor-ui/index.html +++ b/packages/editor-ui/index.html @@ -5,6 +5,7 @@
+
+