From ce963e88240a3ec92121577660a2c4e38e7cc85a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Iv=C3=A1n=20Ovejero?= Date: Mon, 4 Nov 2024 09:09:12 +0100 Subject: [PATCH 01/21] refactor(core): Port `pruning` config (#11507) --- .../@n8n/config/src/configs/pruning.config.ts | 35 ++++++++++++++ packages/@n8n/config/src/index.ts | 5 ++ packages/@n8n/config/test/config.test.ts | 8 ++++ packages/cli/src/config/schema.ts | 48 ------------------- .../repositories/execution.repository.ts | 6 +-- .../events/relays/telemetry.event-relay.ts | 4 +- packages/cli/src/services/frontend.service.ts | 6 +-- packages/cli/src/services/pruning.service.ts | 7 ++- .../test/integration/pruning.service.test.ts | 22 ++++----- 9 files changed, 68 insertions(+), 73 deletions(-) create mode 100644 packages/@n8n/config/src/configs/pruning.config.ts diff --git a/packages/@n8n/config/src/configs/pruning.config.ts b/packages/@n8n/config/src/configs/pruning.config.ts new file mode 100644 index 0000000000..109dffc462 --- /dev/null +++ b/packages/@n8n/config/src/configs/pruning.config.ts @@ -0,0 +1,35 @@ +import { Config, Env } from '../decorators'; + +@Config +export class PruningConfig { + /** Whether to delete past executions on a rolling basis. */ + @Env('EXECUTIONS_DATA_PRUNE') + isEnabled: boolean = true; + + /** How old (hours) a finished execution must be to qualify for soft-deletion. */ + @Env('EXECUTIONS_DATA_MAX_AGE') + maxAge: number = 336; + + /** + * Max number of finished executions to keep in database. Does not necessarily + * prune to the exact max number. `0` for unlimited. + */ + @Env('EXECUTIONS_DATA_PRUNE_MAX_COUNT') + maxCount: number = 10_000; + + /** + * How old (hours) a finished execution must be to qualify for hard-deletion. + * This buffer by default excludes recent executions as the user may need + * them while building a workflow. + */ + @Env('EXECUTIONS_DATA_HARD_DELETE_BUFFER') + hardDeleteBuffer: number = 1; + + /** How often (minutes) execution data should be hard-deleted. 
*/ + @Env('EXECUTIONS_DATA_PRUNE_HARD_DELETE_INTERVAL') + hardDeleteInterval: number = 15; + + /** How often (minutes) execution data should be soft-deleted */ + @Env('EXECUTIONS_DATA_PRUNE_SOFT_DELETE_INTERVAL') + softDeleteInterval: number = 60; +} diff --git a/packages/@n8n/config/src/index.ts b/packages/@n8n/config/src/index.ts index c056a1090c..0a89535ee3 100644 --- a/packages/@n8n/config/src/index.ts +++ b/packages/@n8n/config/src/index.ts @@ -10,6 +10,7 @@ import { LicenseConfig } from './configs/license.config'; import { LoggingConfig } from './configs/logging.config'; import { MultiMainSetupConfig } from './configs/multi-main-setup.config'; import { NodesConfig } from './configs/nodes.config'; +import { PruningConfig } from './configs/pruning.config'; import { PublicApiConfig } from './configs/public-api.config'; import { TaskRunnersConfig } from './configs/runners.config'; import { ScalingModeConfig } from './configs/scaling-mode.config'; @@ -24,6 +25,7 @@ import { Config, Env, Nested } from './decorators'; export { Config, Env, Nested } from './decorators'; export { TaskRunnersConfig } from './configs/runners.config'; export { SecurityConfig } from './configs/security.config'; +export { PruningConfig } from './configs/pruning.config'; export { FrontendBetaFeatures, FrontendConfig } from './configs/frontend.config'; export { LOG_SCOPES } from './configs/logging.config'; export type { LogScope } from './configs/logging.config'; @@ -112,4 +114,7 @@ export class GlobalConfig { @Nested security: SecurityConfig; + + @Nested + pruning: PruningConfig; } diff --git a/packages/@n8n/config/test/config.test.ts b/packages/@n8n/config/test/config.test.ts index 07af2c0a0b..bc10028f36 100644 --- a/packages/@n8n/config/test/config.test.ts +++ b/packages/@n8n/config/test/config.test.ts @@ -271,6 +271,14 @@ describe('GlobalConfig', () => { blockFileAccessToN8nFiles: true, daysAbandonedWorkflow: 90, }, + pruning: { + isEnabled: true, + maxAge: 336, + maxCount: 10_000, + hardDeleteBuffer: 1, + hardDeleteInterval: 15, + softDeleteInterval: 60, + }, }; it('should use all default values when no env variables are defined', () => { diff --git a/packages/cli/src/config/schema.ts b/packages/cli/src/config/schema.ts index 8bece9199a..9f8bc45232 100644 --- a/packages/cli/src/config/schema.ts +++ b/packages/cli/src/config/schema.ts @@ -98,54 +98,6 @@ export const schema = { env: 'EXECUTIONS_DATA_SAVE_MANUAL_EXECUTIONS', }, - // To not exceed the database's capacity and keep its size moderate - // the execution data gets pruned regularly (default: 15 minute interval). - // All saved execution data older than the max age will be deleted. - // Pruning is currently not activated by default, which will change in - // a future version. - pruneData: { - doc: 'Delete data of past executions on a rolling basis', - format: Boolean, - default: true, - env: 'EXECUTIONS_DATA_PRUNE', - }, - pruneDataMaxAge: { - doc: 'How old (hours) the finished execution data has to be to get soft-deleted', - format: Number, - default: 336, - env: 'EXECUTIONS_DATA_MAX_AGE', - }, - pruneDataHardDeleteBuffer: { - doc: 'How old (hours) the finished execution data has to be to get hard-deleted. 
By default, this buffer excludes recent executions as the user may need them while building a workflow.', - format: Number, - default: 1, - env: 'EXECUTIONS_DATA_HARD_DELETE_BUFFER', - }, - pruneDataIntervals: { - hardDelete: { - doc: 'How often (minutes) execution data should be hard-deleted', - format: Number, - default: 15, - env: 'EXECUTIONS_DATA_PRUNE_HARD_DELETE_INTERVAL', - }, - softDelete: { - doc: 'How often (minutes) execution data should be soft-deleted', - format: Number, - default: 60, - env: 'EXECUTIONS_DATA_PRUNE_SOFT_DELETE_INTERVAL', - }, - }, - - // Additional pruning option to delete executions if total count exceeds the configured max. - // Deletes the oldest entries first - // Set to 0 for No limit - pruneDataMaxCount: { - doc: "Maximum number of finished executions to keep in DB. Doesn't necessarily prune exactly to max number. 0 = no limit", - format: Number, - default: 10000, - env: 'EXECUTIONS_DATA_PRUNE_MAX_COUNT', - }, - queueRecovery: { interval: { doc: 'How often (minutes) to check for queue recovery', diff --git a/packages/cli/src/databases/repositories/execution.repository.ts b/packages/cli/src/databases/repositories/execution.repository.ts index ce4bedac2a..39f5e92cad 100644 --- a/packages/cli/src/databases/repositories/execution.repository.ts +++ b/packages/cli/src/databases/repositories/execution.repository.ts @@ -35,7 +35,6 @@ import type { } from 'n8n-workflow'; import { Service } from 'typedi'; -import config from '@/config'; import { AnnotationTagEntity } from '@/databases/entities/annotation-tag-entity.ee'; import { AnnotationTagMapping } from '@/databases/entities/annotation-tag-mapping.ee'; import { ExecutionAnnotation } from '@/databases/entities/execution-annotation.ee'; @@ -460,8 +459,7 @@ export class ExecutionRepository extends Repository { } async softDeletePrunableExecutions() { - const maxAge = config.getEnv('executions.pruneDataMaxAge'); // in h - const maxCount = config.getEnv('executions.pruneDataMaxCount'); + const { maxAge, maxCount } = this.globalConfig.pruning; // Sub-query to exclude executions having annotations const annotatedExecutionsSubQuery = this.manager @@ -517,7 +515,7 @@ export class ExecutionRepository extends Repository { async hardDeleteSoftDeletedExecutions() { const date = new Date(); - date.setHours(date.getHours() - config.getEnv('executions.pruneDataHardDeleteBuffer')); + date.setHours(date.getHours() - this.globalConfig.pruning.hardDeleteBuffer); const workflowIdsAndExecutionIds = ( await this.find({ diff --git a/packages/cli/src/events/relays/telemetry.event-relay.ts b/packages/cli/src/events/relays/telemetry.event-relay.ts index fc5cf0a53d..88f954ab93 100644 --- a/packages/cli/src/events/relays/telemetry.event-relay.ts +++ b/packages/cli/src/events/relays/telemetry.event-relay.ts @@ -771,8 +771,8 @@ export class TelemetryEventRelay extends EventRelay { executions_data_save_manual_executions: config.getEnv( 'executions.saveDataManualExecutions', ), - executions_data_prune: config.getEnv('executions.pruneData'), - executions_data_max_age: config.getEnv('executions.pruneDataMaxAge'), + executions_data_prune: this.globalConfig.pruning.isEnabled, + executions_data_max_age: this.globalConfig.pruning.maxAge, }, n8n_deployment_type: config.getEnv('deployment.type'), n8n_binary_data_mode: binaryDataConfig.mode, diff --git a/packages/cli/src/services/frontend.service.ts b/packages/cli/src/services/frontend.service.ts index 6cad4a4f24..7d9fe7d2b4 100644 --- a/packages/cli/src/services/frontend.service.ts +++ 
b/packages/cli/src/services/frontend.service.ts @@ -222,9 +222,9 @@ export class FrontendService { licensePruneTime: -1, }, pruning: { - isEnabled: config.getEnv('executions.pruneData'), - maxAge: config.getEnv('executions.pruneDataMaxAge'), - maxCount: config.getEnv('executions.pruneDataMaxCount'), + isEnabled: this.globalConfig.pruning.isEnabled, + maxAge: this.globalConfig.pruning.maxAge, + maxCount: this.globalConfig.pruning.maxCount, }, security: { blockFileAccessToN8nFiles: this.securityConfig.blockFileAccessToN8nFiles, diff --git a/packages/cli/src/services/pruning.service.ts b/packages/cli/src/services/pruning.service.ts index 0859dddd39..943f8d30ca 100644 --- a/packages/cli/src/services/pruning.service.ts +++ b/packages/cli/src/services/pruning.service.ts @@ -3,7 +3,6 @@ import { BinaryDataService, InstanceSettings } from 'n8n-core'; import { jsonStringify } from 'n8n-workflow'; import { Service } from 'typedi'; -import config from '@/config'; import { inTest, TIME } from '@/constants'; import { ExecutionRepository } from '@/databases/repositories/execution.repository'; import { OnShutdown } from '@/decorators/on-shutdown'; @@ -16,8 +15,8 @@ export class PruningService { private hardDeletionBatchSize = 100; private rates: Record = { - softDeletion: config.getEnv('executions.pruneDataIntervals.softDelete') * TIME.MINUTE, - hardDeletion: config.getEnv('executions.pruneDataIntervals.hardDelete') * TIME.MINUTE, + softDeletion: this.globalConfig.pruning.softDeleteInterval * TIME.MINUTE, + hardDeletion: this.globalConfig.pruning.hardDeleteInterval * TIME.MINUTE, }; public softDeletionInterval: NodeJS.Timer | undefined; @@ -52,7 +51,7 @@ export class PruningService { private isPruningEnabled() { const { instanceType, isFollower } = this.instanceSettings; - if (!config.getEnv('executions.pruneData') || inTest || instanceType !== 'main') { + if (!this.globalConfig.pruning.isEnabled || inTest || instanceType !== 'main') { return false; } diff --git a/packages/cli/test/integration/pruning.service.test.ts b/packages/cli/test/integration/pruning.service.test.ts index 5a53d70315..7ade1d3fe5 100644 --- a/packages/cli/test/integration/pruning.service.test.ts +++ b/packages/cli/test/integration/pruning.service.test.ts @@ -1,9 +1,9 @@ +import { GlobalConfig } from '@n8n/config'; import { mock } from 'jest-mock-extended'; import { BinaryDataService, InstanceSettings } from 'n8n-core'; import type { ExecutionStatus } from 'n8n-workflow'; import Container from 'typedi'; -import config from '@/config'; import { TIME } from '@/constants'; import type { ExecutionEntity } from '@/databases/entities/execution-entity'; import type { WorkflowEntity } from '@/databases/entities/workflow-entity'; @@ -28,17 +28,19 @@ describe('softDeleteOnPruningCycle()', () => { const now = new Date(); const yesterday = new Date(Date.now() - TIME.DAY); let workflow: WorkflowEntity; + let globalConfig: GlobalConfig; beforeAll(async () => { await testDb.init(); + globalConfig = Container.get(GlobalConfig); pruningService = new PruningService( mockInstance(Logger), instanceSettings, Container.get(ExecutionRepository), mockInstance(BinaryDataService), mock(), - mock(), + globalConfig, ); workflow = await createWorkflow(); @@ -52,10 +54,6 @@ describe('softDeleteOnPruningCycle()', () => { await testDb.terminate(); }); - afterEach(() => { - config.load(config.default); - }); - async function findAllExecutions() { return await Container.get(ExecutionRepository).find({ order: { id: 'asc' }, @@ -64,9 +62,9 @@ 
describe('softDeleteOnPruningCycle()', () => { } describe('when EXECUTIONS_DATA_PRUNE_MAX_COUNT is set', () => { - beforeEach(() => { - config.set('executions.pruneDataMaxCount', 1); - config.set('executions.pruneDataMaxAge', 336); + beforeAll(() => { + globalConfig.pruning.maxAge = 336; + globalConfig.pruning.maxCount = 1; }); test('should mark as deleted based on EXECUTIONS_DATA_PRUNE_MAX_COUNT', async () => { @@ -165,9 +163,9 @@ describe('softDeleteOnPruningCycle()', () => { }); describe('when EXECUTIONS_DATA_MAX_AGE is set', () => { - beforeEach(() => { - config.set('executions.pruneDataMaxAge', 1); // 1h - config.set('executions.pruneDataMaxCount', 0); + beforeAll(() => { + globalConfig.pruning.maxAge = 1; + globalConfig.pruning.maxCount = 0; }); test('should mark as deleted based on EXECUTIONS_DATA_MAX_AGE', async () => { From 2104fa1733f298f2a0b51b42ec012c6393fd1fa6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E0=A4=95=E0=A4=BE=E0=A4=B0=E0=A4=A4=E0=A5=8B=E0=A4=AB?= =?UTF-8?q?=E0=A5=8D=E0=A4=AB=E0=A5=87=E0=A4=B2=E0=A4=B8=E0=A5=8D=E0=A4=95?= =?UTF-8?q?=E0=A5=8D=E0=A4=B0=E0=A4=BF=E0=A4=AA=E0=A5=8D=E0=A4=9F=E2=84=A2?= Date: Mon, 4 Nov 2024 09:49:52 +0100 Subject: [PATCH 02/21] refactor(core): Extract poll context out of NodeExecutionFunctions (no-changelog) (#11449) --- packages/cli/src/active-workflow-manager.ts | 18 +- packages/core/package.json | 4 +- packages/core/src/NodeExecuteFunctions.ts | 67 +-- packages/core/src/index.ts | 1 + .../__tests__/base-context.test.ts | 168 ++++++++ .../__tests__/poll-context.test.ts | 96 +++++ .../helpers/__tests__/binary-helpers.test.ts | 136 +++++++ .../__tests__/scheduling-helpers.test.ts | 33 ++ .../helpers/binary-helpers.ts | 148 +++++++ .../helpers/request-helpers.ts | 381 ++++++++++++++++++ .../helpers/scheduling-helpers.ts | 20 + .../core/src/node-execution-context/index.ts | 2 + .../node-execution-context.ts | 107 +++++ .../node-execution-context/poll-context.ts | 94 +++++ .../nodes-base/test/nodes/TriggerHelpers.ts | 29 +- packages/workflow/src/Interfaces.ts | 21 +- 16 files changed, 1222 insertions(+), 103 deletions(-) create mode 100644 packages/core/src/node-execution-context/__tests__/base-context.test.ts create mode 100644 packages/core/src/node-execution-context/__tests__/poll-context.test.ts create mode 100644 packages/core/src/node-execution-context/helpers/__tests__/binary-helpers.test.ts create mode 100644 packages/core/src/node-execution-context/helpers/__tests__/scheduling-helpers.test.ts create mode 100644 packages/core/src/node-execution-context/helpers/binary-helpers.ts create mode 100644 packages/core/src/node-execution-context/helpers/request-helpers.ts create mode 100644 packages/core/src/node-execution-context/helpers/scheduling-helpers.ts create mode 100644 packages/core/src/node-execution-context/index.ts create mode 100644 packages/core/src/node-execution-context/node-execution-context.ts create mode 100644 packages/core/src/node-execution-context/poll-context.ts diff --git a/packages/cli/src/active-workflow-manager.ts b/packages/cli/src/active-workflow-manager.ts index 189c446b65..de3ff6d25e 100644 --- a/packages/cli/src/active-workflow-manager.ts +++ b/packages/cli/src/active-workflow-manager.ts @@ -1,6 +1,6 @@ /* eslint-disable @typescript-eslint/no-unsafe-member-access */ -import { ActiveWorkflows, InstanceSettings, NodeExecuteFunctions } from 'n8n-core'; +import { ActiveWorkflows, InstanceSettings, NodeExecuteFunctions, PollContext } from 'n8n-core'; import type { ExecutionError, IDeferredPromise, @@ -274,18 +274,11 
@@ export class ActiveWorkflowManager { activation: WorkflowActivateMode, ): IGetExecutePollFunctions { return (workflow: Workflow, node: INode) => { - const returnFunctions = NodeExecuteFunctions.getExecutePollFunctions( - workflow, - node, - additionalData, - mode, - activation, - ); - returnFunctions.__emit = ( + const __emit = ( data: INodeExecutionData[][], responsePromise?: IDeferredPromise, donePromise?: IDeferredPromise, - ): void => { + ) => { this.logger.debug(`Received event to trigger execution for workflow "${workflow.name}"`); void this.workflowStaticDataService.saveStaticData(workflow); const executePromise = this.workflowExecutionService.runWorkflow( @@ -309,14 +302,15 @@ export class ActiveWorkflowManager { } }; - returnFunctions.__emitError = (error: ExecutionError): void => { + const __emitError = (error: ExecutionError) => { void this.executionService .createErrorExecution(error, node, workflowData, workflow, mode) .then(() => { this.executeErrorWorkflow(error, workflowData, mode); }); }; - return returnFunctions; + + return new PollContext(workflow, node, additionalData, mode, activation, __emit, __emitError); }; } diff --git a/packages/core/package.json b/packages/core/package.json index f9a9d90856..32fa66a297 100644 --- a/packages/core/package.json +++ b/packages/core/package.json @@ -13,13 +13,13 @@ "scripts": { "clean": "rimraf dist .turbo", "typecheck": "tsc --noEmit", - "build": "tsc -p tsconfig.build.json", + "build": "tsc -p tsconfig.build.json && tsc-alias -p tsconfig.build.json", "dev": "pnpm watch", "format": "biome format --write .", "format:check": "biome ci .", "lint": "eslint . --quiet", "lintfix": "eslint . --fix", - "watch": "tsc -p tsconfig.build.json --watch", + "watch": "tsc-watch -p tsconfig.build.json --onCompilationComplete \"tsc-alias -p tsconfig.build.json\"", "test": "jest" }, "files": [ diff --git a/packages/core/src/NodeExecuteFunctions.ts b/packages/core/src/NodeExecuteFunctions.ts index 10c44efced..b6028b9194 100644 --- a/packages/core/src/NodeExecuteFunctions.ts +++ b/packages/core/src/NodeExecuteFunctions.ts @@ -167,6 +167,8 @@ import { import { extractValue } from './ExtractValue'; import { InstanceSettings } from './InstanceSettings'; import type { ExtendedValidationResult, IResponseError } from './Interfaces'; +// eslint-disable-next-line import/no-cycle +import { PollContext } from './node-execution-context'; import { ScheduledTaskManager } from './ScheduledTaskManager'; import { getSecretsProxy } from './Secrets'; import { SSHClientsManager } from './SSHClientsManager'; @@ -215,7 +217,7 @@ const createFormDataObject = (data: Record) => { return formData; }; -const validateUrl = (url?: string): boolean => { +export const validateUrl = (url?: string): boolean => { if (!url) return false; try { @@ -776,7 +778,7 @@ export function parseIncomingMessage(message: IncomingMessage) { } } -async function binaryToString(body: Buffer | Readable, encoding?: BufferEncoding) { +export async function binaryToString(body: Buffer | Readable, encoding?: BufferEncoding) { const buffer = await binaryToBuffer(body); if (!encoding && body instanceof IncomingMessage) { parseIncomingMessage(body); @@ -1010,7 +1012,7 @@ export const removeEmptyBody = (requestOptions: IHttpRequestOptions | IRequestOp } }; -async function httpRequest( +export async function httpRequest( requestOptions: IHttpRequestOptions, ): Promise { removeEmptyBody(requestOptions); @@ -1205,7 +1207,7 @@ export async function copyBinaryFile( * base64 and adds metadata. 
*/ // eslint-disable-next-line complexity -async function prepareBinaryData( +export async function prepareBinaryData( binaryData: Buffer | Readable, executionId: string, workflowId: string, @@ -1348,6 +1350,7 @@ export async function clearAllProcessedItems( options, ); } + export async function getProcessedDataCount( scope: DeduplicationScope, contextData: ICheckProcessedContextData, @@ -1359,7 +1362,8 @@ export async function getProcessedDataCount( options, ); } -function applyPaginationRequestData( + +export function applyPaginationRequestData( requestData: IRequestOptions, paginationRequestData: PaginationOptions['request'], ): IRequestOptions { @@ -3553,57 +3557,7 @@ export function getExecutePollFunctions( mode: WorkflowExecuteMode, activation: WorkflowActivateMode, ): IPollFunctions { - return ((workflow: Workflow, node: INode) => { - return { - ...getCommonWorkflowFunctions(workflow, node, additionalData), - __emit: (): void => { - throw new ApplicationError( - 'Overwrite NodeExecuteFunctions.getExecutePollFunctions.__emit function', - ); - }, - __emitError() { - throw new ApplicationError( - 'Overwrite NodeExecuteFunctions.getExecutePollFunctions.__emitError function', - ); - }, - getMode: () => mode, - getActivationMode: () => activation, - getCredentials: async (type) => - await getCredentials(workflow, node, type, additionalData, mode), - getNodeParameter: ( - parameterName: string, - fallbackValue?: any, - options?: IGetNodeParameterOptions, - ): NodeParameterValueType | object => { - const runExecutionData: IRunExecutionData | null = null; - const itemIndex = 0; - const runIndex = 0; - const connectionInputData: INodeExecutionData[] = []; - - return getNodeParameter( - workflow, - runExecutionData, - runIndex, - connectionInputData, - node, - parameterName, - itemIndex, - mode, - getAdditionalKeys(additionalData, mode, runExecutionData), - undefined, - fallbackValue, - options, - ); - }, - helpers: { - createDeferredPromise, - ...getRequestHelperFunctions(workflow, node, additionalData), - ...getBinaryHelperFunctions(additionalData, workflow.id), - ...getSchedulingFunctions(workflow), - returnJsonArray, - }, - }; - })(workflow, node); + return new PollContext(workflow, node, additionalData, mode, activation); } /** @@ -4400,6 +4354,7 @@ export function getExecuteSingleFunctions( }, helpers: { createDeferredPromise, + returnJsonArray, ...getRequestHelperFunctions( workflow, node, diff --git a/packages/core/src/index.ts b/packages/core/src/index.ts index ebe240b51e..cfdae07dc6 100644 --- a/packages/core/src/index.ts +++ b/packages/core/src/index.ts @@ -20,3 +20,4 @@ export { ObjectStoreService } from './ObjectStore/ObjectStore.service.ee'; export { BinaryData } from './BinaryData/types'; export { isStoredMode as isValidNonDefaultMode } from './BinaryData/utils'; export * from './ExecutionMetadata'; +export * from './node-execution-context'; diff --git a/packages/core/src/node-execution-context/__tests__/base-context.test.ts b/packages/core/src/node-execution-context/__tests__/base-context.test.ts new file mode 100644 index 0000000000..aadc630a38 --- /dev/null +++ b/packages/core/src/node-execution-context/__tests__/base-context.test.ts @@ -0,0 +1,168 @@ +import { mock } from 'jest-mock-extended'; +import type { + INode, + INodeExecutionData, + IWorkflowExecuteAdditionalData, + Workflow, + WorkflowExecuteMode, +} from 'n8n-workflow'; +import { Container } from 'typedi'; + +import { InstanceSettings } from '@/InstanceSettings'; + +import { NodeExecutionContext } from 
'../node-execution-context'; + +class TestContext extends NodeExecutionContext {} + +describe('BaseContext', () => { + const instanceSettings = mock({ instanceId: 'abc123' }); + Container.set(InstanceSettings, instanceSettings); + + const workflow = mock({ + id: '123', + name: 'Test Workflow', + active: true, + nodeTypes: mock(), + timezone: 'UTC', + }); + const node = mock(); + let additionalData = mock({ + credentialsHelper: mock(), + }); + + const mode: WorkflowExecuteMode = 'manual'; + const testContext = new TestContext(workflow, node, additionalData, mode); + + beforeEach(() => { + jest.clearAllMocks(); + }); + + describe('getNode', () => { + it('should return a deep copy of the node', () => { + const result = testContext.getNode(); + expect(result).not.toBe(node); + expect(JSON.stringify(result)).toEqual(JSON.stringify(node)); + }); + }); + + describe('getWorkflow', () => { + it('should return the id, name, and active properties of the workflow', () => { + const result = testContext.getWorkflow(); + + expect(result).toEqual({ id: '123', name: 'Test Workflow', active: true }); + }); + }); + + describe('getMode', () => { + it('should return the mode property', () => { + const result = testContext.getMode(); + expect(result).toBe(mode); + }); + }); + + describe('getWorkflowStaticData', () => { + it('should call getStaticData method of workflow', () => { + testContext.getWorkflowStaticData('testType'); + expect(workflow.getStaticData).toHaveBeenCalledWith('testType', node); + }); + }); + + describe('getChildNodes', () => { + it('should return an array of NodeTypeAndVersion objects for the child nodes of the given node', () => { + const childNode1 = mock({ name: 'Child Node 1', type: 'testType1', typeVersion: 1 }); + const childNode2 = mock({ name: 'Child Node 2', type: 'testType2', typeVersion: 2 }); + workflow.getChildNodes.mockReturnValue(['Child Node 1', 'Child Node 2']); + workflow.nodes = { + 'Child Node 1': childNode1, + 'Child Node 2': childNode2, + }; + + const result = testContext.getChildNodes('Test Node'); + + expect(result).toEqual([ + { name: 'Child Node 1', type: 'testType1', typeVersion: 1 }, + { name: 'Child Node 2', type: 'testType2', typeVersion: 2 }, + ]); + }); + }); + + describe('getParentNodes', () => { + it('should return an array of NodeTypeAndVersion objects for the parent nodes of the given node', () => { + const parentNode1 = mock({ name: 'Parent Node 1', type: 'testType1', typeVersion: 1 }); + const parentNode2 = mock({ name: 'Parent Node 2', type: 'testType2', typeVersion: 2 }); + workflow.getParentNodes.mockReturnValue(['Parent Node 1', 'Parent Node 2']); + workflow.nodes = { + 'Parent Node 1': parentNode1, + 'Parent Node 2': parentNode2, + }; + + const result = testContext.getParentNodes('Test Node'); + + expect(result).toEqual([ + { name: 'Parent Node 1', type: 'testType1', typeVersion: 1 }, + { name: 'Parent Node 2', type: 'testType2', typeVersion: 2 }, + ]); + }); + }); + + describe('getKnownNodeTypes', () => { + it('should call getKnownTypes method of workflow.nodeTypes', () => { + testContext.getKnownNodeTypes(); + expect(workflow.nodeTypes.getKnownTypes).toHaveBeenCalled(); + }); + }); + + describe('getRestApiUrl', () => { + it('should return the restApiUrl property of additionalData', () => { + additionalData.restApiUrl = 'https://example.com/api'; + + const result = testContext.getRestApiUrl(); + + expect(result).toBe('https://example.com/api'); + }); + }); + + describe('getInstanceBaseUrl', () => { + it('should return the instanceBaseUrl property 
of additionalData', () => { + additionalData.instanceBaseUrl = 'https://example.com'; + + const result = testContext.getInstanceBaseUrl(); + + expect(result).toBe('https://example.com'); + }); + }); + + describe('getInstanceId', () => { + it('should return the instanceId property of instanceSettings', () => { + const result = testContext.getInstanceId(); + + expect(result).toBe('abc123'); + }); + }); + + describe('getTimezone', () => { + it('should return the timezone property of workflow', () => { + const result = testContext.getTimezone(); + expect(result).toBe('UTC'); + }); + }); + + describe('getCredentialsProperties', () => { + it('should call getCredentialsProperties method of additionalData.credentialsHelper', () => { + testContext.getCredentialsProperties('testType'); + expect(additionalData.credentialsHelper.getCredentialsProperties).toHaveBeenCalledWith( + 'testType', + ); + }); + }); + + describe('prepareOutputData', () => { + it('should return the input array wrapped in another array', async () => { + const outputData = [mock(), mock()]; + + const result = await testContext.prepareOutputData(outputData); + + expect(result).toEqual([outputData]); + }); + }); +}); diff --git a/packages/core/src/node-execution-context/__tests__/poll-context.test.ts b/packages/core/src/node-execution-context/__tests__/poll-context.test.ts new file mode 100644 index 0000000000..f2ca37998b --- /dev/null +++ b/packages/core/src/node-execution-context/__tests__/poll-context.test.ts @@ -0,0 +1,96 @@ +import { mock } from 'jest-mock-extended'; +import type { + Expression, + ICredentialDataDecryptedObject, + ICredentialsHelper, + INode, + INodeType, + INodeTypes, + IWorkflowExecuteAdditionalData, + Workflow, + WorkflowActivateMode, + WorkflowExecuteMode, +} from 'n8n-workflow'; + +import { PollContext } from '../poll-context'; + +describe('PollContext', () => { + const testCredentialType = 'testCredential'; + const nodeType = mock({ + description: { + credentials: [ + { + name: testCredentialType, + required: true, + }, + ], + properties: [ + { + name: 'testParameter', + required: true, + }, + ], + }, + }); + const nodeTypes = mock(); + const expression = mock(); + const workflow = mock({ expression, nodeTypes }); + const node = mock({ + credentials: { + [testCredentialType]: { + id: 'testCredentialId', + }, + }, + }); + node.parameters = { + testParameter: 'testValue', + }; + const credentialsHelper = mock(); + const additionalData = mock({ credentialsHelper }); + const mode: WorkflowExecuteMode = 'manual'; + const activation: WorkflowActivateMode = 'init'; + + const pollContext = new PollContext(workflow, node, additionalData, mode, activation); + + beforeEach(() => { + jest.clearAllMocks(); + }); + + describe('getActivationMode', () => { + it('should return the activation property', () => { + const result = pollContext.getActivationMode(); + expect(result).toBe(activation); + }); + }); + + describe('getCredentials', () => { + it('should get decrypted credentials', async () => { + nodeTypes.getByNameAndVersion.mockReturnValue(nodeType); + credentialsHelper.getDecrypted.mockResolvedValue({ secret: 'token' }); + + const credentials = + await pollContext.getCredentials(testCredentialType); + + expect(credentials).toEqual({ secret: 'token' }); + }); + }); + + describe('getNodeParameter', () => { + beforeEach(() => { + nodeTypes.getByNameAndVersion.mockReturnValue(nodeType); + expression.getParameterValue.mockImplementation((value) => value); + }); + + it('should return parameter value when it exists', () => 
{ + const parameter = pollContext.getNodeParameter('testParameter'); + + expect(parameter).toBe('testValue'); + }); + + it('should return the fallback value when the parameter does not exist', () => { + const parameter = pollContext.getNodeParameter('otherParameter', 'fallback'); + + expect(parameter).toBe('fallback'); + }); + }); +}); diff --git a/packages/core/src/node-execution-context/helpers/__tests__/binary-helpers.test.ts b/packages/core/src/node-execution-context/helpers/__tests__/binary-helpers.test.ts new file mode 100644 index 0000000000..302713954f --- /dev/null +++ b/packages/core/src/node-execution-context/helpers/__tests__/binary-helpers.test.ts @@ -0,0 +1,136 @@ +import FileType from 'file-type'; +import { IncomingMessage, type ClientRequest } from 'http'; +import { mock } from 'jest-mock-extended'; +import type { Workflow, IWorkflowExecuteAdditionalData, IBinaryData } from 'n8n-workflow'; +import type { Socket } from 'net'; +import { Container } from 'typedi'; + +import { BinaryDataService } from '@/BinaryData/BinaryData.service'; + +import { BinaryHelpers } from '../binary-helpers'; + +jest.mock('file-type'); + +describe('BinaryHelpers', () => { + let binaryDataService = mock(); + Container.set(BinaryDataService, binaryDataService); + const workflow = mock({ id: '123' }); + const additionalData = mock({ executionId: '456' }); + const binaryHelpers = new BinaryHelpers(workflow, additionalData); + + beforeEach(() => { + jest.clearAllMocks(); + + binaryDataService.store.mockImplementation( + async (_workflowId, _executionId, _buffer, value) => value, + ); + }); + + describe('getBinaryPath', () => { + it('should call getPath method of BinaryDataService', () => { + binaryHelpers.getBinaryPath('mock-binary-data-id'); + expect(binaryDataService.getPath).toHaveBeenCalledWith('mock-binary-data-id'); + }); + }); + + describe('getBinaryMetadata', () => { + it('should call getMetadata method of BinaryDataService', async () => { + await binaryHelpers.getBinaryMetadata('mock-binary-data-id'); + expect(binaryDataService.getMetadata).toHaveBeenCalledWith('mock-binary-data-id'); + }); + }); + + describe('getBinaryStream', () => { + it('should call getStream method of BinaryDataService', async () => { + await binaryHelpers.getBinaryStream('mock-binary-data-id'); + expect(binaryDataService.getAsStream).toHaveBeenCalledWith('mock-binary-data-id', undefined); + }); + }); + + describe('prepareBinaryData', () => { + it('should guess the mime type and file extension if not provided', async () => { + const buffer = Buffer.from('test'); + const fileTypeData = { mime: 'application/pdf', ext: 'pdf' }; + (FileType.fromBuffer as jest.Mock).mockResolvedValue(fileTypeData); + + const binaryData = await binaryHelpers.prepareBinaryData(buffer); + + expect(binaryData.mimeType).toEqual('application/pdf'); + expect(binaryData.fileExtension).toEqual('pdf'); + expect(binaryData.fileType).toEqual('pdf'); + expect(binaryData.fileName).toBeUndefined(); + expect(binaryData.directory).toBeUndefined(); + expect(binaryDataService.store).toHaveBeenCalledWith( + workflow.id, + additionalData.executionId!, + buffer, + binaryData, + ); + }); + + it('should use the provided mime type and file extension if provided', async () => { + const buffer = Buffer.from('test'); + const mimeType = 'application/octet-stream'; + + const binaryData = await binaryHelpers.prepareBinaryData(buffer, undefined, mimeType); + + expect(binaryData.mimeType).toEqual(mimeType); + expect(binaryData.fileExtension).toEqual('bin'); + 
expect(binaryData.fileType).toBeUndefined(); + expect(binaryData.fileName).toBeUndefined(); + expect(binaryData.directory).toBeUndefined(); + expect(binaryDataService.store).toHaveBeenCalledWith( + workflow.id, + additionalData.executionId!, + buffer, + binaryData, + ); + }); + + const mockSocket = mock({ readableHighWaterMark: 0 }); + + it('should use the contentDisposition.filename, responseUrl, and contentType properties to set the fileName, directory, and mimeType properties of the binaryData object', async () => { + const incomingMessage = new IncomingMessage(mockSocket); + incomingMessage.contentDisposition = { filename: 'test.txt', type: 'attachment' }; + incomingMessage.contentType = 'text/plain'; + incomingMessage.responseUrl = 'https://example.com/test.txt'; + + const binaryData = await binaryHelpers.prepareBinaryData(incomingMessage); + + expect(binaryData.fileName).toEqual('test.txt'); + expect(binaryData.fileType).toEqual('text'); + expect(binaryData.directory).toBeUndefined(); + expect(binaryData.mimeType).toEqual('text/plain'); + expect(binaryData.fileExtension).toEqual('txt'); + }); + + it('should use the req.path property to set the fileName property of the binaryData object if contentDisposition.filename and responseUrl are not provided', async () => { + const incomingMessage = new IncomingMessage(mockSocket); + incomingMessage.contentType = 'text/plain'; + incomingMessage.req = mock({ path: '/test.txt' }); + + const binaryData = await binaryHelpers.prepareBinaryData(incomingMessage); + + expect(binaryData.fileName).toEqual('test.txt'); + expect(binaryData.directory).toBeUndefined(); + expect(binaryData.mimeType).toEqual('text/plain'); + expect(binaryData.fileExtension).toEqual('txt'); + }); + }); + + describe('setBinaryDataBuffer', () => { + it('should call store method of BinaryDataService', async () => { + const binaryData = mock(); + const bufferOrStream = mock(); + + await binaryHelpers.setBinaryDataBuffer(binaryData, bufferOrStream); + + expect(binaryDataService.store).toHaveBeenCalledWith( + workflow.id, + additionalData.executionId, + bufferOrStream, + binaryData, + ); + }); + }); +}); diff --git a/packages/core/src/node-execution-context/helpers/__tests__/scheduling-helpers.test.ts b/packages/core/src/node-execution-context/helpers/__tests__/scheduling-helpers.test.ts new file mode 100644 index 0000000000..06abae8204 --- /dev/null +++ b/packages/core/src/node-execution-context/helpers/__tests__/scheduling-helpers.test.ts @@ -0,0 +1,33 @@ +import { mock } from 'jest-mock-extended'; +import type { Workflow } from 'n8n-workflow'; +import { Container } from 'typedi'; + +import { ScheduledTaskManager } from '@/ScheduledTaskManager'; + +import { SchedulingHelpers } from '../scheduling-helpers'; + +describe('SchedulingHelpers', () => { + const scheduledTaskManager = mock(); + Container.set(ScheduledTaskManager, scheduledTaskManager); + const workflow = mock(); + const schedulingHelpers = new SchedulingHelpers(workflow); + + beforeEach(() => { + jest.clearAllMocks(); + }); + + describe('registerCron', () => { + it('should call registerCron method of ScheduledTaskManager', () => { + const cronExpression = '* * * * * *'; + const onTick = jest.fn(); + + schedulingHelpers.registerCron(cronExpression, onTick); + + expect(scheduledTaskManager.registerCron).toHaveBeenCalledWith( + workflow, + cronExpression, + onTick, + ); + }); + }); +}); diff --git a/packages/core/src/node-execution-context/helpers/binary-helpers.ts 
b/packages/core/src/node-execution-context/helpers/binary-helpers.ts new file mode 100644 index 0000000000..a15c59139b --- /dev/null +++ b/packages/core/src/node-execution-context/helpers/binary-helpers.ts @@ -0,0 +1,148 @@ +import FileType from 'file-type'; +import { IncomingMessage } from 'http'; +import MimeTypes from 'mime-types'; +import { ApplicationError, fileTypeFromMimeType } from 'n8n-workflow'; +import type { + BinaryHelperFunctions, + IWorkflowExecuteAdditionalData, + Workflow, + IBinaryData, +} from 'n8n-workflow'; +import path from 'path'; +import type { Readable } from 'stream'; +import Container from 'typedi'; + +import { BinaryDataService } from '@/BinaryData/BinaryData.service'; +import { binaryToBuffer } from '@/BinaryData/utils'; +// eslint-disable-next-line import/no-cycle +import { binaryToString } from '@/NodeExecuteFunctions'; + +export class BinaryHelpers { + private readonly binaryDataService = Container.get(BinaryDataService); + + constructor( + private readonly workflow: Workflow, + private readonly additionalData: IWorkflowExecuteAdditionalData, + ) {} + + get exported(): BinaryHelperFunctions { + return { + getBinaryPath: this.getBinaryPath.bind(this), + getBinaryMetadata: this.getBinaryMetadata.bind(this), + getBinaryStream: this.getBinaryStream.bind(this), + binaryToBuffer, + binaryToString, + prepareBinaryData: this.prepareBinaryData.bind(this), + setBinaryDataBuffer: this.setBinaryDataBuffer.bind(this), + copyBinaryFile: this.copyBinaryFile.bind(this), + }; + } + + getBinaryPath(binaryDataId: string) { + return this.binaryDataService.getPath(binaryDataId); + } + + async getBinaryMetadata(binaryDataId: string) { + return await this.binaryDataService.getMetadata(binaryDataId); + } + + async getBinaryStream(binaryDataId: string, chunkSize?: number) { + return await this.binaryDataService.getAsStream(binaryDataId, chunkSize); + } + + // eslint-disable-next-line complexity + async prepareBinaryData(binaryData: Buffer | Readable, filePath?: string, mimeType?: string) { + let fileExtension: string | undefined; + if (binaryData instanceof IncomingMessage) { + if (!filePath) { + try { + const { responseUrl } = binaryData; + filePath = + binaryData.contentDisposition?.filename ?? + ((responseUrl && new URL(responseUrl).pathname) ?? 
binaryData.req?.path)?.slice(1); + } catch {} + } + if (!mimeType) { + mimeType = binaryData.contentType; + } + } + + if (!mimeType) { + // If no mime type is given figure it out + + if (filePath) { + // Use file path to guess mime type + const mimeTypeLookup = MimeTypes.lookup(filePath); + if (mimeTypeLookup) { + mimeType = mimeTypeLookup; + } + } + + if (!mimeType) { + if (Buffer.isBuffer(binaryData)) { + // Use buffer to guess mime type + const fileTypeData = await FileType.fromBuffer(binaryData); + if (fileTypeData) { + mimeType = fileTypeData.mime; + fileExtension = fileTypeData.ext; + } + } else if (binaryData instanceof IncomingMessage) { + mimeType = binaryData.headers['content-type']; + } else { + // TODO: detect filetype from other kind of streams + } + } + } + + if (!fileExtension && mimeType) { + fileExtension = MimeTypes.extension(mimeType) || undefined; + } + + if (!mimeType) { + // Fall back to text + mimeType = 'text/plain'; + } + + const returnData: IBinaryData = { + mimeType, + fileType: fileTypeFromMimeType(mimeType), + fileExtension, + data: '', + }; + + if (filePath) { + if (filePath.includes('?')) { + // Remove maybe present query parameters + filePath = filePath.split('?').shift(); + } + + const filePathParts = path.parse(filePath as string); + + if (filePathParts.dir !== '') { + returnData.directory = filePathParts.dir; + } + returnData.fileName = filePathParts.base; + + // Remove the dot + const extractedFileExtension = filePathParts.ext.slice(1); + if (extractedFileExtension) { + returnData.fileExtension = extractedFileExtension; + } + } + + return await this.setBinaryDataBuffer(returnData, binaryData); + } + + async setBinaryDataBuffer(binaryData: IBinaryData, bufferOrStream: Buffer | Readable) { + return await this.binaryDataService.store( + this.workflow.id, + this.additionalData.executionId!, + bufferOrStream, + binaryData, + ); + } + + async copyBinaryFile(): Promise { + throw new ApplicationError('`copyBinaryFile` has been removed. 
Please upgrade this node.'); + } +} diff --git a/packages/core/src/node-execution-context/helpers/request-helpers.ts b/packages/core/src/node-execution-context/helpers/request-helpers.ts new file mode 100644 index 0000000000..2c5eb19290 --- /dev/null +++ b/packages/core/src/node-execution-context/helpers/request-helpers.ts @@ -0,0 +1,381 @@ +import { createHash } from 'crypto'; +import { pick } from 'lodash'; +import { jsonParse, NodeOperationError, sleep } from 'n8n-workflow'; +import type { + RequestHelperFunctions, + IAdditionalCredentialOptions, + IAllExecuteFunctions, + IExecuteData, + IHttpRequestOptions, + IN8nHttpFullResponse, + IN8nHttpResponse, + INode, + INodeExecutionData, + IOAuth2Options, + IRequestOptions, + IRunExecutionData, + IWorkflowDataProxyAdditionalKeys, + IWorkflowExecuteAdditionalData, + NodeParameterValueType, + PaginationOptions, + Workflow, + WorkflowExecuteMode, +} from 'n8n-workflow'; +import { Readable } from 'stream'; + +// eslint-disable-next-line import/no-cycle +import { + applyPaginationRequestData, + binaryToString, + httpRequest, + httpRequestWithAuthentication, + proxyRequestToAxios, + requestOAuth1, + requestOAuth2, + requestWithAuthentication, + validateUrl, +} from '@/NodeExecuteFunctions'; + +export class RequestHelpers { + constructor( + private readonly context: IAllExecuteFunctions, + private readonly workflow: Workflow, + private readonly node: INode, + private readonly additionalData: IWorkflowExecuteAdditionalData, + private readonly runExecutionData: IRunExecutionData | null = null, + private readonly connectionInputData: INodeExecutionData[] = [], + ) {} + + get exported(): RequestHelperFunctions { + return { + httpRequest, + httpRequestWithAuthentication: this.httpRequestWithAuthentication.bind(this), + requestWithAuthenticationPaginated: this.requestWithAuthenticationPaginated.bind(this), + request: this.request.bind(this), + requestWithAuthentication: this.requestWithAuthentication.bind(this), + requestOAuth1: this.requestOAuth1.bind(this), + requestOAuth2: this.requestOAuth2.bind(this), + }; + } + + get httpRequest() { + return httpRequest; + } + + async httpRequestWithAuthentication( + credentialsType: string, + requestOptions: IHttpRequestOptions, + additionalCredentialOptions?: IAdditionalCredentialOptions, + ) { + // eslint-disable-next-line @typescript-eslint/no-unsafe-return + return await httpRequestWithAuthentication.call( + this.context, + credentialsType, + requestOptions, + this.workflow, + this.node, + this.additionalData, + additionalCredentialOptions, + ); + } + + // eslint-disable-next-line complexity + async requestWithAuthenticationPaginated( + requestOptions: IRequestOptions, + itemIndex: number, + paginationOptions: PaginationOptions, + credentialsType?: string, + additionalCredentialOptions?: IAdditionalCredentialOptions, + ): Promise { + const responseData = []; + if (!requestOptions.qs) { + requestOptions.qs = {}; + } + requestOptions.resolveWithFullResponse = true; + requestOptions.simple = false; + + let tempResponseData: IN8nHttpFullResponse; + let makeAdditionalRequest: boolean; + let paginateRequestData: PaginationOptions['request']; + + const runIndex = 0; + + const additionalKeys = { + $request: requestOptions, + $response: {} as IN8nHttpFullResponse, + $version: this.node.typeVersion, + $pageCount: 0, + }; + + const executeData: IExecuteData = { + data: {}, + node: this.node, + source: null, + }; + + const hashData = { + identicalCount: 0, + previousLength: 0, + previousHash: '', + }; + + do { + 
paginateRequestData = this.getResolvedValue( + paginationOptions.request as unknown as NodeParameterValueType, + itemIndex, + runIndex, + executeData, + additionalKeys, + false, + ) as object as PaginationOptions['request']; + + const tempRequestOptions = applyPaginationRequestData(requestOptions, paginateRequestData); + + if (!validateUrl(tempRequestOptions.uri as string)) { + throw new NodeOperationError( + this.node, + `'${paginateRequestData.url}' is not a valid URL.`, + { + itemIndex, + runIndex, + type: 'invalid_url', + }, + ); + } + + if (credentialsType) { + // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment + tempResponseData = await this.requestWithAuthentication( + credentialsType, + tempRequestOptions, + additionalCredentialOptions, + ); + } else { + // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment + tempResponseData = await this.request(tempRequestOptions); + } + + const newResponse: IN8nHttpFullResponse = Object.assign( + { + body: {}, + headers: {}, + statusCode: 0, + }, + pick(tempResponseData, ['body', 'headers', 'statusCode']), + ); + + let contentBody: Exclude; + + if (newResponse.body instanceof Readable && paginationOptions.binaryResult !== true) { + // Keep the original string version that we can use it to hash if needed + contentBody = await binaryToString(newResponse.body as Buffer | Readable); + + const responseContentType = newResponse.headers['content-type']?.toString() ?? ''; + if (responseContentType.includes('application/json')) { + newResponse.body = jsonParse(contentBody, { fallbackValue: {} }); + } else { + newResponse.body = contentBody; + } + tempResponseData.__bodyResolved = true; + tempResponseData.body = newResponse.body; + } else { + contentBody = newResponse.body; + } + + if (paginationOptions.binaryResult !== true || tempResponseData.headers.etag) { + // If the data is not binary (and so not a stream), or an etag is present, + // we check via etag or hash if identical data is received + + let contentLength = 0; + if ('content-length' in tempResponseData.headers) { + contentLength = parseInt(tempResponseData.headers['content-length'] as string) || 0; + } + + if (hashData.previousLength === contentLength) { + let hash: string; + if (tempResponseData.headers.etag) { + // If an etag is provided, we use it as "hash" + hash = tempResponseData.headers.etag as string; + } else { + // If there is no etag, we calculate a hash from the data in the body + if (typeof contentBody !== 'string') { + contentBody = JSON.stringify(contentBody); + } + hash = createHash('md5').update(contentBody).digest('base64'); + } + + if (hashData.previousHash === hash) { + hashData.identicalCount += 1; + if (hashData.identicalCount > 2) { + // Length was identical 5x and hash 3x + throw new NodeOperationError( + this.node, + 'The returned response was identical 5x, so requests got stopped', + { + itemIndex, + description: + 'Check if "Pagination Completed When" has been configured correctly.', + }, + ); + } + } else { + hashData.identicalCount = 0; + } + hashData.previousHash = hash; + } else { + hashData.identicalCount = 0; + } + hashData.previousLength = contentLength; + } + + responseData.push(tempResponseData); + + additionalKeys.$response = newResponse; + additionalKeys.$pageCount = additionalKeys.$pageCount + 1; + + const maxRequests = this.getResolvedValue( + paginationOptions.maxRequests, + itemIndex, + runIndex, + executeData, + additionalKeys, + false, + ) as number; + + if (maxRequests && additionalKeys.$pageCount >= maxRequests) { + 
break; + } + + makeAdditionalRequest = this.getResolvedValue( + paginationOptions.continue, + itemIndex, + runIndex, + executeData, + additionalKeys, + false, + ) as boolean; + + if (makeAdditionalRequest) { + if (paginationOptions.requestInterval) { + const requestInterval = this.getResolvedValue( + paginationOptions.requestInterval, + itemIndex, + runIndex, + executeData, + additionalKeys, + false, + ) as number; + + await sleep(requestInterval); + } + if (tempResponseData.statusCode < 200 || tempResponseData.statusCode >= 300) { + // We have it configured to let all requests pass no matter the response code + // via "requestOptions.simple = false" to not by default fail if it is for example + // configured to stop on 404 response codes. For that reason we have to throw here + // now an error manually if the response code is not a success one. + let data = tempResponseData.body; + if (data instanceof Readable && paginationOptions.binaryResult !== true) { + data = await binaryToString(data as Buffer | Readable); + } else if (typeof data === 'object') { + data = JSON.stringify(data); + } + + throw Object.assign(new Error(`${tempResponseData.statusCode} - "${data?.toString()}"`), { + statusCode: tempResponseData.statusCode, + error: data, + isAxiosError: true, + response: { + headers: tempResponseData.headers, + status: tempResponseData.statusCode, + statusText: tempResponseData.statusMessage, + }, + }); + } + } + } while (makeAdditionalRequest); + + return responseData; + } + + async request(uriOrObject: string | IRequestOptions, options?: IRequestOptions) { + // eslint-disable-next-line @typescript-eslint/no-unsafe-return + return await proxyRequestToAxios( + this.workflow, + this.additionalData, + this.node, + uriOrObject, + options, + ); + } + + async requestWithAuthentication( + credentialsType: string, + requestOptions: IRequestOptions, + additionalCredentialOptions?: IAdditionalCredentialOptions, + itemIndex?: number, + ) { + // eslint-disable-next-line @typescript-eslint/no-unsafe-return + return await requestWithAuthentication.call( + this.context, + credentialsType, + requestOptions, + this.workflow, + this.node, + this.additionalData, + additionalCredentialOptions, + itemIndex, + ); + } + + async requestOAuth1(credentialsType: string, requestOptions: IRequestOptions) { + // eslint-disable-next-line @typescript-eslint/no-unsafe-return + return await requestOAuth1.call(this.context, credentialsType, requestOptions); + } + + async requestOAuth2( + credentialsType: string, + requestOptions: IRequestOptions, + oAuth2Options?: IOAuth2Options, + ) { + // eslint-disable-next-line @typescript-eslint/no-unsafe-return + return await requestOAuth2.call( + this.context, + credentialsType, + requestOptions, + this.node, + this.additionalData, + oAuth2Options, + ); + } + + private getResolvedValue( + parameterValue: NodeParameterValueType, + itemIndex: number, + runIndex: number, + executeData: IExecuteData, + additionalKeys?: IWorkflowDataProxyAdditionalKeys, + returnObjectAsString = false, + ): NodeParameterValueType { + const mode: WorkflowExecuteMode = 'internal'; + + if ( + typeof parameterValue === 'object' || + (typeof parameterValue === 'string' && parameterValue.charAt(0) === '=') + ) { + return this.workflow.expression.getParameterValue( + parameterValue, + this.runExecutionData, + runIndex, + itemIndex, + this.node.name, + this.connectionInputData, + mode, + additionalKeys ?? 
{}, + executeData, + returnObjectAsString, + ); + } + + return parameterValue; + } +} diff --git a/packages/core/src/node-execution-context/helpers/scheduling-helpers.ts b/packages/core/src/node-execution-context/helpers/scheduling-helpers.ts new file mode 100644 index 0000000000..e193f2beaf --- /dev/null +++ b/packages/core/src/node-execution-context/helpers/scheduling-helpers.ts @@ -0,0 +1,20 @@ +import type { CronExpression, Workflow, SchedulingFunctions } from 'n8n-workflow'; +import { Container } from 'typedi'; + +import { ScheduledTaskManager } from '@/ScheduledTaskManager'; + +export class SchedulingHelpers { + private readonly scheduledTaskManager = Container.get(ScheduledTaskManager); + + constructor(private readonly workflow: Workflow) {} + + get exported(): SchedulingFunctions { + return { + registerCron: this.registerCron.bind(this), + }; + } + + registerCron(cronExpression: CronExpression, onTick: () => void) { + this.scheduledTaskManager.registerCron(this.workflow, cronExpression, onTick); + } +} diff --git a/packages/core/src/node-execution-context/index.ts b/packages/core/src/node-execution-context/index.ts new file mode 100644 index 0000000000..5182804dee --- /dev/null +++ b/packages/core/src/node-execution-context/index.ts @@ -0,0 +1,2 @@ +// eslint-disable-next-line import/no-cycle +export { PollContext } from './poll-context'; diff --git a/packages/core/src/node-execution-context/node-execution-context.ts b/packages/core/src/node-execution-context/node-execution-context.ts new file mode 100644 index 0000000000..09c21b63a4 --- /dev/null +++ b/packages/core/src/node-execution-context/node-execution-context.ts @@ -0,0 +1,107 @@ +import type { + FunctionsBase, + INode, + INodeExecutionData, + IWorkflowExecuteAdditionalData, + NodeTypeAndVersion, + Workflow, + WorkflowExecuteMode, +} from 'n8n-workflow'; +import { deepCopy, LoggerProxy } from 'n8n-workflow'; +import { Container } from 'typedi'; + +import { InstanceSettings } from '@/InstanceSettings'; + +export abstract class NodeExecutionContext implements Omit { + protected readonly instanceSettings = Container.get(InstanceSettings); + + constructor( + protected readonly workflow: Workflow, + protected readonly node: INode, + protected readonly additionalData: IWorkflowExecuteAdditionalData, + protected readonly mode: WorkflowExecuteMode, + ) {} + + get logger() { + return LoggerProxy; + } + + getExecutionId() { + return this.additionalData.executionId!; + } + + getNode(): INode { + return deepCopy(this.node); + } + + getWorkflow() { + const { id, name, active } = this.workflow; + return { id, name, active }; + } + + getMode() { + return this.mode; + } + + getWorkflowStaticData(type: string) { + return this.workflow.getStaticData(type, this.node); + } + + getChildNodes(nodeName: string) { + const output: NodeTypeAndVersion[] = []; + const nodeNames = this.workflow.getChildNodes(nodeName); + + for (const n of nodeNames) { + const node = this.workflow.nodes[n]; + output.push({ + name: node.name, + type: node.type, + typeVersion: node.typeVersion, + }); + } + return output; + } + + getParentNodes(nodeName: string) { + const output: NodeTypeAndVersion[] = []; + const nodeNames = this.workflow.getParentNodes(nodeName); + + for (const n of nodeNames) { + const node = this.workflow.nodes[n]; + output.push({ + name: node.name, + type: node.type, + typeVersion: node.typeVersion, + }); + } + return output; + } + + getKnownNodeTypes() { + return this.workflow.nodeTypes.getKnownTypes(); + } + + getRestApiUrl() { + return 
this.additionalData.restApiUrl; + } + + getInstanceBaseUrl() { + return this.additionalData.instanceBaseUrl; + } + + getInstanceId() { + return this.instanceSettings.instanceId; + } + + getTimezone() { + return this.workflow.timezone; + } + + getCredentialsProperties(type: string) { + return this.additionalData.credentialsHelper.getCredentialsProperties(type); + } + + async prepareOutputData(outputData: INodeExecutionData[]) { + return [outputData]; + } +} diff --git a/packages/core/src/node-execution-context/poll-context.ts b/packages/core/src/node-execution-context/poll-context.ts new file mode 100644 index 0000000000..88e8caafc8 --- /dev/null +++ b/packages/core/src/node-execution-context/poll-context.ts @@ -0,0 +1,94 @@ +import type { + ICredentialDataDecryptedObject, + IGetNodeParameterOptions, + INode, + INodeExecutionData, + IPollFunctions, + IRunExecutionData, + IWorkflowExecuteAdditionalData, + NodeParameterValueType, + Workflow, + WorkflowActivateMode, + WorkflowExecuteMode, +} from 'n8n-workflow'; +import { ApplicationError, createDeferredPromise } from 'n8n-workflow'; + +// eslint-disable-next-line import/no-cycle +import { + getAdditionalKeys, + getCredentials, + getNodeParameter, + returnJsonArray, +} from '@/NodeExecuteFunctions'; + +import { BinaryHelpers } from './helpers/binary-helpers'; +import { RequestHelpers } from './helpers/request-helpers'; +import { SchedulingHelpers } from './helpers/scheduling-helpers'; +import { NodeExecutionContext } from './node-execution-context'; + +const throwOnEmit = () => { + throw new ApplicationError('Overwrite PollContext.__emit function'); +}; + +const throwOnEmitError = () => { + throw new ApplicationError('Overwrite PollContext.__emitError function'); +}; + +export class PollContext extends NodeExecutionContext implements IPollFunctions { + readonly helpers: IPollFunctions['helpers']; + + constructor( + workflow: Workflow, + node: INode, + additionalData: IWorkflowExecuteAdditionalData, + mode: WorkflowExecuteMode, + private readonly activation: WorkflowActivateMode, + readonly __emit: IPollFunctions['__emit'] = throwOnEmit, + readonly __emitError: IPollFunctions['__emitError'] = throwOnEmitError, + ) { + super(workflow, node, additionalData, mode); + + this.helpers = { + createDeferredPromise, + returnJsonArray, + ...new BinaryHelpers(workflow, additionalData).exported, + ...new RequestHelpers(this, workflow, node, additionalData).exported, + ...new SchedulingHelpers(workflow).exported, + }; + } + + getActivationMode() { + return this.activation; + } + + async getCredentials(type: string) { + return await getCredentials(this.workflow, this.node, type, this.additionalData, this.mode); + } + + getNodeParameter( + parameterName: string, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + fallbackValue?: any, + options?: IGetNodeParameterOptions, + ): NodeParameterValueType | object { + const runExecutionData: IRunExecutionData | null = null; + const itemIndex = 0; + const runIndex = 0; + const connectionInputData: INodeExecutionData[] = []; + + return getNodeParameter( + this.workflow, + runExecutionData, + runIndex, + connectionInputData, + this.node, + parameterName, + itemIndex, + this.mode, + getAdditionalKeys(this.additionalData, this.mode, runExecutionData), + undefined, + fallbackValue, + options, + ); + } +} diff --git a/packages/nodes-base/test/nodes/TriggerHelpers.ts b/packages/nodes-base/test/nodes/TriggerHelpers.ts index 93b4dfae9e..7e04dcaa86 100644 --- a/packages/nodes-base/test/nodes/TriggerHelpers.ts 
+++ b/packages/nodes-base/test/nodes/TriggerHelpers.ts @@ -3,7 +3,7 @@ import { mock } from 'jest-mock-extended'; import get from 'lodash/get'; import merge from 'lodash/merge'; import set from 'lodash/set'; -import { getExecutePollFunctions, returnJsonArray, type InstanceSettings } from 'n8n-core'; +import { PollContext, returnJsonArray, type InstanceSettings } from 'n8n-core'; import { ScheduledTaskManager } from 'n8n-core/dist/ScheduledTaskManager'; import type { IBinaryData, @@ -13,7 +13,6 @@ import type { INode, INodeType, INodeTypes, - IPollFunctions, ITriggerFunctions, IWebhookFunctions, IWorkflowExecuteAdditionalData, @@ -193,14 +192,15 @@ export async function testPollingTriggerNode( options.node, ) as INode; const workflow = mock({ - timezone: options.timezone ?? 'Europe/Berlin', + timezone, nodeTypes: mock({ getByNameAndVersion: () => mock({ description: trigger.description }), }), + getStaticData: () => options.workflowStaticData ?? {}, }); const mode = options.mode ?? 'trigger'; - const originalPollingFunctions = getExecutePollFunctions( + const pollContext = new PollContext( workflow, node, mock({ @@ -218,22 +218,13 @@ export async function testPollingTriggerNode( 'init', ); - async function getCredentials(): Promise { - return (options.credential ?? {}) as T; - } + pollContext.getNode = () => node; + pollContext.getCredentials = async () => + (options.credential ?? {}) as T; + pollContext.getNodeParameter = (parameterName, fallback) => + get(node.parameters, parameterName) ?? fallback; - const pollingFunctions = mock({ - ...originalPollingFunctions, - getCredentials, - getTimezone: () => timezone, - getNode: () => node, - getMode: () => mode, - getInstanceId: () => 'instanceId', - getWorkflowStaticData: () => options.workflowStaticData ?? {}, - getNodeParameter: (parameterName, fallback) => get(node.parameters, parameterName) ?? 
fallback, - }); - - const response = await trigger.poll?.call(pollingFunctions); + const response = await trigger.poll?.call(pollContext); return { response, diff --git a/packages/workflow/src/Interfaces.ts b/packages/workflow/src/Interfaces.ts index 4f92a219e6..80140d93a2 100644 --- a/packages/workflow/src/Interfaces.ts +++ b/packages/workflow/src/Interfaces.ts @@ -731,11 +731,8 @@ export interface ICredentialTestFunctions { }; } -interface BaseHelperFunctions { +export interface BaseHelperFunctions { createDeferredPromise: () => IDeferredPromise; -} - -interface JsonHelperFunctions { returnJsonArray(jsonData: IDataObject | IDataObject[]): INodeExecutionData[]; } @@ -756,6 +753,7 @@ export interface BinaryHelperFunctions { mimeType?: string, ): Promise; setBinaryDataBuffer(data: IBinaryData, binaryData: Buffer): Promise; + /** @deprecated */ copyBinaryFile(): Promise; binaryToBuffer(body: Buffer | Readable): Promise; binaryToString(body: Buffer | Readable, encoding?: BufferEncoding): Promise; @@ -985,8 +983,7 @@ export type IExecuteFunctions = ExecuteFunctions.GetNodeParameterFn & BinaryHelperFunctions & DeduplicationHelperFunctions & FileSystemHelperFunctions & - SSHTunnelFunctions & - JsonHelperFunctions & { + SSHTunnelFunctions & { normalizeItems(items: INodeExecutionData | INodeExecutionData[]): INodeExecutionData[]; constructExecutionMetaData( inputData: INodeExecutionData[], @@ -1081,8 +1078,7 @@ export interface IPollFunctions helpers: RequestHelperFunctions & BaseHelperFunctions & BinaryHelperFunctions & - SchedulingFunctions & - JsonHelperFunctions; + SchedulingFunctions; } export interface ITriggerFunctions @@ -1102,8 +1098,7 @@ export interface ITriggerFunctions BaseHelperFunctions & BinaryHelperFunctions & SSHTunnelFunctions & - SchedulingFunctions & - JsonHelperFunctions; + SchedulingFunctions; } export interface IHookFunctions @@ -1140,10 +1135,7 @@ export interface IWebhookFunctions extends FunctionsBaseWithRequiredKeys<'getMod getResponseObject(): express.Response; getWebhookName(): string; nodeHelpers: NodeHelperFunctions; - helpers: RequestHelperFunctions & - BaseHelperFunctions & - BinaryHelperFunctions & - JsonHelperFunctions; + helpers: RequestHelperFunctions & BaseHelperFunctions & BinaryHelperFunctions; } export interface INodeCredentialsDetails { @@ -1641,6 +1633,7 @@ export abstract class Node { abstract description: INodeTypeDescription; execute?(context: IExecuteFunctions): Promise; webhook?(context: IWebhookFunctions): Promise; + poll?(context: IPollFunctions): Promise; } export interface IVersionedNodeType { From e4aa1d01f3ca37ff17098108753ec531dc759f6d Mon Sep 17 00:00:00 2001 From: Tomi Turtiainen <10324676+tomi@users.noreply.github.com> Date: Mon, 4 Nov 2024 11:13:09 +0200 Subject: [PATCH 03/21] feat: Only send needed data to task runner (no-changelog) (#11487) --- packages/@n8n/task-runner/package.json | 4 +- .../__tests__/js-task-runner.test.ts | 33 +- .../src/js-task-runner/__tests__/test-data.ts | 8 +- .../__tests__/built-ins-parser-state.test.ts | 117 +++++++ .../__tests__/built-ins-parser.test.ts | 251 ++++++++++++++ .../built-ins-parser/acorn-helpers.ts | 28 ++ .../built-ins-parser-state.ts | 74 ++++ .../built-ins-parser/built-ins-parser.ts | 142 ++++++++ .../src/js-task-runner/js-task-runner.ts | 74 ++-- packages/@n8n/task-runner/src/runner-types.ts | 13 +- packages/@n8n/task-runner/src/task-runner.ts | 6 +- .../src/runners/__tests__/task-broker.test.ts | 14 +- .../__tests__/task-runner-process.test.ts | 4 +- packages/cli/src/runners/runner-types.ts 
| 18 +- .../cli/src/runners/task-broker.service.ts | 13 +- .../data-request-response-builder.test.ts | 324 ++++++++++++++++++ .../data-request-response-builder.ts | 205 +++++++++++ .../src/runners/task-managers/task-manager.ts | 71 +--- .../cli/src/runners/task-runner-process.ts | 9 +- packages/workflow/src/WorkflowDataProxy.ts | 19 +- .../workflow/test/WorkflowDataProxy.test.ts | 41 ++- .../WorkflowDataProxy/partial_data_run.json | 71 ++++ .../partial_data_workflow.json | 86 +++++ pnpm-lock.yaml | 138 ++------ 24 files changed, 1511 insertions(+), 252 deletions(-) create mode 100644 packages/@n8n/task-runner/src/js-task-runner/built-ins-parser/__tests__/built-ins-parser-state.test.ts create mode 100644 packages/@n8n/task-runner/src/js-task-runner/built-ins-parser/__tests__/built-ins-parser.test.ts create mode 100644 packages/@n8n/task-runner/src/js-task-runner/built-ins-parser/acorn-helpers.ts create mode 100644 packages/@n8n/task-runner/src/js-task-runner/built-ins-parser/built-ins-parser-state.ts create mode 100644 packages/@n8n/task-runner/src/js-task-runner/built-ins-parser/built-ins-parser.ts create mode 100644 packages/cli/src/runners/task-managers/__tests__/data-request-response-builder.test.ts create mode 100644 packages/cli/src/runners/task-managers/data-request-response-builder.ts create mode 100644 packages/workflow/test/fixtures/WorkflowDataProxy/partial_data_run.json create mode 100644 packages/workflow/test/fixtures/WorkflowDataProxy/partial_data_workflow.json diff --git a/packages/@n8n/task-runner/package.json b/packages/@n8n/task-runner/package.json index 74c3e323bb..cb47095c15 100644 --- a/packages/@n8n/task-runner/package.json +++ b/packages/@n8n/task-runner/package.json @@ -23,8 +23,10 @@ ], "dependencies": { "@n8n/config": "workspace:*", - "n8n-workflow": "workspace:*", + "acorn": "8.14.0", + "acorn-walk": "8.3.4", "n8n-core": "workspace:*", + "n8n-workflow": "workspace:*", "nanoid": "^3.3.6", "typedi": "catalog:", "ws": "^8.18.0" diff --git a/packages/@n8n/task-runner/src/js-task-runner/__tests__/js-task-runner.test.ts b/packages/@n8n/task-runner/src/js-task-runner/__tests__/js-task-runner.test.ts index 36f3c5afa2..cd0863b13e 100644 --- a/packages/@n8n/task-runner/src/js-task-runner/__tests__/js-task-runner.test.ts +++ b/packages/@n8n/task-runner/src/js-task-runner/__tests__/js-task-runner.test.ts @@ -4,14 +4,11 @@ import fs from 'node:fs'; import { builtinModules } from 'node:module'; import { ValidationError } from '@/js-task-runner/errors/validation-error'; -import { - JsTaskRunner, - type AllCodeTaskData, - type JSExecSettings, -} from '@/js-task-runner/js-task-runner'; +import type { DataRequestResponse, JSExecSettings } from '@/js-task-runner/js-task-runner'; +import { JsTaskRunner } from '@/js-task-runner/js-task-runner'; import type { Task } from '@/task-runner'; -import { newAllCodeTaskData, newTaskWithSettings, withPairedItem, wrapIntoJson } from './test-data'; +import { newCodeTaskData, newTaskWithSettings, withPairedItem, wrapIntoJson } from './test-data'; import type { JsRunnerConfig } from '../../config/js-runner-config'; import { MainConfig } from '../../config/main-config'; import { ExecutionError } from '../errors/execution-error'; @@ -43,7 +40,7 @@ describe('JsTaskRunner', () => { runner = defaultTaskRunner, }: { task: Task; - taskData: AllCodeTaskData; + taskData: DataRequestResponse; runner?: JsTaskRunner; }) => { jest.spyOn(runner, 'requestData').mockResolvedValue(taskData); @@ -71,7 +68,7 @@ describe('JsTaskRunner', () => { nodeMode: 
'runOnceForAllItems', ...settings, }), - taskData: newAllCodeTaskData(inputItems.map(wrapIntoJson)), + taskData: newCodeTaskData(inputItems.map(wrapIntoJson)), runner, }); }; @@ -94,7 +91,7 @@ describe('JsTaskRunner', () => { nodeMode: 'runOnceForEachItem', ...settings, }), - taskData: newAllCodeTaskData(inputItems.map(wrapIntoJson)), + taskData: newCodeTaskData(inputItems.map(wrapIntoJson)), runner, }); }; @@ -111,7 +108,7 @@ describe('JsTaskRunner', () => { await execTaskWithParams({ task, - taskData: newAllCodeTaskData([wrapIntoJson({})]), + taskData: newCodeTaskData([wrapIntoJson({})]), }); expect(defaultTaskRunner.makeRpcCall).toHaveBeenCalledWith(task.taskId, 'logNodeOutput', [ @@ -246,7 +243,7 @@ describe('JsTaskRunner', () => { code: 'return { val: $env.VAR1 }', nodeMode: 'runOnceForAllItems', }), - taskData: newAllCodeTaskData(inputItems.map(wrapIntoJson), { + taskData: newCodeTaskData(inputItems.map(wrapIntoJson), { envProviderState: { isEnvAccessBlocked: false, isProcessAvailable: true, @@ -265,7 +262,7 @@ describe('JsTaskRunner', () => { code: 'return { val: $env.VAR1 }', nodeMode: 'runOnceForAllItems', }), - taskData: newAllCodeTaskData(inputItems.map(wrapIntoJson), { + taskData: newCodeTaskData(inputItems.map(wrapIntoJson), { envProviderState: { isEnvAccessBlocked: true, isProcessAvailable: true, @@ -282,7 +279,7 @@ describe('JsTaskRunner', () => { code: 'return Object.values($env).concat(Object.keys($env))', nodeMode: 'runOnceForAllItems', }), - taskData: newAllCodeTaskData(inputItems.map(wrapIntoJson), { + taskData: newCodeTaskData(inputItems.map(wrapIntoJson), { envProviderState: { isEnvAccessBlocked: false, isProcessAvailable: true, @@ -301,7 +298,7 @@ describe('JsTaskRunner', () => { code: 'return { val: $env.N8N_RUNNERS_N8N_URI }', nodeMode: 'runOnceForAllItems', }), - taskData: newAllCodeTaskData(inputItems.map(wrapIntoJson), { + taskData: newCodeTaskData(inputItems.map(wrapIntoJson), { envProviderState: undefined, }), }); @@ -316,7 +313,7 @@ describe('JsTaskRunner', () => { code: 'return { val: Buffer.from("test-buffer").toString() }', nodeMode: 'runOnceForAllItems', }), - taskData: newAllCodeTaskData(inputItems.map(wrapIntoJson), { + taskData: newCodeTaskData(inputItems.map(wrapIntoJson), { envProviderState: undefined, }), }); @@ -328,7 +325,7 @@ describe('JsTaskRunner', () => { code: 'return { val: Buffer.from("test-buffer").toString() }', nodeMode: 'runOnceForEachItem', }), - taskData: newAllCodeTaskData(inputItems.map(wrapIntoJson), { + taskData: newCodeTaskData(inputItems.map(wrapIntoJson), { envProviderState: undefined, }), }); @@ -774,7 +771,7 @@ describe('JsTaskRunner', () => { code: 'unknown', nodeMode, }), - taskData: newAllCodeTaskData([wrapIntoJson({ a: 1 })]), + taskData: newCodeTaskData([wrapIntoJson({ a: 1 })]), }), ).rejects.toThrow(ExecutionError); }, @@ -796,7 +793,7 @@ describe('JsTaskRunner', () => { jest.spyOn(runner, 'sendOffers').mockImplementation(() => {}); jest .spyOn(runner, 'requestData') - .mockResolvedValue(newAllCodeTaskData([wrapIntoJson({ a: 1 })])); + .mockResolvedValue(newCodeTaskData([wrapIntoJson({ a: 1 })])); await runner.receivedSettings(taskId, task.settings); diff --git a/packages/@n8n/task-runner/src/js-task-runner/__tests__/test-data.ts b/packages/@n8n/task-runner/src/js-task-runner/__tests__/test-data.ts index b157094619..6de3e6d2b1 100644 --- a/packages/@n8n/task-runner/src/js-task-runner/__tests__/test-data.ts +++ b/packages/@n8n/task-runner/src/js-task-runner/__tests__/test-data.ts @@ -2,7 +2,7 @@ import type { 
IDataObject, INode, INodeExecutionData, ITaskData } from 'n8n-work import { NodeConnectionType } from 'n8n-workflow'; import { nanoid } from 'nanoid'; -import type { AllCodeTaskData, JSExecSettings } from '@/js-task-runner/js-task-runner'; +import type { DataRequestResponse, JSExecSettings } from '@/js-task-runner/js-task-runner'; import type { Task } from '@/task-runner'; /** @@ -48,10 +48,10 @@ export const newTaskData = (opts: Partial & Pick /** * Creates a new all code task data with the given options */ -export const newAllCodeTaskData = ( +export const newCodeTaskData = ( codeNodeInputData: INodeExecutionData[], - opts: Partial = {}, -): AllCodeTaskData => { + opts: Partial = {}, +): DataRequestResponse => { const codeNode = newNode({ name: 'JsCode', parameters: { diff --git a/packages/@n8n/task-runner/src/js-task-runner/built-ins-parser/__tests__/built-ins-parser-state.test.ts b/packages/@n8n/task-runner/src/js-task-runner/built-ins-parser/__tests__/built-ins-parser-state.test.ts new file mode 100644 index 0000000000..0b75ae563e --- /dev/null +++ b/packages/@n8n/task-runner/src/js-task-runner/built-ins-parser/__tests__/built-ins-parser-state.test.ts @@ -0,0 +1,117 @@ +import { BuiltInsParserState } from '../built-ins-parser-state'; + +describe('BuiltInsParserState', () => { + describe('toDataRequestSpecification', () => { + it('should return empty array when no properties are marked as needed', () => { + const state = new BuiltInsParserState(); + + expect(state.toDataRequestParams()).toEqual({ + dataOfNodes: [], + env: false, + input: false, + prevNode: false, + }); + }); + + it('should return all nodes and input when markNeedsAllNodes is called', () => { + const state = new BuiltInsParserState(); + state.markNeedsAllNodes(); + + expect(state.toDataRequestParams()).toEqual({ + dataOfNodes: 'all', + env: false, + input: true, + prevNode: false, + }); + }); + + it('should return specific node names when nodes are marked as needed individually', () => { + const state = new BuiltInsParserState(); + state.markNodeAsNeeded('Node1'); + state.markNodeAsNeeded('Node2'); + + expect(state.toDataRequestParams()).toEqual({ + dataOfNodes: ['Node1', 'Node2'], + env: false, + input: false, + prevNode: false, + }); + }); + + it('should ignore individual nodes when needsAllNodes is marked as true', () => { + const state = new BuiltInsParserState(); + state.markNodeAsNeeded('Node1'); + state.markNeedsAllNodes(); + state.markNodeAsNeeded('Node2'); // should be ignored since all nodes are needed + + expect(state.toDataRequestParams()).toEqual({ + dataOfNodes: 'all', + env: false, + input: true, + prevNode: false, + }); + }); + + it('should mark env as needed when markEnvAsNeeded is called', () => { + const state = new BuiltInsParserState(); + state.markEnvAsNeeded(); + + expect(state.toDataRequestParams()).toEqual({ + dataOfNodes: [], + env: true, + input: false, + prevNode: false, + }); + }); + + it('should mark input as needed when markInputAsNeeded is called', () => { + const state = new BuiltInsParserState(); + state.markInputAsNeeded(); + + expect(state.toDataRequestParams()).toEqual({ + dataOfNodes: [], + env: false, + input: true, + prevNode: false, + }); + }); + + it('should mark prevNode as needed when markPrevNodeAsNeeded is called', () => { + const state = new BuiltInsParserState(); + state.markPrevNodeAsNeeded(); + + expect(state.toDataRequestParams()).toEqual({ + dataOfNodes: [], + env: false, + input: false, + prevNode: true, + }); + }); + + it('should return correct specification when 
multiple properties are marked as needed', () => { + const state = new BuiltInsParserState(); + state.markNeedsAllNodes(); + state.markEnvAsNeeded(); + state.markInputAsNeeded(); + state.markPrevNodeAsNeeded(); + + expect(state.toDataRequestParams()).toEqual({ + dataOfNodes: 'all', + env: true, + input: true, + prevNode: true, + }); + }); + + it('should return correct specification when all properties are marked as needed', () => { + const state = BuiltInsParserState.newNeedsAllDataState(); + + expect(state.toDataRequestParams()).toEqual({ + dataOfNodes: 'all', + env: true, + input: true, + prevNode: true, + }); + }); + }); +}); diff --git a/packages/@n8n/task-runner/src/js-task-runner/built-ins-parser/__tests__/built-ins-parser.test.ts b/packages/@n8n/task-runner/src/js-task-runner/built-ins-parser/__tests__/built-ins-parser.test.ts new file mode 100644 index 0000000000..399d9e6e2b --- /dev/null +++ b/packages/@n8n/task-runner/src/js-task-runner/built-ins-parser/__tests__/built-ins-parser.test.ts @@ -0,0 +1,251 @@ +import { getAdditionalKeys } from 'n8n-core'; +import type { IDataObject, INodeType, IWorkflowExecuteAdditionalData } from 'n8n-workflow'; +import { Workflow, WorkflowDataProxy } from 'n8n-workflow'; + +import { newCodeTaskData } from '../../__tests__/test-data'; +import { BuiltInsParser } from '../built-ins-parser'; +import { BuiltInsParserState } from '../built-ins-parser-state'; + +describe('BuiltInsParser', () => { + const parser = new BuiltInsParser(); + + const parseAndExpectOk = (code: string) => { + const result = parser.parseUsedBuiltIns(code); + if (!result.ok) { + fail(result.error); + } + + return result.result; + }; + + describe('Env, input, execution and prevNode', () => { + const cases: Array<[string, BuiltInsParserState]> = [ + ['$env', new BuiltInsParserState({ needs$env: true })], + ['$execution', new BuiltInsParserState({ needs$execution: true })], + ['$prevNode', new BuiltInsParserState({ needs$prevNode: true })], + ]; + + test.each(cases)("should identify built-ins in '%s'", (code, expected) => { + const state = parseAndExpectOk(code); + expect(state).toEqual(expected); + }); + }); + + describe('Input', () => { + it('should mark input as needed when $input is used', () => { + const state = parseAndExpectOk(` + $input.item.json.age = 10 + Math.floor(Math.random() * 30); + $input.item.json.password = $input.item.json.password.split('').map(() => '*').join("") + delete $input.item.json.lastname + const emailParts = $input.item.json.email.split("@") + $input.item.json.emailData = { + user: emailParts[0], + domain: emailParts[1] + } + + return $input.item; + `); + + expect(state).toEqual(new BuiltInsParserState({ needs$input: true })); + }); + + it('should mark input as needed when $json is used', () => { + const state = parseAndExpectOk(` + $json.age = 10 + Math.floor(Math.random() * 30); + return $json; + `); + + expect(state).toEqual(new BuiltInsParserState({ needs$input: true })); + }); + }); + + describe('$(...)', () => { + const cases: Array<[string, BuiltInsParserState]> = [ + [ + '$("nodeName").first()', + new BuiltInsParserState({ neededNodeNames: new Set(['nodeName']) }), + ], + [ + '$("nodeName").all(); $("secondNode").matchingItem()', + new BuiltInsParserState({ neededNodeNames: new Set(['nodeName', 'secondNode']) }), + ], + ]; + + test.each(cases)("should identify nodes in '%s'", (code, expected) => { + const state = parseAndExpectOk(code); + expect(state).toEqual(expected); + }); + + it('should need all nodes when $() is called with a variable', () 
=> { + const state = parseAndExpectOk('var n = "name"; $(n)'); + expect(state).toEqual(new BuiltInsParserState({ needsAllNodes: true, needs$input: true })); + }); + + it('should require all nodes when there are multiple usages of $() and one is with a variable', () => { + const state = parseAndExpectOk(` + $("nodeName"); + $("secondNode"); + var n = "name"; + $(n) + `); + expect(state).toEqual(new BuiltInsParserState({ needsAllNodes: true, needs$input: true })); + }); + + test.each([ + ['without parameters', '$()'], + ['number literal', '$(123)'], + ])('should ignore when $ is called %s', (_, code) => { + const state = parseAndExpectOk(code); + expect(state).toEqual(new BuiltInsParserState()); + }); + + test.each([ + '$("node").item', + '$("node")["item"]', + '$("node").pairedItem()', + '$("node")["pairedItem"]()', + '$("node").itemMatching(0)', + '$("node")["itemMatching"](0)', + '$("node")[variable]', + 'var a = $("node")', + 'let a = $("node")', + 'const a = $("node")', + 'a = $("node")', + ])('should require all nodes if %s is used', (code) => { + const state = parseAndExpectOk(code); + expect(state).toEqual(new BuiltInsParserState({ needsAllNodes: true, needs$input: true })); + }); + + test.each(['$("node").first()', '$("node").last()', '$("node").all()', '$("node").params'])( + 'should require only accessed node if %s is used', + (code) => { + const state = parseAndExpectOk(code); + expect(state).toEqual( + new BuiltInsParserState({ + needsAllNodes: false, + neededNodeNames: new Set(['node']), + }), + ); + }, + ); + }); + + describe('ECMAScript syntax', () => { + describe('ES2020', () => { + it('should parse optional chaining', () => { + parseAndExpectOk(` + const a = { b: { c: 1 } }; + return a.b?.c; + `); + }); + + it('should parse nullish coalescing', () => { + parseAndExpectOk(` + const a = null; + return a ?? 1; + `); + }); + }); + + describe('ES2021', () => { + it('should parse numeric separators', () => { + parseAndExpectOk(` + const a = 1_000_000; + return a; + `); + }); + }); + }); + + describe('WorkflowDataProxy built-ins', () => { + it('should have a known list of built-ins', () => { + const data = newCodeTaskData([]); + const dataProxy = new WorkflowDataProxy( + new Workflow({ + ...data.workflow, + nodeTypes: { + getByName() { + return undefined as unknown as INodeType; + }, + getByNameAndVersion() { + return undefined as unknown as INodeType; + }, + getKnownTypes() { + return undefined as unknown as IDataObject; + }, + }, + }), + data.runExecutionData, + data.runIndex, + 0, + data.activeNodeName, + data.connectionInputData, + data.siblingParameters, + data.mode, + getAdditionalKeys( + data.additionalData as IWorkflowExecuteAdditionalData, + data.mode, + data.runExecutionData, + ), + data.executeData, + data.defaultReturnRunIndex, + data.selfData, + data.contextNodeName, + // Make sure that even if we don't receive the envProviderState for + // whatever reason, we don't expose the task runner's env to the code + data.envProviderState ?? { + env: {}, + isEnvAccessBlocked: false, + isProcessAvailable: true, + }, + ).getDataProxy({ throwOnMissingExecutionData: false }); + + /** + * NOTE! If you are adding new built-ins to the WorkflowDataProxy class + * make sure the built-ins parser and Task Runner handle them properly. 
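+       * As of this patch, BuiltInsParser only special-cases $env, $input/$json,
+       * $execution, $prevNode and $(...) access (see visitIdentifier and
+       * visitCallExpression); a new proxy built-in that needs extra execution
+       * data presumably has to be wired up there as well.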
+ */ + expect(Object.keys(dataProxy)).toStrictEqual([ + '$', + '$input', + '$binary', + '$data', + '$env', + '$evaluateExpression', + '$item', + '$fromAI', + '$fromai', + '$fromAi', + '$items', + '$json', + '$node', + '$self', + '$parameter', + '$prevNode', + '$runIndex', + '$mode', + '$workflow', + '$itemIndex', + '$now', + '$today', + '$jmesPath', + 'DateTime', + 'Interval', + 'Duration', + '$execution', + '$vars', + '$secrets', + '$executionId', + '$resumeWebhookUrl', + '$getPairedItem', + '$jmespath', + '$position', + '$thisItem', + '$thisItemIndex', + '$thisRunIndex', + '$nodeVersion', + '$nodeId', + '$webhookId', + ]); + }); + }); +}); diff --git a/packages/@n8n/task-runner/src/js-task-runner/built-ins-parser/acorn-helpers.ts b/packages/@n8n/task-runner/src/js-task-runner/built-ins-parser/acorn-helpers.ts new file mode 100644 index 0000000000..ccab4c1527 --- /dev/null +++ b/packages/@n8n/task-runner/src/js-task-runner/built-ins-parser/acorn-helpers.ts @@ -0,0 +1,28 @@ +import type { + AssignmentExpression, + Identifier, + Literal, + MemberExpression, + Node, + VariableDeclarator, +} from 'acorn'; + +export function isLiteral(node?: Node): node is Literal { + return node?.type === 'Literal'; +} + +export function isIdentifier(node?: Node): node is Identifier { + return node?.type === 'Identifier'; +} + +export function isMemberExpression(node?: Node): node is MemberExpression { + return node?.type === 'MemberExpression'; +} + +export function isVariableDeclarator(node?: Node): node is VariableDeclarator { + return node?.type === 'VariableDeclarator'; +} + +export function isAssignmentExpression(node?: Node): node is AssignmentExpression { + return node?.type === 'AssignmentExpression'; +} diff --git a/packages/@n8n/task-runner/src/js-task-runner/built-ins-parser/built-ins-parser-state.ts b/packages/@n8n/task-runner/src/js-task-runner/built-ins-parser/built-ins-parser-state.ts new file mode 100644 index 0000000000..112c97ccda --- /dev/null +++ b/packages/@n8n/task-runner/src/js-task-runner/built-ins-parser/built-ins-parser-state.ts @@ -0,0 +1,74 @@ +import type { N8nMessage } from '../../runner-types'; + +/** + * Class to keep track of which built-in variables are accessed in the code + */ +export class BuiltInsParserState { + neededNodeNames: Set = new Set(); + + needsAllNodes = false; + + needs$env = false; + + needs$input = false; + + needs$execution = false; + + needs$prevNode = false; + + constructor(opts: Partial = {}) { + Object.assign(this, opts); + } + + /** + * Marks that all nodes are needed, including input data + */ + markNeedsAllNodes() { + this.needsAllNodes = true; + this.needs$input = true; + this.neededNodeNames = new Set(); + } + + markNodeAsNeeded(nodeName: string) { + if (this.needsAllNodes) { + return; + } + + this.neededNodeNames.add(nodeName); + } + + markEnvAsNeeded() { + this.needs$env = true; + } + + markInputAsNeeded() { + this.needs$input = true; + } + + markExecutionAsNeeded() { + this.needs$execution = true; + } + + markPrevNodeAsNeeded() { + this.needs$prevNode = true; + } + + toDataRequestParams(): N8nMessage.ToRequester.TaskDataRequest['requestParams'] { + return { + dataOfNodes: this.needsAllNodes ? 
'all' : Array.from(this.neededNodeNames), + env: this.needs$env, + input: this.needs$input, + prevNode: this.needs$prevNode, + }; + } + + static newNeedsAllDataState() { + const obj = new BuiltInsParserState(); + obj.markNeedsAllNodes(); + obj.markEnvAsNeeded(); + obj.markInputAsNeeded(); + obj.markExecutionAsNeeded(); + obj.markPrevNodeAsNeeded(); + return obj; + } +} diff --git a/packages/@n8n/task-runner/src/js-task-runner/built-ins-parser/built-ins-parser.ts b/packages/@n8n/task-runner/src/js-task-runner/built-ins-parser/built-ins-parser.ts new file mode 100644 index 0000000000..dd2d849c6a --- /dev/null +++ b/packages/@n8n/task-runner/src/js-task-runner/built-ins-parser/built-ins-parser.ts @@ -0,0 +1,142 @@ +import type { CallExpression, Identifier, Node, Program } from 'acorn'; +import { parse } from 'acorn'; +import { ancestor } from 'acorn-walk'; +import type { Result } from 'n8n-workflow'; +import { toResult } from 'n8n-workflow'; + +import { + isAssignmentExpression, + isIdentifier, + isLiteral, + isMemberExpression, + isVariableDeclarator, +} from './acorn-helpers'; +import { BuiltInsParserState } from './built-ins-parser-state'; + +/** + * Class for parsing Code Node code to identify which built-in variables + * are accessed + */ +export class BuiltInsParser { + /** + * Parses which built-in variables are accessed in the given code + */ + public parseUsedBuiltIns(code: string): Result { + return toResult(() => { + const wrappedCode = `async function VmCodeWrapper() { ${code} }`; + const ast = parse(wrappedCode, { ecmaVersion: 2025, sourceType: 'module' }); + + return this.identifyBuiltInsByWalkingAst(ast); + }); + } + + /** Traverse the AST of the script and mark any data needed for it to run. */ + private identifyBuiltInsByWalkingAst(ast: Program) { + const accessedBuiltIns = new BuiltInsParserState(); + + ancestor( + ast, + { + CallExpression: this.visitCallExpression, + Identifier: this.visitIdentifier, + }, + undefined, + accessedBuiltIns, + ); + + return accessedBuiltIns; + } + + private visitCallExpression = ( + node: CallExpression, + state: BuiltInsParserState, + ancestors: Node[], + ) => { + // $(...) + const isDollar = node.callee.type === 'Identifier' && node.callee.name === '$'; + if (!isDollar) return; + + // $(): This is not valid, ignore + if (node.arguments.length === 0) { + return; + } + + const firstArg = node.arguments[0]; + if (!isLiteral(firstArg)) { + // $(variable): Can't easily determine statically, mark all nodes as needed + state.markNeedsAllNodes(); + return; + } + + if (typeof firstArg.value !== 'string') { + // $(123): Static value, but not a string --> invalid code --> ignore + return; + } + + // $("node"): Static value, mark 'nodeName' as needed + state.markNodeAsNeeded(firstArg.value); + + // Determine how $("node") is used + this.handlePrevNodeCall(node, state, ancestors); + }; + + private handlePrevNodeCall(_node: CallExpression, state: BuiltInsParserState, ancestors: Node[]) { + // $("node").item, .pairedItem or .itemMatching: In a case like this, the execution + // engine will traverse back from current node (i.e. the Code Node) to + // the "node" node and use `pairedItem`s to find which item is linked + // to the current item. So, we need to mark all nodes as needed. + // TODO: We could also mark all the nodes between the current node and + // the "node" node as needed, but that would require more complex logic. 
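+    // For example, `$("node").first()` or `$("node").all()` only marks "node"
+    // itself as needed, whereas `$("node").item`, `.pairedItem()` or
+    // `.itemMatching()` falls back to marking all nodes as needed (see the
+    // cases exercised in built-ins-parser.test.ts).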
+ const directParent = ancestors[ancestors.length - 2]; + if (isMemberExpression(directParent)) { + const accessedProperty = directParent.property; + + if (directParent.computed) { + // $("node")["item"], ["pairedItem"] or ["itemMatching"] + if (isLiteral(accessedProperty)) { + if (this.isPairedItemProperty(accessedProperty.value)) { + state.markNeedsAllNodes(); + } + // Else: $("node")[123]: Static value, but not any of the ones above --> ignore + } + // $("node")[variable] + else if (isIdentifier(accessedProperty)) { + state.markNeedsAllNodes(); + } + } + // $("node").item, .pairedItem or .itemMatching + else if (isIdentifier(accessedProperty) && this.isPairedItemProperty(accessedProperty.name)) { + state.markNeedsAllNodes(); + } + } else if (isVariableDeclarator(directParent) || isAssignmentExpression(directParent)) { + // const variable = $("node") or variable = $("node"): + // In this case we would need to track down all the possible use sites + // of 'variable' and determine if `.item` is accessed on it. This is + // more complex and skipped for now. + // TODO: Optimize for this case + state.markNeedsAllNodes(); + } else { + // Something else than the cases above. Mark all nodes as needed as it + // could be a dynamic access. + state.markNeedsAllNodes(); + } + } + + private visitIdentifier = (node: Identifier, state: BuiltInsParserState) => { + if (node.name === '$env') { + state.markEnvAsNeeded(); + } else if (node.name === '$input' || node.name === '$json') { + state.markInputAsNeeded(); + } else if (node.name === '$execution') { + state.markExecutionAsNeeded(); + } else if (node.name === '$prevNode') { + state.markPrevNodeAsNeeded(); + } + }; + + private isPairedItemProperty( + property?: string | boolean | null | number | RegExp | bigint, + ): boolean { + return property === 'item' || property === 'pairedItem' || property === 'itemMatching'; + } +} diff --git a/packages/@n8n/task-runner/src/js-task-runner/js-task-runner.ts b/packages/@n8n/task-runner/src/js-task-runner/js-task-runner.ts index 40ee12af2c..e0bebe0521 100644 --- a/packages/@n8n/task-runner/src/js-task-runner/js-task-runner.ts +++ b/packages/@n8n/task-runner/src/js-task-runner/js-task-runner.ts @@ -24,6 +24,8 @@ import { runInNewContext, type Context } from 'node:vm'; import type { TaskResultData } from '@/runner-types'; import { type Task, TaskRunner } from '@/task-runner'; +import { BuiltInsParser } from './built-ins-parser/built-ins-parser'; +import { BuiltInsParserState } from './built-ins-parser/built-ins-parser-state'; import { isErrorLike } from './errors/error-like'; import { ExecutionError } from './errors/execution-error'; import { makeSerializable } from './errors/serializable-error'; @@ -57,7 +59,7 @@ export interface PartialAdditionalData { variables: IDataObject; } -export interface AllCodeTaskData { +export interface DataRequestResponse { workflow: Omit; inputData: ITaskDataConnections; node: INode; @@ -84,6 +86,8 @@ type CustomConsole = { export class JsTaskRunner extends TaskRunner { private readonly requireResolver: RequireResolver; + private readonly builtInsParser = new BuiltInsParser(); + constructor(config: MainConfig, name = 'JS Task Runner') { super({ taskType: 'javascript', @@ -102,12 +106,20 @@ export class JsTaskRunner extends TaskRunner { } async executeTask(task: Task): Promise { - const allData = await this.requestData(task.taskId, 'all'); - const settings = task.settings; a.ok(settings, 'JS Code not sent to runner'); - const workflowParams = allData.workflow; + const 
neededBuiltInsResult = this.builtInsParser.parseUsedBuiltIns(settings.code); + const neededBuiltIns = neededBuiltInsResult.ok + ? neededBuiltInsResult.result + : BuiltInsParserState.newNeedsAllDataState(); + + const data = await this.requestData( + task.taskId, + neededBuiltIns.toDataRequestParams(), + ); + + const workflowParams = data.workflow; const workflow = new Workflow({ ...workflowParams, nodeTypes: this.nodeTypes, @@ -126,12 +138,12 @@ export class JsTaskRunner extends TaskRunner { const result = settings.nodeMode === 'runOnceForAllItems' - ? await this.runForAllItems(task.taskId, settings, allData, workflow, customConsole) - : await this.runForEachItem(task.taskId, settings, allData, workflow, customConsole); + ? await this.runForAllItems(task.taskId, settings, data, workflow, customConsole) + : await this.runForEachItem(task.taskId, settings, data, workflow, customConsole); return { result, - customData: allData.runExecutionData.resultData.metadata, + customData: data.runExecutionData.resultData.metadata, }; } @@ -165,12 +177,12 @@ export class JsTaskRunner extends TaskRunner { private async runForAllItems( taskId: string, settings: JSExecSettings, - allData: AllCodeTaskData, + data: DataRequestResponse, workflow: Workflow, customConsole: CustomConsole, ): Promise { - const dataProxy = this.createDataProxy(allData, workflow, allData.itemIndex); - const inputItems = allData.connectionInputData; + const dataProxy = this.createDataProxy(data, workflow, data.itemIndex); + const inputItems = data.connectionInputData; const context: Context = { require: this.requireResolver, @@ -212,16 +224,16 @@ export class JsTaskRunner extends TaskRunner { private async runForEachItem( taskId: string, settings: JSExecSettings, - allData: AllCodeTaskData, + data: DataRequestResponse, workflow: Workflow, customConsole: CustomConsole, ): Promise { - const inputItems = allData.connectionInputData; + const inputItems = data.connectionInputData; const returnData: INodeExecutionData[] = []; for (let index = 0; index < inputItems.length; index++) { const item = inputItems[index]; - const dataProxy = this.createDataProxy(allData, workflow, index); + const dataProxy = this.createDataProxy(data, workflow, index); const context: Context = { require: this.requireResolver, module: {}, @@ -279,33 +291,37 @@ export class JsTaskRunner extends TaskRunner { return returnData; } - private createDataProxy(allData: AllCodeTaskData, workflow: Workflow, itemIndex: number) { + private createDataProxy(data: DataRequestResponse, workflow: Workflow, itemIndex: number) { return new WorkflowDataProxy( workflow, - allData.runExecutionData, - allData.runIndex, + data.runExecutionData, + data.runIndex, itemIndex, - allData.activeNodeName, - allData.connectionInputData, - allData.siblingParameters, - allData.mode, + data.activeNodeName, + data.connectionInputData, + data.siblingParameters, + data.mode, getAdditionalKeys( - allData.additionalData as IWorkflowExecuteAdditionalData, - allData.mode, - allData.runExecutionData, + data.additionalData as IWorkflowExecuteAdditionalData, + data.mode, + data.runExecutionData, ), - allData.executeData, - allData.defaultReturnRunIndex, - allData.selfData, - allData.contextNodeName, + data.executeData, + data.defaultReturnRunIndex, + data.selfData, + data.contextNodeName, // Make sure that even if we don't receive the envProviderState for // whatever reason, we don't expose the task runner's env to the code - allData.envProviderState ?? { + data.envProviderState ?? 
{ env: {}, isEnvAccessBlocked: false, isProcessAvailable: true, }, - ).getDataProxy(); + // Because we optimize the needed data, it can be partially available. + // We assign the available built-ins to the execution context, which + // means we run the getter for '$json', and by default $json throws + // if there is no data available. + ).getDataProxy({ throwOnMissingExecutionData: false }); } private toExecutionErrorIfNeeded(error: unknown): Error { diff --git a/packages/@n8n/task-runner/src/runner-types.ts b/packages/@n8n/task-runner/src/runner-types.ts index 1e84843653..898279feac 100644 --- a/packages/@n8n/task-runner/src/runner-types.ts +++ b/packages/@n8n/task-runner/src/runner-types.ts @@ -1,6 +1,11 @@ import type { INodeExecutionData, INodeTypeBaseDescription } from 'n8n-workflow'; -export type DataRequestType = 'input' | 'node' | 'all'; +export interface TaskDataRequestParams { + dataOfNodes: string[] | 'all'; + prevNode: boolean; + input: boolean; + env: boolean; +} export interface TaskResultData { result: INodeExecutionData[]; @@ -89,8 +94,7 @@ export namespace N8nMessage { type: 'broker:taskdatarequest'; taskId: string; requestId: string; - requestType: DataRequestType; - param?: string; + requestParams: TaskDataRequestParams; } export interface RPC { @@ -186,8 +190,7 @@ export namespace RunnerMessage { type: 'runner:taskdatarequest'; taskId: string; requestId: string; - requestType: DataRequestType; - param?: string; + requestParams: TaskDataRequestParams; } export interface RPC { diff --git a/packages/@n8n/task-runner/src/task-runner.ts b/packages/@n8n/task-runner/src/task-runner.ts index 9629cc15d5..b292bd4413 100644 --- a/packages/@n8n/task-runner/src/task-runner.ts +++ b/packages/@n8n/task-runner/src/task-runner.ts @@ -288,8 +288,7 @@ export abstract class TaskRunner { async requestData( taskId: Task['taskId'], - type: RunnerMessage.ToN8n.TaskDataRequest['requestType'], - param?: string, + requestParams: RunnerMessage.ToN8n.TaskDataRequest['requestParams'], ): Promise { const requestId = nanoid(); @@ -305,8 +304,7 @@ export abstract class TaskRunner { type: 'runner:taskdatarequest', taskId, requestId, - requestType: type, - param, + requestParams, }); try { diff --git a/packages/cli/src/runners/__tests__/task-broker.test.ts b/packages/cli/src/runners/__tests__/task-broker.test.ts index a90bf7662c..8787ba5955 100644 --- a/packages/cli/src/runners/__tests__/task-broker.test.ts +++ b/packages/cli/src/runners/__tests__/task-broker.test.ts @@ -494,15 +494,18 @@ describe('TaskBroker', () => { const taskId = 'task1'; const requesterId = 'requester1'; const requestId = 'request1'; - const requestType = 'input'; - const param = 'test_param'; + const requestParams: RunnerMessage.ToN8n.TaskDataRequest['requestParams'] = { + dataOfNodes: 'all', + env: true, + input: true, + prevNode: true, + }; const message: RunnerMessage.ToN8n.TaskDataRequest = { type: 'runner:taskdatarequest', taskId, requestId, - requestType, - param, + requestParams, }; const requesterMessageCallback = jest.fn(); @@ -519,8 +522,7 @@ describe('TaskBroker', () => { type: 'broker:taskdatarequest', taskId, requestId, - requestType, - param, + requestParams, }); }); diff --git a/packages/cli/src/runners/__tests__/task-runner-process.test.ts b/packages/cli/src/runners/__tests__/task-runner-process.test.ts index eb04e3ab8e..738333bc25 100644 --- a/packages/cli/src/runners/__tests__/task-runner-process.test.ts +++ b/packages/cli/src/runners/__tests__/task-runner-process.test.ts @@ -32,10 +32,10 @@ 
describe('TaskRunnerProcess', () => { }); describe('constructor', () => { - it('should throw if runner mode is external', () => { + it('should not throw if runner mode is external', () => { runnerConfig.mode = 'external'; - expect(() => new TaskRunnerProcess(logger, runnerConfig, authService)).toThrow(); + expect(() => new TaskRunnerProcess(logger, runnerConfig, authService)).not.toThrow(); runnerConfig.mode = 'internal_childprocess'; }); diff --git a/packages/cli/src/runners/runner-types.ts b/packages/cli/src/runners/runner-types.ts index 8b205de801..a030f3874e 100644 --- a/packages/cli/src/runners/runner-types.ts +++ b/packages/cli/src/runners/runner-types.ts @@ -5,7 +5,17 @@ import type WebSocket from 'ws'; import type { TaskRunner } from './task-broker.service'; import type { AuthlessRequest } from '../requests'; -export type DataRequestType = 'input' | 'node' | 'all'; +/** + * Specifies what data should be included for a task data request. + */ +export interface TaskDataRequestParams { + dataOfNodes: string[] | 'all'; + prevNode: boolean; + /** Whether input data for the node should be included */ + input: boolean; + /** Whether env provider's state should be included */ + env: boolean; +} export interface TaskResultData { result: INodeExecutionData[]; @@ -101,8 +111,7 @@ export namespace N8nMessage { type: 'broker:taskdatarequest'; taskId: string; requestId: string; - requestType: DataRequestType; - param?: string; + requestParams: TaskDataRequestParams; } export interface RPC { @@ -198,8 +207,7 @@ export namespace RunnerMessage { type: 'runner:taskdatarequest'; taskId: string; requestId: string; - requestType: DataRequestType; - param?: string; + requestParams: TaskDataRequestParams; } export interface RPC { diff --git a/packages/cli/src/runners/task-broker.service.ts b/packages/cli/src/runners/task-broker.service.ts index d88d677725..754d99ef32 100644 --- a/packages/cli/src/runners/task-broker.service.ts +++ b/packages/cli/src/runners/task-broker.service.ts @@ -178,12 +178,7 @@ export class TaskBroker { await this.taskErrorHandler(message.taskId, message.error); break; case 'runner:taskdatarequest': - await this.handleDataRequest( - message.taskId, - message.requestId, - message.requestType, - message.param, - ); + await this.handleDataRequest(message.taskId, message.requestId, message.requestParams); break; case 'runner:rpc': @@ -233,8 +228,7 @@ export class TaskBroker { async handleDataRequest( taskId: Task['id'], requestId: RunnerMessage.ToN8n.TaskDataRequest['requestId'], - requestType: RunnerMessage.ToN8n.TaskDataRequest['requestType'], - param?: string, + requestParams: RunnerMessage.ToN8n.TaskDataRequest['requestParams'], ) { const task = this.tasks.get(taskId); if (!task) { @@ -244,8 +238,7 @@ export class TaskBroker { type: 'broker:taskdatarequest', taskId, requestId, - requestType, - param, + requestParams, }); } diff --git a/packages/cli/src/runners/task-managers/__tests__/data-request-response-builder.test.ts b/packages/cli/src/runners/task-managers/__tests__/data-request-response-builder.test.ts new file mode 100644 index 0000000000..8fc0198488 --- /dev/null +++ b/packages/cli/src/runners/task-managers/__tests__/data-request-response-builder.test.ts @@ -0,0 +1,324 @@ +import { mock } from 'jest-mock-extended'; +import type { IExecuteFunctions, IWorkflowExecuteAdditionalData } from 'n8n-workflow'; +import { type INode, type INodeExecutionData, type Workflow } from 'n8n-workflow'; + +import { DataRequestResponseBuilder } from '../data-request-response-builder'; +import 
type { TaskData } from '../task-manager'; + +const triggerNode: INode = mock({ + name: 'Trigger', +}); +const debugHelperNode: INode = mock({ + name: 'DebugHelper', +}); +const codeNode: INode = mock({ + name: 'Code', +}); +const workflow: TaskData['workflow'] = mock(); +const debugHelperNodeOutItems: INodeExecutionData[] = [ + { + json: { + uid: 'abb74fd4-bef2-4fae-9d53-ea24e9eb3032', + email: 'Dan.Schmidt31@yahoo.com', + firstname: 'Toni', + lastname: 'Schuster', + password: 'Q!D6C2', + }, + pairedItem: { + item: 0, + }, + }, +]; +const codeNodeInputItems: INodeExecutionData[] = debugHelperNodeOutItems; +const connectionInputData: TaskData['connectionInputData'] = codeNodeInputItems; +const envProviderState: TaskData['envProviderState'] = mock({ + env: {}, + isEnvAccessBlocked: false, + isProcessAvailable: true, +}); +const additionalData = mock({ + formWaitingBaseUrl: 'http://localhost:5678/form-waiting', + instanceBaseUrl: 'http://localhost:5678/', + restApiUrl: 'http://localhost:5678/rest', + variables: {}, + webhookBaseUrl: 'http://localhost:5678/webhook', + webhookTestBaseUrl: 'http://localhost:5678/webhook-test', + webhookWaitingBaseUrl: 'http://localhost:5678/webhook-waiting', + executionId: '45844', + userId: '114984bc-44b3-4dd4-9b54-a4a8d34d51d5', + currentNodeParameters: undefined, + executionTimeoutTimestamp: undefined, + restartExecutionId: undefined, +}); +const executeFunctions = mock(); + +/** + * Drawn with https://asciiflow.com/#/ + * Task data for an execution of the following WF: + * where ►► denotes the currently being executing node. + * ►► + * ┌───────────┐ ┌─────────────┐ ┌────────┐ + * │ Trigger ├──►│ DebugHelper ├───►│ Code │ + * └───────────┘ └─────────────┘ └────────┘ + */ +const taskData: TaskData = { + executeFunctions, + workflow, + connectionInputData, + inputData: { + main: [codeNodeInputItems], + }, + itemIndex: 0, + activeNodeName: codeNode.name, + contextNodeName: codeNode.name, + defaultReturnRunIndex: -1, + mode: 'manual', + envProviderState, + node: codeNode, + runExecutionData: { + startData: { + destinationNode: codeNode.name, + runNodeFilter: [triggerNode.name, debugHelperNode.name, codeNode.name], + }, + resultData: { + runData: { + [triggerNode.name]: [ + { + hints: [], + startTime: 1730313407328, + executionTime: 1, + source: [], + executionStatus: 'success', + data: { + main: [[]], + }, + }, + ], + [debugHelperNode.name]: [ + { + hints: [], + startTime: 1730313407330, + executionTime: 1, + source: [ + { + previousNode: triggerNode.name, + }, + ], + executionStatus: 'success', + data: { + main: [debugHelperNodeOutItems], + }, + }, + ], + }, + pinData: {}, + }, + executionData: { + contextData: {}, + nodeExecutionStack: [], + metadata: {}, + waitingExecution: { + [codeNode.name]: { + '0': { + main: [codeNodeInputItems], + }, + }, + }, + waitingExecutionSource: { + [codeNode.name]: { + '0': { + main: [ + { + previousNode: debugHelperNode.name, + }, + ], + }, + }, + }, + }, + }, + runIndex: 0, + selfData: {}, + siblingParameters: {}, + executeData: { + node: codeNode, + data: { + main: [codeNodeInputItems], + }, + source: { + main: [ + { + previousNode: debugHelperNode.name, + previousNodeOutput: 0, + }, + ], + }, + }, + additionalData, +} as const; + +describe('DataRequestResponseBuilder', () => { + const allDataParam: DataRequestResponseBuilder['requestParams'] = { + dataOfNodes: 'all', + env: true, + input: true, + prevNode: true, + }; + + const newRequestParam = (opts: Partial) => ({ + ...allDataParam, + ...opts, + }); + + describe('all data', 
() => { + it('should build the runExecutionData as is when everything is requested', () => { + const dataRequestResponseBuilder = new DataRequestResponseBuilder(taskData, allDataParam); + + const { runExecutionData } = dataRequestResponseBuilder.build(); + + expect(runExecutionData).toStrictEqual(taskData.runExecutionData); + }); + }); + + describe('envProviderState', () => { + it("should filter out envProviderState when it's not requested", () => { + const dataRequestResponseBuilder = new DataRequestResponseBuilder( + taskData, + newRequestParam({ + env: false, + }), + ); + + const result = dataRequestResponseBuilder.build(); + + expect(result.envProviderState).toStrictEqual({ + env: {}, + isEnvAccessBlocked: false, + isProcessAvailable: true, + }); + }); + }); + + describe('additionalData', () => { + it('picks only specific properties for additional data', () => { + const dataRequestResponseBuilder = new DataRequestResponseBuilder(taskData, allDataParam); + + const result = dataRequestResponseBuilder.build(); + + expect(result.additionalData).toStrictEqual({ + formWaitingBaseUrl: 'http://localhost:5678/form-waiting', + instanceBaseUrl: 'http://localhost:5678/', + restApiUrl: 'http://localhost:5678/rest', + webhookBaseUrl: 'http://localhost:5678/webhook', + webhookTestBaseUrl: 'http://localhost:5678/webhook-test', + webhookWaitingBaseUrl: 'http://localhost:5678/webhook-waiting', + executionId: '45844', + userId: '114984bc-44b3-4dd4-9b54-a4a8d34d51d5', + currentNodeParameters: undefined, + executionTimeoutTimestamp: undefined, + restartExecutionId: undefined, + variables: additionalData.variables, + }); + }); + }); + + describe('input data', () => { + const allExceptInputParam = newRequestParam({ + input: false, + }); + + it('drops input data from executeData', () => { + const result = new DataRequestResponseBuilder(taskData, allExceptInputParam).build(); + + expect(result.executeData).toStrictEqual({ + node: taskData.executeData!.node, + source: taskData.executeData!.source, + data: {}, + }); + }); + + it('drops input data from result', () => { + const result = new DataRequestResponseBuilder(taskData, allExceptInputParam).build(); + + expect(result.inputData).toStrictEqual({}); + }); + + it('drops input data from result', () => { + const result = new DataRequestResponseBuilder(taskData, allExceptInputParam).build(); + + expect(result.inputData).toStrictEqual({}); + }); + + it('drops input data from connectionInputData', () => { + const result = new DataRequestResponseBuilder(taskData, allExceptInputParam).build(); + + expect(result.connectionInputData).toStrictEqual([]); + }); + }); + + describe('nodes', () => { + it('should return empty run data when only Code node is requested', () => { + const result = new DataRequestResponseBuilder( + taskData, + newRequestParam({ dataOfNodes: ['Code'], prevNode: false }), + ).build(); + + expect(result.runExecutionData.resultData.runData).toStrictEqual({}); + expect(result.runExecutionData.resultData.pinData).toStrictEqual({}); + // executionData & startData contain only metadata --> returned as is + expect(result.runExecutionData.startData).toStrictEqual(taskData.runExecutionData.startData); + expect(result.runExecutionData.executionData).toStrictEqual( + taskData.runExecutionData.executionData, + ); + }); + + it('should return empty run data when only Code node is requested', () => { + const result = new DataRequestResponseBuilder( + taskData, + newRequestParam({ dataOfNodes: [codeNode.name], prevNode: false }), + ).build(); + + 
expect(result.runExecutionData.resultData.runData).toStrictEqual({}); + expect(result.runExecutionData.resultData.pinData).toStrictEqual({}); + // executionData & startData contain only metadata --> returned as is + expect(result.runExecutionData.startData).toStrictEqual(taskData.runExecutionData.startData); + expect(result.runExecutionData.executionData).toStrictEqual( + taskData.runExecutionData.executionData, + ); + }); + + it("should return only DebugHelper's data when only DebugHelper node is requested", () => { + const result = new DataRequestResponseBuilder( + taskData, + newRequestParam({ dataOfNodes: [debugHelperNode.name], prevNode: false }), + ).build(); + + expect(result.runExecutionData.resultData.runData).toStrictEqual({ + [debugHelperNode.name]: taskData.runExecutionData.resultData.runData[debugHelperNode.name], + }); + expect(result.runExecutionData.resultData.pinData).toStrictEqual({}); + // executionData & startData contain only metadata --> returned as is + expect(result.runExecutionData.startData).toStrictEqual(taskData.runExecutionData.startData); + expect(result.runExecutionData.executionData).toStrictEqual( + taskData.runExecutionData.executionData, + ); + }); + + it("should return DebugHelper's data when only prevNode node is requested", () => { + const result = new DataRequestResponseBuilder( + taskData, + newRequestParam({ dataOfNodes: [], prevNode: true }), + ).build(); + + expect(result.runExecutionData.resultData.runData).toStrictEqual({ + [debugHelperNode.name]: taskData.runExecutionData.resultData.runData[debugHelperNode.name], + }); + expect(result.runExecutionData.resultData.pinData).toStrictEqual({}); + // executionData & startData contain only metadata --> returned as is + expect(result.runExecutionData.startData).toStrictEqual(taskData.runExecutionData.startData); + expect(result.runExecutionData.executionData).toStrictEqual( + taskData.runExecutionData.executionData, + ); + }); + }); +}); diff --git a/packages/cli/src/runners/task-managers/data-request-response-builder.ts b/packages/cli/src/runners/task-managers/data-request-response-builder.ts new file mode 100644 index 0000000000..6f49743aeb --- /dev/null +++ b/packages/cli/src/runners/task-managers/data-request-response-builder.ts @@ -0,0 +1,205 @@ +import type { + EnvProviderState, + IExecuteData, + INodeExecutionData, + IPinData, + IRunData, + IRunExecutionData, + ITaskDataConnections, + IWorkflowExecuteAdditionalData, + Workflow, + WorkflowParameters, +} from 'n8n-workflow'; + +import type { DataRequestResponse, PartialAdditionalData, TaskData } from './task-manager'; +import type { N8nMessage } from '../runner-types'; + +/** + * Builds the response to a data request coming from a Task Runner. Tries to minimize + * the amount of data that is sent to the runner by only providing what is requested. 
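+ *
+ * Usage sketch (the request parameter values below are illustrative only):
+ *
+ *   const response = new DataRequestResponseBuilder(taskData, {
+ *     dataOfNodes: ['DebugHelper'],
+ *     env: false,
+ *     input: true,
+ *     prevNode: false,
+ *   }).build();
+ *
+ * `build()` then returns a `DataRequestResponse` whose run data is filtered
+ * down to the requested nodes.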
+ */ +export class DataRequestResponseBuilder { + private requestedNodeNames = new Set(); + + constructor( + private readonly taskData: TaskData, + private readonly requestParams: N8nMessage.ToRequester.TaskDataRequest['requestParams'], + ) { + this.requestedNodeNames = new Set(requestParams.dataOfNodes); + + if (this.requestParams.prevNode && this.requestParams.dataOfNodes !== 'all') { + this.requestedNodeNames.add(this.determinePrevNodeName()); + } + } + + /** + * Builds a response to the data request + */ + build(): DataRequestResponse { + const { taskData: td } = this; + + return { + workflow: this.buildWorkflow(td.workflow), + connectionInputData: this.buildConnectionInputData(td.connectionInputData), + inputData: this.buildInputData(td.inputData), + itemIndex: td.itemIndex, + activeNodeName: td.activeNodeName, + contextNodeName: td.contextNodeName, + defaultReturnRunIndex: td.defaultReturnRunIndex, + mode: td.mode, + envProviderState: this.buildEnvProviderState(td.envProviderState), + node: td.node, // The current node being executed + runExecutionData: this.buildRunExecutionData(td.runExecutionData), + runIndex: td.runIndex, + selfData: td.selfData, + siblingParameters: td.siblingParameters, + executeData: this.buildExecuteData(td.executeData), + additionalData: this.buildAdditionalData(td.additionalData), + }; + } + + private buildAdditionalData( + additionalData: IWorkflowExecuteAdditionalData, + ): PartialAdditionalData { + return { + formWaitingBaseUrl: additionalData.formWaitingBaseUrl, + instanceBaseUrl: additionalData.instanceBaseUrl, + restApiUrl: additionalData.restApiUrl, + variables: additionalData.variables, + webhookBaseUrl: additionalData.webhookBaseUrl, + webhookTestBaseUrl: additionalData.webhookTestBaseUrl, + webhookWaitingBaseUrl: additionalData.webhookWaitingBaseUrl, + currentNodeParameters: additionalData.currentNodeParameters, + executionId: additionalData.executionId, + executionTimeoutTimestamp: additionalData.executionTimeoutTimestamp, + restartExecutionId: additionalData.restartExecutionId, + userId: additionalData.userId, + }; + } + + private buildExecuteData(executeData: IExecuteData | undefined): IExecuteData | undefined { + if (executeData === undefined) { + return undefined; + } + + return { + node: executeData.node, // The current node being executed + data: this.requestParams.input ? executeData.data : {}, + source: executeData.source, + }; + } + + private buildRunExecutionData(runExecutionData: IRunExecutionData): IRunExecutionData { + if (this.requestParams.dataOfNodes === 'all') { + return runExecutionData; + } + + return { + startData: runExecutionData.startData, + resultData: { + error: runExecutionData.resultData.error, + lastNodeExecuted: runExecutionData.resultData.lastNodeExecuted, + metadata: runExecutionData.resultData.metadata, + runData: this.buildRunData(runExecutionData.resultData.runData), + pinData: this.buildPinData(runExecutionData.resultData.pinData), + }, + executionData: runExecutionData.executionData + ? 
{ + // TODO: Figure out what these two are and can they be filtered + contextData: runExecutionData.executionData?.contextData, + nodeExecutionStack: runExecutionData.executionData.nodeExecutionStack, + + metadata: runExecutionData.executionData.metadata, + waitingExecution: runExecutionData.executionData.waitingExecution, + waitingExecutionSource: runExecutionData.executionData.waitingExecutionSource, + } + : undefined, + }; + } + + private buildRunData(runData: IRunData): IRunData { + return this.filterObjectByNodeNames(runData); + } + + private buildPinData(pinData: IPinData | undefined): IPinData | undefined { + return pinData ? this.filterObjectByNodeNames(pinData) : undefined; + } + + private buildEnvProviderState(envProviderState: EnvProviderState): EnvProviderState { + if (this.requestParams.env) { + // In case `isEnvAccessBlocked` = true, the provider state has already sanitized + // the environment variables and we can return it as is. + return envProviderState; + } + + return { + env: {}, + isEnvAccessBlocked: envProviderState.isEnvAccessBlocked, + isProcessAvailable: envProviderState.isProcessAvailable, + }; + } + + private buildInputData(inputData: ITaskDataConnections): ITaskDataConnections { + if (this.requestParams.input) { + return inputData; + } + + return {}; + } + + private buildConnectionInputData( + connectionInputData: INodeExecutionData[], + ): INodeExecutionData[] { + if (this.requestParams.input) { + return connectionInputData; + } + + return []; + } + + private buildWorkflow(workflow: Workflow): Omit { + return { + id: workflow.id, + name: workflow.name, + active: workflow.active, + connections: workflow.connectionsBySourceNode, + nodes: Object.values(workflow.nodes), + pinData: workflow.pinData, + settings: workflow.settings, + staticData: workflow.staticData, + }; + } + + /** + * Assuming the given `obj` is an object where the keys are node names, + * filters the object to only include the node names that are requested. 
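+   *
+   * For example, with `requestedNodeNames = Set { 'DebugHelper' }`, an input of
+   * `{ Trigger: [...], DebugHelper: [...] }` is reduced to `{ DebugHelper: [...] }`.
+   * When `dataOfNodes` is 'all', the object is returned unchanged.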
+ */
+	private filterObjectByNodeNames<T extends Record<string, unknown>>(obj: T): T {
+		if (this.requestParams.dataOfNodes === 'all') {
+			return obj;
+		}
+
+		const filteredObj: T = {} as T;
+
+		for (const nodeName in obj) {
+			if (!Object.prototype.hasOwnProperty.call(obj, nodeName)) {
+				continue;
+			}
+
+			if (this.requestedNodeNames.has(nodeName)) {
+				filteredObj[nodeName] = obj[nodeName];
+			}
+		}
+
+		return filteredObj;
+	}
+
+	private determinePrevNodeName(): string {
+		const sourceData = this.taskData.executeData?.source?.main?.[0];
+		if (!sourceData) {
+			return '';
+		}
+
+		return sourceData.previousNode;
+	}
+}
diff --git a/packages/cli/src/runners/task-managers/task-manager.ts b/packages/cli/src/runners/task-managers/task-manager.ts
index 58d8ade906..d1ef30665a 100644
--- a/packages/cli/src/runners/task-managers/task-manager.ts
+++ b/packages/cli/src/runners/task-managers/task-manager.ts
@@ -18,6 +18,7 @@ import {
 } from 'n8n-workflow';
 import { nanoid } from 'nanoid';
 
+import { DataRequestResponseBuilder } from './data-request-response-builder';
 import {
 	RPC_ALLOW_LIST,
 	type TaskResultData,
@@ -67,7 +68,7 @@ export interface PartialAdditionalData {
 	variables: IDataObject;
 }
 
-export interface AllCodeTaskData {
+export interface DataRequestResponse {
 	workflow: Omit<WorkflowParameters, 'nodeTypes'>;
 	inputData: ITaskDataConnections;
 	node: INode;
@@ -104,19 +105,6 @@ interface ExecuteFunctionObject {
 	[name: string]: ((...args: unknown[]) => unknown) | ExecuteFunctionObject;
 }
 
-const workflowToParameters = (workflow: Workflow): Omit<WorkflowParameters, 'nodeTypes'> => {
-	return {
-		id: workflow.id,
-		name: workflow.name,
-		active: workflow.active,
-		connections: workflow.connectionsBySourceNode,
-		nodes: Object.values(workflow.nodes),
-		pinData: workflow.pinData,
-		settings: workflow.settings,
-		staticData: workflow.staticData,
-	};
-};
-
 export class TaskManager {
 	requestAcceptRejects: Map = new Map();
 
@@ -245,7 +233,7 @@ export class TaskManager {
 				this.taskError(message.taskId, message.error);
 				break;
 			case 'broker:taskdatarequest':
-				this.sendTaskData(message.taskId, message.requestId, message.requestType);
+				this.sendTaskData(message.taskId, message.requestId, message.requestParams);
 				break;
 			case 'broker:rpc':
 				void this.handleRpc(message.taskId, message.callId, message.name, message.params);
@@ -294,54 +282,23 @@ export class TaskManager {
 	sendTaskData(
 		taskId: string,
 		requestId: string,
-		requestType: N8nMessage.ToRequester.TaskDataRequest['requestType'],
+		requestParams: N8nMessage.ToRequester.TaskDataRequest['requestParams'],
 	) {
 		const job = this.tasks.get(taskId);
 		if (!job) {
 			// TODO: logging
 			return;
 		}
-		if (requestType === 'all') {
-			const jd = job.data;
-			const ad = jd.additionalData;
-			const data: AllCodeTaskData = {
-				workflow: workflowToParameters(jd.workflow),
-				connectionInputData: jd.connectionInputData,
-				inputData: jd.inputData,
-				itemIndex: jd.itemIndex,
-				activeNodeName: jd.activeNodeName,
-				contextNodeName: jd.contextNodeName,
-				defaultReturnRunIndex: jd.defaultReturnRunIndex,
-				mode: jd.mode,
-				envProviderState: jd.envProviderState,
-				node: jd.node,
-				runExecutionData: jd.runExecutionData,
-				runIndex: jd.runIndex,
-				selfData: jd.selfData,
-				siblingParameters: jd.siblingParameters,
-				executeData: jd.executeData,
-				additionalData: {
-					formWaitingBaseUrl: ad.formWaitingBaseUrl,
-					instanceBaseUrl: ad.instanceBaseUrl,
-					restApiUrl: ad.restApiUrl,
-					variables: ad.variables,
-					webhookBaseUrl: ad.webhookBaseUrl,
-					webhookTestBaseUrl: ad.webhookTestBaseUrl,
-					webhookWaitingBaseUrl: ad.webhookWaitingBaseUrl,
-					currentNodeParameters: ad.currentNodeParameters,
-					executionId:
ad.executionId, - executionTimeoutTimestamp: ad.executionTimeoutTimestamp, - restartExecutionId: ad.restartExecutionId, - userId: ad.userId, - }, - }; - this.sendMessage({ - type: 'requester:taskdataresponse', - taskId, - requestId, - data, - }); - } + + const dataRequestResponseBuilder = new DataRequestResponseBuilder(job.data, requestParams); + const requestedData = dataRequestResponseBuilder.build(); + + this.sendMessage({ + type: 'requester:taskdataresponse', + taskId, + requestId, + data: requestedData, + }); } async handleRpc( diff --git a/packages/cli/src/runners/task-runner-process.ts b/packages/cli/src/runners/task-runner-process.ts index 5b31a96ba3..917ce2b75a 100644 --- a/packages/cli/src/runners/task-runner-process.ts +++ b/packages/cli/src/runners/task-runner-process.ts @@ -68,15 +68,14 @@ export class TaskRunnerProcess extends TypedEmitter { ) { super(); - a.ok( - this.runnerConfig.mode === 'internal_childprocess' || - this.runnerConfig.mode === 'internal_launcher', - ); - this.logger = logger.scoped('task-runner'); } async start() { + a.ok( + this.runnerConfig.mode === 'internal_childprocess' || + this.runnerConfig.mode === 'internal_launcher', + ); a.ok(!this.process, 'Task Runner Process already running'); const grantToken = await this.authService.createGrantToken(); diff --git a/packages/workflow/src/WorkflowDataProxy.ts b/packages/workflow/src/WorkflowDataProxy.ts index f140edc98f..2ad2beb3a1 100644 --- a/packages/workflow/src/WorkflowDataProxy.ts +++ b/packages/workflow/src/WorkflowDataProxy.ts @@ -388,8 +388,13 @@ export class WorkflowDataProxy { * @private * @param {string} nodeName The name of the node query data from * @param {boolean} [shortSyntax=false] If short syntax got used + * @param {boolean} [throwOnMissingExecutionData=true] If an error should get thrown if no execution data is available */ - private nodeDataGetter(nodeName: string, shortSyntax = false) { + private nodeDataGetter( + nodeName: string, + shortSyntax = false, + throwOnMissingExecutionData = true, + ) { const that = this; const node = this.workflow.nodes[nodeName]; @@ -416,6 +421,10 @@ export class WorkflowDataProxy { shortSyntax, }); + if (executionData.length === 0 && !throwOnMissingExecutionData) { + return undefined; + } + if (executionData.length === 0) { if (that.workflow.getParentNodes(nodeName).length === 0) { throw new ExpressionError('No execution data available', { @@ -613,7 +622,7 @@ export class WorkflowDataProxy { * Returns the data proxy object which allows to query data from current run * */ - getDataProxy(): IWorkflowDataProxyData { + getDataProxy(opts?: { throwOnMissingExecutionData: boolean }): IWorkflowDataProxyData { const that = this; // replacing proxies with the actual data. @@ -1367,6 +1376,7 @@ export class WorkflowDataProxy { $nodeId: that.workflow.getNode(that.activeNodeName)?.id, $webhookId: that.workflow.getNode(that.activeNodeName)?.webhookId, }; + const throwOnMissingExecutionData = opts?.throwOnMissingExecutionData ?? 
true; return new Proxy(base, { has: () => true, @@ -1374,10 +1384,11 @@ export class WorkflowDataProxy { if (name === 'isProxy') return true; if (['$data', '$json'].includes(name as string)) { - return that.nodeDataGetter(that.contextNodeName, true)?.json; + return that.nodeDataGetter(that.contextNodeName, true, throwOnMissingExecutionData)?.json; } if (name === '$binary') { - return that.nodeDataGetter(that.contextNodeName, true)?.binary; + return that.nodeDataGetter(that.contextNodeName, true, throwOnMissingExecutionData) + ?.binary; } return Reflect.get(target, name, receiver); diff --git a/packages/workflow/test/WorkflowDataProxy.test.ts b/packages/workflow/test/WorkflowDataProxy.test.ts index ac9504dcd8..89b0751321 100644 --- a/packages/workflow/test/WorkflowDataProxy.test.ts +++ b/packages/workflow/test/WorkflowDataProxy.test.ts @@ -26,6 +26,7 @@ const getProxyFromFixture = ( run: IRun | null, activeNode: string, mode?: WorkflowExecuteMode, + opts?: { throwOnMissingExecutionData: boolean }, ) => { const taskData = run?.data.resultData.runData[activeNode]?.[0]; const lastNodeConnectionInputData = taskData?.data?.main[0]; @@ -73,7 +74,7 @@ const getProxyFromFixture = ( executeData, ); - return dataProxy.getDataProxy(); + return dataProxy.getDataProxy(opts); }; describe('WorkflowDataProxy', () => { @@ -404,4 +405,42 @@ describe('WorkflowDataProxy', () => { expect(proxy.$node.PinnedSet.json.firstName).toBe('Joe'); }); }); + + describe('Partial data', () => { + const fixture = loadFixture('partial_data'); + + describe('Default behaviour (throw on missing execution data)', () => { + const proxy = getProxyFromFixture(fixture.workflow, fixture.run, 'End'); + + test('$binary', () => { + expect(() => proxy.$binary).toThrowError(ExpressionError); + }); + + test('$json', () => { + expect(() => proxy.$json).toThrowError(ExpressionError); + }); + + test('$data', () => { + expect(() => proxy.$data).toThrowError(ExpressionError); + }); + }); + + describe("Don't throw on missing execution data)", () => { + const proxy = getProxyFromFixture(fixture.workflow, fixture.run, 'End', undefined, { + throwOnMissingExecutionData: false, + }); + + test('$binary', () => { + expect(proxy.$binary).toBeUndefined(); + }); + + test('$json', () => { + expect(proxy.$json).toBeUndefined(); + }); + + test('$data', () => { + expect(proxy.$data).toBeUndefined(); + }); + }); + }); }); diff --git a/packages/workflow/test/fixtures/WorkflowDataProxy/partial_data_run.json b/packages/workflow/test/fixtures/WorkflowDataProxy/partial_data_run.json new file mode 100644 index 0000000000..ae1a51d0b8 --- /dev/null +++ b/packages/workflow/test/fixtures/WorkflowDataProxy/partial_data_run.json @@ -0,0 +1,71 @@ +{ + "data": { + "startData": {}, + "resultData": { + "runData": { + "Start": [ + { + "startTime": 1, + "executionTime": 1, + "data": { + "main": [ + [ + { + "json": {} + } + ] + ] + }, + "source": [] + } + ], + "Function": [ + { + "startTime": 1, + "executionTime": 1, + "data": { + "main": [[]] + }, + "source": [ + { + "previousNode": "Start" + } + ] + } + ], + "Rename": [ + { + "startTime": 1, + "executionTime": 1, + "data": { + "main": [[]] + }, + "source": [ + { + "previousNode": "Function" + } + ] + } + ], + "End": [ + { + "startTime": 1, + "executionTime": 1, + "data": { + "main": [[]] + }, + "source": [ + { + "previousNode": "Rename" + } + ] + } + ] + } + } + }, + "mode": "manual", + "startedAt": "2024-02-08T15:45:18.848Z", + "stoppedAt": "2024-02-08T15:45:18.862Z", + "status": "running" +} diff --git 
a/packages/workflow/test/fixtures/WorkflowDataProxy/partial_data_workflow.json b/packages/workflow/test/fixtures/WorkflowDataProxy/partial_data_workflow.json new file mode 100644 index 0000000000..9894b66626 --- /dev/null +++ b/packages/workflow/test/fixtures/WorkflowDataProxy/partial_data_workflow.json @@ -0,0 +1,86 @@ +{ + "name": "", + "nodes": [ + { + "name": "Start", + "type": "test.set", + "parameters": {}, + "typeVersion": 1, + "id": "uuid-1", + "position": [100, 200] + }, + { + "name": "Function", + "type": "test.set", + "parameters": { + "functionCode": "// Code here will run only once, no matter how many input items there are.\n// More info and help: https://docs.n8n.io/integrations/builtin/core-nodes/n8n-nodes-base.function/\nconst { DateTime, Duration, Interval } = require(\"luxon\");\n\nconst data = [\n {\n \"length\": 105\n },\n {\n \"length\": 160\n },\n {\n \"length\": 121\n },\n {\n \"length\": 275\n },\n {\n \"length\": 950\n },\n];\n\nreturn data.map(fact => ({json: fact}));" + }, + "typeVersion": 1, + "id": "uuid-2", + "position": [280, 200] + }, + { + "name": "Rename", + "type": "test.set", + "parameters": { + "value1": "data", + "value2": "initialName" + }, + "typeVersion": 1, + "id": "uuid-3", + "position": [460, 200] + }, + { + "name": "Set", + "type": "test.set", + "parameters": {}, + "typeVersion": 1, + "id": "uuid-4", + "position": [640, 200] + }, + { + "name": "End", + "type": "test.set", + "parameters": {}, + "typeVersion": 1, + "id": "uuid-5", + "position": [640, 200] + } + ], + "pinData": {}, + "connections": { + "Start": { + "main": [ + [ + { + "node": "Function", + "type": "main", + "index": 0 + } + ] + ] + }, + "Function": { + "main": [ + [ + { + "node": "Rename", + "type": "main", + "index": 0 + } + ] + ] + }, + "Rename": { + "main": [ + [ + { + "node": "End", + "type": "main", + "index": 0 + } + ] + ] + } + } +} diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index f527da5bb3..0df1bb0092 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -645,6 +645,12 @@ importers: '@n8n/config': specifier: workspace:* version: link:../config + acorn: + specifier: 8.14.0 + version: 8.14.0 + acorn-walk: + specifier: 8.3.4 + version: 8.3.4 n8n-core: specifier: workspace:* version: link:../../core @@ -1090,7 +1096,7 @@ importers: dependencies: '@langchain/core': specifier: 'catalog:' - version: 0.3.3(patch_hash=ekay3bw7hexufl733lypqvmx2e)(openai@4.63.0(zod@3.23.8)) + version: 0.3.3(patch_hash=ekay3bw7hexufl733lypqvmx2e)(openai@4.63.0(encoding@0.1.13)(zod@3.23.8)) '@n8n/client-oauth2': specifier: workspace:* version: link:../@n8n/client-oauth2 @@ -1921,7 +1927,7 @@ importers: devDependencies: '@langchain/core': specifier: 'catalog:' - version: 0.3.3(patch_hash=ekay3bw7hexufl733lypqvmx2e)(openai@4.63.0) + version: 0.3.3(patch_hash=ekay3bw7hexufl733lypqvmx2e)(openai@4.63.0(encoding@0.1.13)(zod@3.23.8)) '@types/deep-equal': specifier: ^1.0.1 version: 1.0.1 @@ -2227,7 +2233,7 @@ packages: '@azure/core-http@3.0.4': resolution: {integrity: sha512-Fok9VVhMdxAFOtqiiAtg74fL0UJkt0z3D+ouUUxcRLzZNBioPRAMJFVxiWoJljYpXsRi4GDQHzQHDc9AiYaIUQ==} engines: {node: '>=14.0.0'} - deprecated: deprecating as we migrated to core v2 + deprecated: This package is no longer supported. 
Please migrate to use @azure/core-rest-pipeline '@azure/core-lro@2.4.0': resolution: {integrity: sha512-F65+rYkll1dpw3RGm8/SSiSj+/QkMeYDanzS/QKlM1dmuneVyXbO46C88V1MRHluLGdMP6qfD3vDRYALn0z0tQ==} @@ -5475,10 +5481,6 @@ packages: peerDependencies: acorn: ^6.0.0 || ^7.0.0 || ^8.0.0 - acorn-walk@8.3.2: - resolution: {integrity: sha512-cjkyv4OtNCIeqhHrfS81QWXoCBPExR/J62oyEqepVw8WaQeSqpW2uhuLPh1m9eWhDuOo/jUXVTlifvesOWp/4A==} - engines: {node: '>=0.4.0'} - acorn-walk@8.3.4: resolution: {integrity: sha512-ueEepnujpqee2o5aIYnvHU6C0A42MNdsIDeqy5BydrkuC5R1ZuUFnm27EeFJGoEHJQgn3uleRvmTXaJgfXbt4g==} engines: {node: '>=0.4.0'} @@ -5493,6 +5495,11 @@ packages: engines: {node: '>=0.4.0'} hasBin: true + acorn@8.14.0: + resolution: {integrity: sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA==} + engines: {node: '>=0.4.0'} + hasBin: true + adm-zip@0.5.10: resolution: {integrity: sha512-x0HvcHqVJNTPk/Bw8JbLWlWoo6Wwnsug0fnYYro1HBrjxZ3G7/AZk7Ahv8JwDe1uIcz8eBqvu86FuF1POiG7vQ==} engines: {node: '>=6.0'} @@ -14689,38 +14696,6 @@ snapshots: transitivePeerDependencies: - openai - '@langchain/core@0.3.3(patch_hash=ekay3bw7hexufl733lypqvmx2e)(openai@4.63.0(zod@3.23.8))': - dependencies: - ansi-styles: 5.2.0 - camelcase: 6.3.0 - decamelize: 1.2.0 - js-tiktoken: 1.0.12 - langsmith: 0.1.59(openai@4.63.0(zod@3.23.8)) - mustache: 4.2.0 - p-queue: 6.6.2 - p-retry: 4.6.2 - uuid: 10.0.0 - zod: 3.23.8 - zod-to-json-schema: 3.23.3(zod@3.23.8) - transitivePeerDependencies: - - openai - - '@langchain/core@0.3.3(patch_hash=ekay3bw7hexufl733lypqvmx2e)(openai@4.63.0)': - dependencies: - ansi-styles: 5.2.0 - camelcase: 6.3.0 - decamelize: 1.2.0 - js-tiktoken: 1.0.12 - langsmith: 0.1.59(openai@4.63.0) - mustache: 4.2.0 - p-queue: 6.6.2 - p-retry: 4.6.2 - uuid: 10.0.0 - zod: 3.23.8 - zod-to-json-schema: 3.23.3(zod@3.23.8) - transitivePeerDependencies: - - openai - '@langchain/google-common@0.1.1(@langchain/core@0.3.3(patch_hash=ekay3bw7hexufl733lypqvmx2e)(openai@4.63.0(encoding@0.1.13)(zod@3.23.8)))(zod@3.23.8)': dependencies: '@langchain/core': 0.3.3(patch_hash=ekay3bw7hexufl733lypqvmx2e)(openai@4.63.0(encoding@0.1.13)(zod@3.23.8)) @@ -15031,7 +15006,7 @@ snapshots: '@n8n/vm2@3.9.25': dependencies: acorn: 8.12.1 - acorn-walk: 8.3.2 + acorn-walk: 8.3.4 '@n8n_io/ai-assistant-sdk@1.10.3': {} @@ -17232,7 +17207,7 @@ snapshots: '@vue/test-utils@2.4.6': dependencies: js-beautify: 1.14.9 - vue-component-type-helpers: 2.1.6 + vue-component-type-helpers: 2.1.8 '@vueuse/components@10.11.0(vue@3.5.11(typescript@5.6.2))': dependencies: @@ -17306,24 +17281,23 @@ snapshots: acorn-globals@7.0.1: dependencies: - acorn: 8.12.1 - acorn-walk: 8.3.2 + acorn: 8.14.0 + acorn-walk: 8.3.4 - acorn-jsx@5.3.2(acorn@8.12.1): + acorn-jsx@5.3.2(acorn@8.14.0): dependencies: - acorn: 8.12.1 - - acorn-walk@8.3.2: {} + acorn: 8.14.0 acorn-walk@8.3.4: dependencies: - acorn: 8.12.1 - optional: true + acorn: 8.14.0 acorn@7.4.1: {} acorn@8.12.1: {} + acorn@8.14.0: {} + adm-zip@0.5.10: {} agent-base@6.0.2: @@ -19316,7 +19290,7 @@ snapshots: eslint-import-resolver-node@0.3.9: dependencies: - debug: 3.2.7(supports-color@5.5.0) + debug: 3.2.7(supports-color@8.1.1) is-core-module: 2.13.1 resolve: 1.22.8 transitivePeerDependencies: @@ -19341,7 +19315,7 @@ snapshots: 
eslint-module-utils@2.8.0(@typescript-eslint/parser@7.2.0(eslint@8.57.0)(typescript@5.6.2))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.6.1(@typescript-eslint/parser@7.2.0(eslint@8.57.0)(typescript@5.6.2))(eslint-plugin-import@2.29.1)(eslint@8.57.0))(eslint@8.57.0): dependencies: - debug: 3.2.7(supports-color@5.5.0) + debug: 3.2.7(supports-color@8.1.1) optionalDependencies: '@typescript-eslint/parser': 7.2.0(eslint@8.57.0)(typescript@5.6.2) eslint: 8.57.0 @@ -19361,7 +19335,7 @@ snapshots: array.prototype.findlastindex: 1.2.3 array.prototype.flat: 1.3.2 array.prototype.flatmap: 1.3.2 - debug: 3.2.7(supports-color@5.5.0) + debug: 3.2.7(supports-color@8.1.1) doctrine: 2.1.0 eslint: 8.57.0 eslint-import-resolver-node: 0.3.9 @@ -19504,8 +19478,8 @@ snapshots: espree@9.6.1: dependencies: - acorn: 8.12.1 - acorn-jsx: 5.3.2(acorn@8.12.1) + acorn: 8.14.0 + acorn-jsx: 5.3.2(acorn@8.14.0) eslint-visitor-keys: 3.4.3 esprima-next@5.8.4: {} @@ -20159,7 +20133,7 @@ snapshots: array-parallel: 0.1.3 array-series: 0.1.5 cross-spawn: 4.0.2 - debug: 3.2.7(supports-color@5.5.0) + debug: 3.2.7(supports-color@8.1.1) transitivePeerDependencies: - supports-color @@ -21498,28 +21472,6 @@ snapshots: optionalDependencies: openai: 4.63.0(encoding@0.1.13)(zod@3.23.8) - langsmith@0.1.59(openai@4.63.0(zod@3.23.8)): - dependencies: - '@types/uuid': 10.0.0 - commander: 10.0.1 - p-queue: 6.6.2 - p-retry: 4.6.2 - semver: 7.6.0 - uuid: 10.0.0 - optionalDependencies: - openai: 4.63.0(zod@3.23.8) - - langsmith@0.1.59(openai@4.63.0): - dependencies: - '@types/uuid': 10.0.0 - commander: 10.0.1 - p-queue: 6.6.2 - p-retry: 4.6.2 - semver: 7.6.0 - uuid: 10.0.0 - optionalDependencies: - openai: 4.63.0(zod@3.23.8) - lazy-ass@1.6.0: {} ldapts@4.2.6: @@ -22352,14 +22304,14 @@ snapshots: mlly@1.4.2: dependencies: - acorn: 8.12.1 + acorn: 8.14.0 pathe: 1.1.2 pkg-types: 1.0.3 ufo: 1.3.2 mlly@1.7.1: dependencies: - acorn: 8.12.1 + acorn: 8.14.0 pathe: 1.1.2 pkg-types: 1.1.3 ufo: 1.5.4 @@ -22864,22 +22816,6 @@ snapshots: - encoding - supports-color - openai@4.63.0(zod@3.23.8): - dependencies: - '@types/node': 18.16.16 - '@types/node-fetch': 2.6.4 - abort-controller: 3.0.0 - agentkeepalive: 4.2.1 - form-data-encoder: 1.7.2 - formdata-node: 4.4.1 - node-fetch: 2.7.0(encoding@0.1.13) - optionalDependencies: - zod: 3.23.8 - transitivePeerDependencies: - - encoding - - supports-color - optional: true - openapi-sampler@1.5.1: dependencies: '@types/json-schema': 7.0.15 @@ -23060,7 +22996,7 @@ snapshots: pdf-parse@1.1.1: dependencies: - debug: 3.2.7(supports-color@5.5.0) + debug: 3.2.7(supports-color@8.1.1) node-ensure: 0.0.0 transitivePeerDependencies: - supports-color @@ -23889,7 +23825,7 @@ snapshots: rhea@1.0.24: dependencies: - debug: 3.2.7(supports-color@5.5.0) + debug: 3.2.7(supports-color@8.1.1) transitivePeerDependencies: - supports-color @@ -24786,7 +24722,7 @@ snapshots: terser@5.16.1: dependencies: '@jridgewell/source-map': 0.3.6 - acorn: 8.12.1 + acorn: 8.14.0 commander: 2.20.3 source-map-support: 0.5.21 optional: true @@ -24959,7 +24895,7 @@ snapshots: '@tsconfig/node14': 1.0.3 '@tsconfig/node16': 1.0.4 '@types/node': 18.16.16 - acorn: 8.12.1 + acorn: 8.14.0 acorn-walk: 8.3.4 arg: 4.1.3 create-require: 1.1.1 @@ -25253,14 +25189,14 @@ snapshots: unplugin@1.0.1: dependencies: - acorn: 8.12.1 + acorn: 8.14.0 chokidar: 4.0.1 webpack-sources: 3.2.3 webpack-virtual-modules: 0.5.0 unplugin@1.11.0: dependencies: - acorn: 8.12.1 + acorn: 8.14.0 chokidar: 4.0.1 webpack-sources: 3.2.3 webpack-virtual-modules: 0.6.1 
From 1be7de6180f90c79873dab6d5682f5d82598de2f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E0=A4=95=E0=A4=BE=E0=A4=B0=E0=A4=A4=E0=A5=8B=E0=A4=AB?= =?UTF-8?q?=E0=A5=8D=E0=A4=AB=E0=A5=87=E0=A4=B2=E0=A4=B8=E0=A5=8D=E0=A4=95?= =?UTF-8?q?=E0=A5=8D=E0=A4=B0=E0=A4=BF=E0=A4=AA=E0=A5=8D=E0=A4=9F=E2=84=A2?= Date: Mon, 4 Nov 2024 11:13:44 +0100 Subject: [PATCH 04/21] refactor(core): Extract trigger context out of NodeExecutionFunctions (no-changelog) (#11453) --- packages/cli/src/active-workflow-manager.ts | 23 +++-- packages/core/src/NodeExecuteFunctions.ts | 65 +------------ .../__tests__/trigger-context.test.ts | 96 +++++++++++++++++++ .../__tests__/ssh-tunnel-helpers.test.ts | 32 +++++++ .../helpers/ssh-tunnel-helpers.ts | 18 ++++ .../core/src/node-execution-context/index.ts | 1 + .../node-execution-context/trigger-context.ts | 96 +++++++++++++++++++ 7 files changed, 256 insertions(+), 75 deletions(-) create mode 100644 packages/core/src/node-execution-context/__tests__/trigger-context.test.ts create mode 100644 packages/core/src/node-execution-context/helpers/__tests__/ssh-tunnel-helpers.test.ts create mode 100644 packages/core/src/node-execution-context/helpers/ssh-tunnel-helpers.ts create mode 100644 packages/core/src/node-execution-context/trigger-context.ts diff --git a/packages/cli/src/active-workflow-manager.ts b/packages/cli/src/active-workflow-manager.ts index de3ff6d25e..22cc0f5700 100644 --- a/packages/cli/src/active-workflow-manager.ts +++ b/packages/cli/src/active-workflow-manager.ts @@ -1,6 +1,12 @@ /* eslint-disable @typescript-eslint/no-unsafe-member-access */ -import { ActiveWorkflows, InstanceSettings, NodeExecuteFunctions, PollContext } from 'n8n-core'; +import { + ActiveWorkflows, + InstanceSettings, + NodeExecuteFunctions, + PollContext, + TriggerContext, +} from 'n8n-core'; import type { ExecutionError, IDeferredPromise, @@ -325,18 +331,11 @@ export class ActiveWorkflowManager { activation: WorkflowActivateMode, ): IGetExecuteTriggerFunctions { return (workflow: Workflow, node: INode) => { - const returnFunctions = NodeExecuteFunctions.getExecuteTriggerFunctions( - workflow, - node, - additionalData, - mode, - activation, - ); - returnFunctions.emit = ( + const emit = ( data: INodeExecutionData[][], responsePromise?: IDeferredPromise, donePromise?: IDeferredPromise, - ): void => { + ) => { this.logger.debug(`Received trigger for workflow "${workflow.name}"`); void this.workflowStaticDataService.saveStaticData(workflow); @@ -360,7 +359,7 @@ export class ActiveWorkflowManager { executePromise.catch((error: Error) => this.logger.error(error.message, { error })); } }; - returnFunctions.emitError = (error: Error): void => { + const emitError = (error: Error): void => { this.logger.info( `The trigger node "${node.name}" of workflow "${workflowData.name}" failed with the error: "${error.message}". 
Will try to reactivate.`, { @@ -385,7 +384,7 @@ export class ActiveWorkflowManager { this.addQueuedWorkflowActivation(activation, workflowData as WorkflowEntity); }; - return returnFunctions; + return new TriggerContext(workflow, node, additionalData, mode, activation, emit, emitError); }; } diff --git a/packages/core/src/NodeExecuteFunctions.ts b/packages/core/src/NodeExecuteFunctions.ts index b6028b9194..978e4bacd9 100644 --- a/packages/core/src/NodeExecuteFunctions.ts +++ b/packages/core/src/NodeExecuteFunctions.ts @@ -101,7 +101,6 @@ import type { INodeParameters, EnsureTypeOptions, SSHTunnelFunctions, - SchedulingFunctions, DeduplicationHelperFunctions, IDeduplicationOutput, IDeduplicationOutputItems, @@ -168,8 +167,7 @@ import { extractValue } from './ExtractValue'; import { InstanceSettings } from './InstanceSettings'; import type { ExtendedValidationResult, IResponseError } from './Interfaces'; // eslint-disable-next-line import/no-cycle -import { PollContext } from './node-execution-context'; -import { ScheduledTaskManager } from './ScheduledTaskManager'; +import { PollContext, TriggerContext } from './node-execution-context'; import { getSecretsProxy } from './Secrets'; import { SSHClientsManager } from './SSHClientsManager'; @@ -3346,14 +3344,6 @@ const getSSHTunnelFunctions = (): SSHTunnelFunctions => ({ await Container.get(SSHClientsManager).getClient(credentials), }); -const getSchedulingFunctions = (workflow: Workflow): SchedulingFunctions => { - const scheduledTaskManager = Container.get(ScheduledTaskManager); - return { - registerCron: (cronExpression, onTick) => - scheduledTaskManager.registerCron(workflow, cronExpression, onTick), - }; -}; - const getAllowedPaths = () => { const restrictFileAccessTo = process.env[RESTRICT_FILE_ACCESS_TO]; if (!restrictFileAccessTo) { @@ -3571,58 +3561,7 @@ export function getExecuteTriggerFunctions( mode: WorkflowExecuteMode, activation: WorkflowActivateMode, ): ITriggerFunctions { - return ((workflow: Workflow, node: INode) => { - return { - ...getCommonWorkflowFunctions(workflow, node, additionalData), - emit: (): void => { - throw new ApplicationError( - 'Overwrite NodeExecuteFunctions.getExecuteTriggerFunctions.emit function', - ); - }, - emitError: (): void => { - throw new ApplicationError( - 'Overwrite NodeExecuteFunctions.getExecuteTriggerFunctions.emit function', - ); - }, - getMode: () => mode, - getActivationMode: () => activation, - getCredentials: async (type) => - await getCredentials(workflow, node, type, additionalData, mode), - getNodeParameter: ( - parameterName: string, - fallbackValue?: any, - options?: IGetNodeParameterOptions, - ): NodeParameterValueType | object => { - const runExecutionData: IRunExecutionData | null = null; - const itemIndex = 0; - const runIndex = 0; - const connectionInputData: INodeExecutionData[] = []; - - return getNodeParameter( - workflow, - runExecutionData, - runIndex, - connectionInputData, - node, - parameterName, - itemIndex, - mode, - getAdditionalKeys(additionalData, mode, runExecutionData), - undefined, - fallbackValue, - options, - ); - }, - helpers: { - createDeferredPromise, - ...getSSHTunnelFunctions(), - ...getRequestHelperFunctions(workflow, node, additionalData), - ...getBinaryHelperFunctions(additionalData, workflow.id), - ...getSchedulingFunctions(workflow), - returnJsonArray, - }, - }; - })(workflow, node); + return new TriggerContext(workflow, node, additionalData, mode, activation); } /** diff --git 
a/packages/core/src/node-execution-context/__tests__/trigger-context.test.ts b/packages/core/src/node-execution-context/__tests__/trigger-context.test.ts
new file mode 100644
index 0000000000..3c91d22e6d
--- /dev/null
+++ b/packages/core/src/node-execution-context/__tests__/trigger-context.test.ts
@@ -0,0 +1,96 @@
+import { mock } from 'jest-mock-extended';
+import type {
+	Expression,
+	ICredentialDataDecryptedObject,
+	ICredentialsHelper,
+	INode,
+	INodeType,
+	INodeTypes,
+	IWorkflowExecuteAdditionalData,
+	Workflow,
+	WorkflowActivateMode,
+	WorkflowExecuteMode,
+} from 'n8n-workflow';
+
+import { TriggerContext } from '../trigger-context';
+
+describe('TriggerContext', () => {
+	const testCredentialType = 'testCredential';
+	const nodeType = mock<INodeType>({
+		description: {
+			credentials: [
+				{
+					name: testCredentialType,
+					required: true,
+				},
+			],
+			properties: [
+				{
+					name: 'testParameter',
+					required: true,
+				},
+			],
+		},
+	});
+	const nodeTypes = mock<INodeTypes>();
+	const expression = mock<Expression>();
+	const workflow = mock<Workflow>({ expression, nodeTypes });
+	const node = mock<INode>({
+		credentials: {
+			[testCredentialType]: {
+				id: 'testCredentialId',
+			},
+		},
+	});
+	node.parameters = {
+		testParameter: 'testValue',
+	};
+	const credentialsHelper = mock<ICredentialsHelper>();
+	const additionalData = mock<IWorkflowExecuteAdditionalData>({ credentialsHelper });
+	const mode: WorkflowExecuteMode = 'manual';
+	const activation: WorkflowActivateMode = 'init';
+
+	const triggerContext = new TriggerContext(workflow, node, additionalData, mode, activation);
+
+	beforeEach(() => {
+		jest.clearAllMocks();
+	});
+
+	describe('getActivationMode', () => {
+		it('should return the activation property', () => {
+			const result = triggerContext.getActivationMode();
+			expect(result).toBe(activation);
+		});
+	});
+
+	describe('getCredentials', () => {
+		it('should get decrypted credentials', async () => {
+			nodeTypes.getByNameAndVersion.mockReturnValue(nodeType);
+			credentialsHelper.getDecrypted.mockResolvedValue({ secret: 'token' });
+
+			const credentials =
SSHTunnelHelpers(); + + beforeEach(() => { + jest.clearAllMocks(); + }); + + describe('getSSHClient', () => { + const credentials = mock(); + + it('should call SSHClientsManager.getClient with the given credentials', async () => { + const mockClient = mock(); + sshClientsManager.getClient.mockResolvedValue(mockClient); + + const client = await sshTunnelHelpers.getSSHClient(credentials); + + expect(sshClientsManager.getClient).toHaveBeenCalledWith(credentials); + expect(client).toBe(mockClient); + }); + }); +}); diff --git a/packages/core/src/node-execution-context/helpers/ssh-tunnel-helpers.ts b/packages/core/src/node-execution-context/helpers/ssh-tunnel-helpers.ts new file mode 100644 index 0000000000..f44df0e166 --- /dev/null +++ b/packages/core/src/node-execution-context/helpers/ssh-tunnel-helpers.ts @@ -0,0 +1,18 @@ +import type { SSHCredentials, SSHTunnelFunctions } from 'n8n-workflow'; +import { Container } from 'typedi'; + +import { SSHClientsManager } from '@/SSHClientsManager'; + +export class SSHTunnelHelpers { + private readonly sshClientsManager = Container.get(SSHClientsManager); + + get exported(): SSHTunnelFunctions { + return { + getSSHClient: this.getSSHClient.bind(this), + }; + } + + async getSSHClient(credentials: SSHCredentials) { + return await this.sshClientsManager.getClient(credentials); + } +} diff --git a/packages/core/src/node-execution-context/index.ts b/packages/core/src/node-execution-context/index.ts index 5182804dee..1a64023850 100644 --- a/packages/core/src/node-execution-context/index.ts +++ b/packages/core/src/node-execution-context/index.ts @@ -1,2 +1,3 @@ // eslint-disable-next-line import/no-cycle export { PollContext } from './poll-context'; +export { TriggerContext } from './trigger-context'; diff --git a/packages/core/src/node-execution-context/trigger-context.ts b/packages/core/src/node-execution-context/trigger-context.ts new file mode 100644 index 0000000000..8535ccfe6c --- /dev/null +++ b/packages/core/src/node-execution-context/trigger-context.ts @@ -0,0 +1,96 @@ +import type { + ICredentialDataDecryptedObject, + IGetNodeParameterOptions, + INode, + INodeExecutionData, + IRunExecutionData, + ITriggerFunctions, + IWorkflowExecuteAdditionalData, + NodeParameterValueType, + Workflow, + WorkflowActivateMode, + WorkflowExecuteMode, +} from 'n8n-workflow'; +import { ApplicationError, createDeferredPromise } from 'n8n-workflow'; + +// eslint-disable-next-line import/no-cycle +import { + getAdditionalKeys, + getCredentials, + getNodeParameter, + returnJsonArray, +} from '@/NodeExecuteFunctions'; + +import { BinaryHelpers } from './helpers/binary-helpers'; +import { RequestHelpers } from './helpers/request-helpers'; +import { SchedulingHelpers } from './helpers/scheduling-helpers'; +import { SSHTunnelHelpers } from './helpers/ssh-tunnel-helpers'; +import { NodeExecutionContext } from './node-execution-context'; + +const throwOnEmit = () => { + throw new ApplicationError('Overwrite TriggerContext.emit function'); +}; + +const throwOnEmitError = () => { + throw new ApplicationError('Overwrite TriggerContext.emitError function'); +}; + +export class TriggerContext extends NodeExecutionContext implements ITriggerFunctions { + readonly helpers: ITriggerFunctions['helpers']; + + constructor( + workflow: Workflow, + node: INode, + additionalData: IWorkflowExecuteAdditionalData, + mode: WorkflowExecuteMode, + private readonly activation: WorkflowActivateMode, + readonly emit: ITriggerFunctions['emit'] = throwOnEmit, + readonly emitError: 
ITriggerFunctions['emitError'] = throwOnEmitError, + ) { + super(workflow, node, additionalData, mode); + + this.helpers = { + createDeferredPromise, + returnJsonArray, + ...new BinaryHelpers(workflow, additionalData).exported, + ...new RequestHelpers(this, workflow, node, additionalData).exported, + ...new SchedulingHelpers(workflow).exported, + ...new SSHTunnelHelpers().exported, + }; + } + + getActivationMode() { + return this.activation; + } + + async getCredentials(type: string) { + return await getCredentials(this.workflow, this.node, type, this.additionalData, this.mode); + } + + getNodeParameter( + parameterName: string, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + fallbackValue?: any, + options?: IGetNodeParameterOptions, + ): NodeParameterValueType | object { + const runExecutionData: IRunExecutionData | null = null; + const itemIndex = 0; + const runIndex = 0; + const connectionInputData: INodeExecutionData[] = []; + + return getNodeParameter( + this.workflow, + runExecutionData, + runIndex, + connectionInputData, + this.node, + parameterName, + itemIndex, + this.mode, + getAdditionalKeys(this.additionalData, this.mode, runExecutionData), + undefined, + fallbackValue, + options, + ); + } +} From 097c2542d060711a78dd8c3cb65778c3979ff4c3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E0=A4=95=E0=A4=BE=E0=A4=B0=E0=A4=A4=E0=A5=8B=E0=A4=AB?= =?UTF-8?q?=E0=A5=8D=E0=A4=AB=E0=A5=87=E0=A4=B2=E0=A4=B8=E0=A5=8D=E0=A4=95?= =?UTF-8?q?=E0=A5=8D=E0=A4=B0=E0=A4=BF=E0=A4=AA=E0=A5=8D=E0=A4=9F=E2=84=A2?= Date: Mon, 4 Nov 2024 11:43:19 +0100 Subject: [PATCH 05/21] refactor(core): Extract webhook context out of NodeExecutionFunctions (no-changelog) (#11455) --- packages/core/src/NodeExecuteFunctions.ts | 182 +------------ .../__tests__/webhook-context.test.ts | 161 ++++++++++++ .../core/src/node-execution-context/index.ts | 1 + .../node-execution-context/webhook-context.ts | 239 ++++++++++++++++++ 4 files changed, 413 insertions(+), 170 deletions(-) create mode 100644 packages/core/src/node-execution-context/__tests__/webhook-context.test.ts create mode 100644 packages/core/src/node-execution-context/webhook-context.ts diff --git a/packages/core/src/NodeExecuteFunctions.ts b/packages/core/src/NodeExecuteFunctions.ts index 978e4bacd9..5560b447b9 100644 --- a/packages/core/src/NodeExecuteFunctions.ts +++ b/packages/core/src/NodeExecuteFunctions.ts @@ -23,12 +23,11 @@ import type { } from 'axios'; import axios from 'axios'; import crypto, { createHmac } from 'crypto'; -import type { Request, Response } from 'express'; import FileType from 'file-type'; import FormData from 'form-data'; import { createReadStream } from 'fs'; import { access as fsAccess, writeFile as fsWriteFile } from 'fs/promises'; -import { IncomingMessage, type IncomingHttpHeaders } from 'http'; +import { IncomingMessage } from 'http'; import { Agent, type AgentOptions } from 'https'; import get from 'lodash/get'; import isEmpty from 'lodash/isEmpty'; @@ -167,7 +166,7 @@ import { extractValue } from './ExtractValue'; import { InstanceSettings } from './InstanceSettings'; import type { ExtendedValidationResult, IResponseError } from './Interfaces'; // eslint-disable-next-line import/no-cycle -import { PollContext, TriggerContext } from './node-execution-context'; +import { PollContext, TriggerContext, WebhookContext } from './node-execution-context'; import { getSecretsProxy } from './Secrets'; import { SSHClientsManager } from './SSHClientsManager'; @@ -2800,7 +2799,7 @@ const addExecutionDataFunctions = async ( } }; 
-async function getInputConnectionData( +export async function getInputConnectionData( this: IAllExecuteFunctions, workflow: Workflow, runExecutionData: IRunExecutionData, @@ -4491,170 +4490,13 @@ export function getExecuteWebhookFunctions( closeFunctions: CloseFunction[], runExecutionData: IRunExecutionData | null, ): IWebhookFunctions { - return ((workflow: Workflow, node: INode, runExecutionData: IRunExecutionData | null) => { - return { - ...getCommonWorkflowFunctions(workflow, node, additionalData), - getBodyData(): IDataObject { - if (additionalData.httpRequest === undefined) { - throw new ApplicationError('Request is missing'); - } - return additionalData.httpRequest.body; - }, - getCredentials: async (type) => - await getCredentials(workflow, node, type, additionalData, mode), - getHeaderData(): IncomingHttpHeaders { - if (additionalData.httpRequest === undefined) { - throw new ApplicationError('Request is missing'); - } - return additionalData.httpRequest.headers; - }, - async getInputConnectionData( - inputName: NodeConnectionType, - itemIndex: number, - ): Promise { - // To be able to use expressions like "$json.sessionId" set the - // body data the webhook received to what is normally used for - // incoming node data. - const connectionInputData: INodeExecutionData[] = [ - { json: additionalData.httpRequest?.body || {} }, - ]; - const runExecutionData: IRunExecutionData = { - resultData: { - runData: {}, - }, - }; - const executeData: IExecuteData = { - data: { - main: [connectionInputData], - }, - node, - source: null, - }; - const runIndex = 0; - - return await getInputConnectionData.call( - this, - workflow, - runExecutionData, - runIndex, - connectionInputData, - {} as ITaskDataConnections, - additionalData, - executeData, - mode, - closeFunctions, - inputName, - itemIndex, - ); - }, - getMode: () => mode, - evaluateExpression: (expression: string, evaluateItemIndex?: number) => { - const itemIndex = evaluateItemIndex === undefined ? 
0 : evaluateItemIndex; - const runIndex = 0; - - let connectionInputData: INodeExecutionData[] = []; - let executionData: IExecuteData | undefined; - - if (runExecutionData?.executionData !== undefined) { - executionData = runExecutionData.executionData.nodeExecutionStack[0]; - - if (executionData !== undefined) { - connectionInputData = executionData.data.main[0]!; - } - } - - const additionalKeys = getAdditionalKeys(additionalData, mode, runExecutionData); - - return workflow.expression.resolveSimpleParameterValue( - `=${expression}`, - {}, - runExecutionData, - runIndex, - itemIndex, - node.name, - connectionInputData, - mode, - additionalKeys, - executionData, - ); - }, - getNodeParameter: ( - parameterName: string, - fallbackValue?: any, - options?: IGetNodeParameterOptions, - ): NodeParameterValueType | object => { - const itemIndex = 0; - const runIndex = 0; - - let connectionInputData: INodeExecutionData[] = []; - let executionData: IExecuteData | undefined; - - if (runExecutionData?.executionData !== undefined) { - executionData = runExecutionData.executionData.nodeExecutionStack[0]; - - if (executionData !== undefined) { - connectionInputData = executionData.data.main[0]!; - } - } - - const additionalKeys = getAdditionalKeys(additionalData, mode, runExecutionData); - - return getNodeParameter( - workflow, - runExecutionData, - runIndex, - connectionInputData, - node, - parameterName, - itemIndex, - mode, - additionalKeys, - executionData, - fallbackValue, - options, - ); - }, - getParamsData(): object { - if (additionalData.httpRequest === undefined) { - throw new ApplicationError('Request is missing'); - } - return additionalData.httpRequest.params; - }, - getQueryData(): object { - if (additionalData.httpRequest === undefined) { - throw new ApplicationError('Request is missing'); - } - return additionalData.httpRequest.query; - }, - getRequestObject(): Request { - if (additionalData.httpRequest === undefined) { - throw new ApplicationError('Request is missing'); - } - return additionalData.httpRequest; - }, - getResponseObject(): Response { - if (additionalData.httpResponse === undefined) { - throw new ApplicationError('Response is missing'); - } - return additionalData.httpResponse; - }, - getNodeWebhookUrl: (name: string): string | undefined => - getNodeWebhookUrl( - name, - workflow, - node, - additionalData, - mode, - getAdditionalKeys(additionalData, mode, null), - ), - getWebhookName: () => webhookData.webhookDescription.name, - helpers: { - createDeferredPromise, - ...getRequestHelperFunctions(workflow, node, additionalData), - ...getBinaryHelperFunctions(additionalData, workflow.id), - returnJsonArray, - }, - nodeHelpers: getNodeHelperFunctions(additionalData, workflow.id), - }; - })(workflow, node, runExecutionData); + return new WebhookContext( + workflow, + node, + additionalData, + mode, + webhookData, + closeFunctions, + runExecutionData, + ); } diff --git a/packages/core/src/node-execution-context/__tests__/webhook-context.test.ts b/packages/core/src/node-execution-context/__tests__/webhook-context.test.ts new file mode 100644 index 0000000000..15d425c988 --- /dev/null +++ b/packages/core/src/node-execution-context/__tests__/webhook-context.test.ts @@ -0,0 +1,161 @@ +import type { Request, Response } from 'express'; +import { mock } from 'jest-mock-extended'; +import type { + Expression, + ICredentialDataDecryptedObject, + ICredentialsHelper, + INode, + INodeType, + INodeTypes, + IWebhookData, + IWorkflowExecuteAdditionalData, + Workflow, + WorkflowExecuteMode, +} 
from 'n8n-workflow'; + +import { WebhookContext } from '../webhook-context'; + +describe('WebhookContext', () => { + const testCredentialType = 'testCredential'; + const nodeType = mock({ + description: { + credentials: [ + { + name: testCredentialType, + required: true, + }, + ], + properties: [ + { + name: 'testParameter', + required: true, + }, + ], + }, + }); + const nodeTypes = mock(); + const expression = mock(); + const workflow = mock({ expression, nodeTypes }); + const node = mock({ + credentials: { + [testCredentialType]: { + id: 'testCredentialId', + }, + }, + }); + node.parameters = { + testParameter: 'testValue', + }; + const credentialsHelper = mock(); + const additionalData = mock({ + credentialsHelper, + }); + additionalData.httpRequest = { + body: { test: 'body' }, + headers: { test: 'header' }, + params: { test: 'param' }, + query: { test: 'query' }, + } as unknown as Request; + additionalData.httpResponse = mock(); + const mode: WorkflowExecuteMode = 'manual'; + const webhookData = mock({ + webhookDescription: { + name: 'default', + }, + }); + const runExecutionData = null; + + const webhookContext = new WebhookContext( + workflow, + node, + additionalData, + mode, + webhookData, + [], + runExecutionData, + ); + + beforeEach(() => { + jest.clearAllMocks(); + }); + + describe('getCredentials', () => { + it('should get decrypted credentials', async () => { + nodeTypes.getByNameAndVersion.mockReturnValue(nodeType); + credentialsHelper.getDecrypted.mockResolvedValue({ secret: 'token' }); + + const credentials = + await webhookContext.getCredentials(testCredentialType); + + expect(credentials).toEqual({ secret: 'token' }); + }); + }); + + describe('getBodyData', () => { + it('should return the body data of the request', () => { + const bodyData = webhookContext.getBodyData(); + expect(bodyData).toEqual({ test: 'body' }); + }); + }); + + describe('getHeaderData', () => { + it('should return the header data of the request', () => { + const headerData = webhookContext.getHeaderData(); + expect(headerData).toEqual({ test: 'header' }); + }); + }); + + describe('getParamsData', () => { + it('should return the params data of the request', () => { + const paramsData = webhookContext.getParamsData(); + expect(paramsData).toEqual({ test: 'param' }); + }); + }); + + describe('getQueryData', () => { + it('should return the query data of the request', () => { + const queryData = webhookContext.getQueryData(); + expect(queryData).toEqual({ test: 'query' }); + }); + }); + + describe('getRequestObject', () => { + it('should return the request object', () => { + const request = webhookContext.getRequestObject(); + expect(request).toBe(additionalData.httpRequest); + }); + }); + + describe('getResponseObject', () => { + it('should return the response object', () => { + const response = webhookContext.getResponseObject(); + expect(response).toBe(additionalData.httpResponse); + }); + }); + + describe('getWebhookName', () => { + it('should return the name of the webhook', () => { + const webhookName = webhookContext.getWebhookName(); + expect(webhookName).toBe('default'); + }); + }); + + describe('getNodeParameter', () => { + beforeEach(() => { + nodeTypes.getByNameAndVersion.mockReturnValue(nodeType); + expression.getParameterValue.mockImplementation((value) => value); + }); + + it('should return parameter value when it exists', () => { + const parameter = webhookContext.getNodeParameter('testParameter'); + + expect(parameter).toBe('testValue'); + }); + + it('should return the fallback value when 
the parameter does not exist', () => { + const parameter = webhookContext.getNodeParameter('otherParameter', 'fallback'); + + expect(parameter).toBe('fallback'); + }); + }); +}); diff --git a/packages/core/src/node-execution-context/index.ts b/packages/core/src/node-execution-context/index.ts index 1a64023850..61be738ced 100644 --- a/packages/core/src/node-execution-context/index.ts +++ b/packages/core/src/node-execution-context/index.ts @@ -1,3 +1,4 @@ // eslint-disable-next-line import/no-cycle export { PollContext } from './poll-context'; export { TriggerContext } from './trigger-context'; +export { WebhookContext } from './webhook-context'; diff --git a/packages/core/src/node-execution-context/webhook-context.ts b/packages/core/src/node-execution-context/webhook-context.ts new file mode 100644 index 0000000000..56e1f4767d --- /dev/null +++ b/packages/core/src/node-execution-context/webhook-context.ts @@ -0,0 +1,239 @@ +import type { Request, Response } from 'express'; +import type { + CloseFunction, + ICredentialDataDecryptedObject, + IDataObject, + IExecuteData, + IGetNodeParameterOptions, + INode, + INodeExecutionData, + IRunExecutionData, + ITaskDataConnections, + IWebhookData, + IWebhookFunctions, + IWorkflowExecuteAdditionalData, + NodeConnectionType, + NodeParameterValueType, + Workflow, + WorkflowExecuteMode, +} from 'n8n-workflow'; +import { ApplicationError, createDeferredPromise } from 'n8n-workflow'; + +// eslint-disable-next-line import/no-cycle +import { + copyBinaryFile, + getAdditionalKeys, + getCredentials, + getInputConnectionData, + getNodeParameter, + getNodeWebhookUrl, + returnJsonArray, +} from '@/NodeExecuteFunctions'; + +import { BinaryHelpers } from './helpers/binary-helpers'; +import { RequestHelpers } from './helpers/request-helpers'; +import { NodeExecutionContext } from './node-execution-context'; + +export class WebhookContext extends NodeExecutionContext implements IWebhookFunctions { + readonly helpers: IWebhookFunctions['helpers']; + + readonly nodeHelpers: IWebhookFunctions['nodeHelpers']; + + constructor( + workflow: Workflow, + node: INode, + additionalData: IWorkflowExecuteAdditionalData, + mode: WorkflowExecuteMode, + private readonly webhookData: IWebhookData, + private readonly closeFunctions: CloseFunction[], + private readonly runExecutionData: IRunExecutionData | null, + ) { + super(workflow, node, additionalData, mode); + + this.helpers = { + createDeferredPromise, + returnJsonArray, + ...new BinaryHelpers(workflow, additionalData).exported, + ...new RequestHelpers(this, workflow, node, additionalData).exported, + }; + + this.nodeHelpers = { + copyBinaryFile: async (filePath, fileName, mimeType) => + await copyBinaryFile( + this.workflow.id, + this.additionalData.executionId!, + filePath, + fileName, + mimeType, + ), + }; + } + + async getCredentials(type: string) { + return await getCredentials(this.workflow, this.node, type, this.additionalData, this.mode); + } + + getBodyData() { + if (this.additionalData.httpRequest === undefined) { + throw new ApplicationError('Request is missing'); + } + return this.additionalData.httpRequest.body as IDataObject; + } + + getHeaderData() { + if (this.additionalData.httpRequest === undefined) { + throw new ApplicationError('Request is missing'); + } + return this.additionalData.httpRequest.headers; + } + + getParamsData(): object { + if (this.additionalData.httpRequest === undefined) { + throw new ApplicationError('Request is missing'); + } + return this.additionalData.httpRequest.params; + } + + 
getQueryData(): object { + if (this.additionalData.httpRequest === undefined) { + throw new ApplicationError('Request is missing'); + } + return this.additionalData.httpRequest.query; + } + + getRequestObject(): Request { + if (this.additionalData.httpRequest === undefined) { + throw new ApplicationError('Request is missing'); + } + return this.additionalData.httpRequest; + } + + getResponseObject(): Response { + if (this.additionalData.httpResponse === undefined) { + throw new ApplicationError('Response is missing'); + } + return this.additionalData.httpResponse; + } + + getNodeWebhookUrl(name: string): string | undefined { + return getNodeWebhookUrl( + name, + this.workflow, + this.node, + this.additionalData, + this.mode, + getAdditionalKeys(this.additionalData, this.mode, null), + ); + } + + getWebhookName() { + return this.webhookData.webhookDescription.name; + } + + async getInputConnectionData(inputName: NodeConnectionType, itemIndex: number): Promise { + // To be able to use expressions like "$json.sessionId" set the + // body data the webhook received to what is normally used for + // incoming node data. + const connectionInputData: INodeExecutionData[] = [ + // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment + { json: this.additionalData.httpRequest?.body || {} }, + ]; + const runExecutionData: IRunExecutionData = { + resultData: { + runData: {}, + }, + }; + const executeData: IExecuteData = { + data: { + main: [connectionInputData], + }, + node: this.node, + source: null, + }; + const runIndex = 0; + + return await getInputConnectionData.call( + this, + this.workflow, + runExecutionData, + runIndex, + connectionInputData, + {} as ITaskDataConnections, + this.additionalData, + executeData, + this.mode, + this.closeFunctions, + inputName, + itemIndex, + ); + } + + evaluateExpression(expression: string, evaluateItemIndex?: number) { + const itemIndex = evaluateItemIndex ?? 
0; + const runIndex = 0; + + let connectionInputData: INodeExecutionData[] = []; + let executionData: IExecuteData | undefined; + + if (this.runExecutionData?.executionData !== undefined) { + executionData = this.runExecutionData.executionData.nodeExecutionStack[0]; + + if (executionData !== undefined) { + connectionInputData = executionData.data.main[0]!; + } + } + + const additionalKeys = getAdditionalKeys(this.additionalData, this.mode, this.runExecutionData); + + return this.workflow.expression.resolveSimpleParameterValue( + `=${expression}`, + {}, + this.runExecutionData, + runIndex, + itemIndex, + this.node.name, + connectionInputData, + this.mode, + additionalKeys, + executionData, + ); + } + + getNodeParameter( + parameterName: string, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + fallbackValue?: any, + options?: IGetNodeParameterOptions, + ): NodeParameterValueType | object { + const itemIndex = 0; + const runIndex = 0; + + let connectionInputData: INodeExecutionData[] = []; + let executionData: IExecuteData | undefined; + + if (this.runExecutionData?.executionData !== undefined) { + executionData = this.runExecutionData.executionData.nodeExecutionStack[0]; + + if (executionData !== undefined) { + connectionInputData = executionData.data.main[0]!; + } + } + + const additionalKeys = getAdditionalKeys(this.additionalData, this.mode, this.runExecutionData); + + return getNodeParameter( + this.workflow, + this.runExecutionData, + runIndex, + connectionInputData, + this.node, + parameterName, + itemIndex, + this.mode, + additionalKeys, + executionData, + fallbackValue, + options, + ); + } +} From a092b8e972e1253d92df416f19096a045858e7c1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E0=A4=95=E0=A4=BE=E0=A4=B0=E0=A4=A4=E0=A5=8B=E0=A4=AB?= =?UTF-8?q?=E0=A5=8D=E0=A4=AB=E0=A5=87=E0=A4=B2=E0=A4=B8=E0=A5=8D=E0=A4=95?= =?UTF-8?q?=E0=A5=8D=E0=A4=B0=E0=A4=BF=E0=A4=AA=E0=A5=8D=E0=A4=9F=E2=84=A2?= Date: Mon, 4 Nov 2024 12:31:17 +0100 Subject: [PATCH 06/21] fix(core): Use the correct docs URL for regular nodes when used as tools (#11529) --- packages/workflow/src/NodeHelpers.ts | 3 +++ packages/workflow/test/NodeHelpers.test.ts | 7 +++++++ 2 files changed, 10 insertions(+) diff --git a/packages/workflow/src/NodeHelpers.ts b/packages/workflow/src/NodeHelpers.ts index 85431c6897..e21f2b7e1a 100644 --- a/packages/workflow/src/NodeHelpers.ts +++ b/packages/workflow/src/NodeHelpers.ts @@ -432,12 +432,15 @@ export function convertNodeToAiTool< } } + const resources = item.description.codex?.resources ?? 
{}; + item.description.codex = { categories: ['AI'], subcategories: { AI: ['Tools'], Tools: ['Other Tools'], }, + resources, }; return item; } diff --git a/packages/workflow/test/NodeHelpers.test.ts b/packages/workflow/test/NodeHelpers.test.ts index 81b7af5de6..09e036fc94 100644 --- a/packages/workflow/test/NodeHelpers.test.ts +++ b/packages/workflow/test/NodeHelpers.test.ts @@ -3693,6 +3693,7 @@ describe('NodeHelpers', () => { AI: ['Tools'], Tools: ['Other Tools'], }, + resources: {}, }); }); @@ -3775,6 +3776,9 @@ describe('NodeHelpers', () => { subcategories: { Existing: ['Category'], }, + resources: { + primaryDocumentation: [{ url: 'https://example.com' }], + }, }; const result = convertNodeToAiTool(fullNodeWrapper); expect(result.description.codex).toEqual({ @@ -3783,6 +3787,9 @@ describe('NodeHelpers', () => { AI: ['Tools'], Tools: ['Other Tools'], }, + resources: { + primaryDocumentation: [{ url: 'https://example.com' }], + }, }); }); From da18e1ad29adc591c727fb1989298a4cdd4bbaf1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E0=A4=95=E0=A4=BE=E0=A4=B0=E0=A4=A4=E0=A5=8B=E0=A4=AB?= =?UTF-8?q?=E0=A5=8D=E0=A4=AB=E0=A5=87=E0=A4=B2=E0=A4=B8=E0=A5=8D=E0=A4=95?= =?UTF-8?q?=E0=A5=8D=E0=A4=B0=E0=A4=BF=E0=A4=AA=E0=A5=8D=E0=A4=9F=E2=84=A2?= Date: Mon, 4 Nov 2024 12:35:07 +0100 Subject: [PATCH 07/21] refactor(core): Extract load-options context out of NodeExecutionFunctions (no-changelog) (#11461) --- .../dynamic-node-parameters.service.ts | 4 +- packages/core/src/NodeExecuteFunctions.ts | 80 -------------- .../__tests__/load-options-context.test.ts | 102 ++++++++++++++++++ .../core/src/node-execution-context/index.ts | 1 + .../load-options-context.ts | 102 ++++++++++++++++++ .../node-execution-context/webhook-context.ts | 33 +++--- 6 files changed, 220 insertions(+), 102 deletions(-) create mode 100644 packages/core/src/node-execution-context/__tests__/load-options-context.test.ts create mode 100644 packages/core/src/node-execution-context/load-options-context.ts diff --git a/packages/cli/src/services/dynamic-node-parameters.service.ts b/packages/cli/src/services/dynamic-node-parameters.service.ts index 7a641e84a6..dd60c85444 100644 --- a/packages/cli/src/services/dynamic-node-parameters.service.ts +++ b/packages/cli/src/services/dynamic-node-parameters.service.ts @@ -1,4 +1,4 @@ -import { NodeExecuteFunctions } from 'n8n-core'; +import { LoadOptionsContext, NodeExecuteFunctions } from 'n8n-core'; import type { ILoadOptions, ILoadOptionsFunctions, @@ -253,6 +253,6 @@ export class DynamicNodeParametersService { workflow: Workflow, ) { const node = workflow.nodes['Temp-Node']; - return NodeExecuteFunctions.getLoadOptionsFunctions(workflow, node, path, additionalData); + return new LoadOptionsContext(workflow, node, additionalData, path); } } diff --git a/packages/core/src/NodeExecuteFunctions.ts b/packages/core/src/NodeExecuteFunctions.ts index 5560b447b9..4e80e1a153 100644 --- a/packages/core/src/NodeExecuteFunctions.ts +++ b/packages/core/src/NodeExecuteFunctions.ts @@ -59,7 +59,6 @@ import type { IGetNodeParameterOptions, IHookFunctions, IHttpRequestOptions, - ILoadOptionsFunctions, IN8nHttpFullResponse, IN8nHttpResponse, INode, @@ -4332,85 +4331,6 @@ export function getCredentialTestFunctions(): ICredentialTestFunctions { }; } -/** - * Returns the execute functions regular nodes have access to in load-options-function. 
- */ -export function getLoadOptionsFunctions( - workflow: Workflow, - node: INode, - path: string, - additionalData: IWorkflowExecuteAdditionalData, -): ILoadOptionsFunctions { - return ((workflow: Workflow, node: INode, path: string) => { - return { - ...getCommonWorkflowFunctions(workflow, node, additionalData), - getCredentials: async (type) => - await getCredentials(workflow, node, type, additionalData, 'internal'), - getCurrentNodeParameter: ( - parameterPath: string, - options?: IGetNodeParameterOptions, - ): NodeParameterValueType | object | undefined => { - const nodeParameters = additionalData.currentNodeParameters; - - if (parameterPath.charAt(0) === '&') { - parameterPath = `${path.split('.').slice(1, -1).join('.')}.${parameterPath.slice(1)}`; - } - - let returnData = get(nodeParameters, parameterPath); - - // This is outside the try/catch because it throws errors with proper messages - if (options?.extractValue) { - const nodeType = workflow.nodeTypes.getByNameAndVersion(node.type, node.typeVersion); - if (nodeType === undefined) { - throw new ApplicationError('Node type is not known so cannot return parameter value', { - tags: { nodeType: node.type }, - }); - } - returnData = extractValue( - returnData, - parameterPath, - node, - nodeType, - ) as NodeParameterValueType; - } - - return returnData; - }, - getCurrentNodeParameters: () => additionalData.currentNodeParameters, - getNodeParameter: ( - parameterName: string, - fallbackValue?: any, - options?: IGetNodeParameterOptions, - ): NodeParameterValueType | object => { - const runExecutionData: IRunExecutionData | null = null; - const itemIndex = 0; - const runIndex = 0; - const mode = 'internal' as WorkflowExecuteMode; - const connectionInputData: INodeExecutionData[] = []; - - return getNodeParameter( - workflow, - runExecutionData, - runIndex, - connectionInputData, - node, - parameterName, - itemIndex, - mode, - getAdditionalKeys(additionalData, mode, runExecutionData), - undefined, - fallbackValue, - options, - ); - }, - helpers: { - ...getSSHTunnelFunctions(), - ...getRequestHelperFunctions(workflow, node, additionalData), - }, - }; - })(workflow, node, path); -} - /** * Returns the execute functions regular nodes have access to in hook-function. 
*/ diff --git a/packages/core/src/node-execution-context/__tests__/load-options-context.test.ts b/packages/core/src/node-execution-context/__tests__/load-options-context.test.ts new file mode 100644 index 0000000000..5ab03e105d --- /dev/null +++ b/packages/core/src/node-execution-context/__tests__/load-options-context.test.ts @@ -0,0 +1,102 @@ +import { mock } from 'jest-mock-extended'; +import type { + Expression, + ICredentialDataDecryptedObject, + ICredentialsHelper, + INode, + INodeType, + INodeTypes, + IWorkflowExecuteAdditionalData, + Workflow, +} from 'n8n-workflow'; + +import { LoadOptionsContext } from '../load-options-context'; + +describe('LoadOptionsContext', () => { + const testCredentialType = 'testCredential'; + const nodeType = mock({ + description: { + credentials: [ + { + name: testCredentialType, + required: true, + }, + ], + properties: [ + { + name: 'testParameter', + required: true, + }, + ], + }, + }); + const nodeTypes = mock(); + const expression = mock(); + const workflow = mock({ expression, nodeTypes }); + const node = mock({ + credentials: { + [testCredentialType]: { + id: 'testCredentialId', + }, + }, + }); + node.parameters = { + testParameter: 'testValue', + }; + const credentialsHelper = mock(); + const additionalData = mock({ credentialsHelper }); + const path = 'testPath'; + + const loadOptionsContext = new LoadOptionsContext(workflow, node, additionalData, path); + + beforeEach(() => { + jest.clearAllMocks(); + }); + + describe('getCredentials', () => { + it('should get decrypted credentials', async () => { + nodeTypes.getByNameAndVersion.mockReturnValue(nodeType); + credentialsHelper.getDecrypted.mockResolvedValue({ secret: 'token' }); + + const credentials = + await loadOptionsContext.getCredentials(testCredentialType); + + expect(credentials).toEqual({ secret: 'token' }); + }); + }); + + describe('getCurrentNodeParameter', () => { + beforeEach(() => { + nodeTypes.getByNameAndVersion.mockReturnValue(nodeType); + }); + + it('should return parameter value when it exists', () => { + additionalData.currentNodeParameters = { + testParameter: 'testValue', + }; + + const parameter = loadOptionsContext.getCurrentNodeParameter('testParameter'); + + expect(parameter).toBe('testValue'); + }); + }); + + describe('getNodeParameter', () => { + beforeEach(() => { + nodeTypes.getByNameAndVersion.mockReturnValue(nodeType); + expression.getParameterValue.mockImplementation((value) => value); + }); + + it('should return parameter value when it exists', () => { + const parameter = loadOptionsContext.getNodeParameter('testParameter'); + + expect(parameter).toBe('testValue'); + }); + + it('should return the fallback value when the parameter does not exist', () => { + const parameter = loadOptionsContext.getNodeParameter('otherParameter', 'fallback'); + + expect(parameter).toBe('fallback'); + }); + }); +}); diff --git a/packages/core/src/node-execution-context/index.ts b/packages/core/src/node-execution-context/index.ts index 61be738ced..cc12f1927d 100644 --- a/packages/core/src/node-execution-context/index.ts +++ b/packages/core/src/node-execution-context/index.ts @@ -1,4 +1,5 @@ // eslint-disable-next-line import/no-cycle +export { LoadOptionsContext } from './load-options-context'; export { PollContext } from './poll-context'; export { TriggerContext } from './trigger-context'; export { WebhookContext } from './webhook-context'; diff --git a/packages/core/src/node-execution-context/load-options-context.ts b/packages/core/src/node-execution-context/load-options-context.ts 
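// Editor's sketch of the testing approach used in the new load-options-context.test.ts above,
// assuming a deliberately tiny interface rather than the real INodeTypes: `jest-mock-extended`
// derives a deep mock from an interface, so a test only stubs the members it actually exercises.
import { mock } from 'jest-mock-extended';

interface MinimalNodeTypes {
	getByNameAndVersion(type: string, version: number): { description: { properties: unknown[] } };
}

describe('mock-based context tests', () => {
	it('stubs only what the test needs', () => {
		const nodeTypes = mock<MinimalNodeTypes>();
		nodeTypes.getByNameAndVersion.mockReturnValue({ description: { properties: [] } });

		// Unstubbed members still exist on the mock and record their calls.
		expect(nodeTypes.getByNameAndVersion('testNode', 1).description.properties).toEqual([]);
		expect(nodeTypes.getByNameAndVersion).toHaveBeenCalledWith('testNode', 1);
	});
});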
new file mode 100644 index 0000000000..98dd58210b --- /dev/null +++ b/packages/core/src/node-execution-context/load-options-context.ts @@ -0,0 +1,102 @@ +import { get } from 'lodash'; +import type { + ICredentialDataDecryptedObject, + IGetNodeParameterOptions, + INode, + INodeExecutionData, + ILoadOptionsFunctions, + IRunExecutionData, + IWorkflowExecuteAdditionalData, + NodeParameterValueType, + Workflow, +} from 'n8n-workflow'; + +import { extractValue } from '@/ExtractValue'; +// eslint-disable-next-line import/no-cycle +import { getAdditionalKeys, getCredentials, getNodeParameter } from '@/NodeExecuteFunctions'; + +import { RequestHelpers } from './helpers/request-helpers'; +import { SSHTunnelHelpers } from './helpers/ssh-tunnel-helpers'; +import { NodeExecutionContext } from './node-execution-context'; + +export class LoadOptionsContext extends NodeExecutionContext implements ILoadOptionsFunctions { + readonly helpers: ILoadOptionsFunctions['helpers']; + + constructor( + workflow: Workflow, + node: INode, + additionalData: IWorkflowExecuteAdditionalData, + private readonly path: string, + ) { + super(workflow, node, additionalData, 'internal'); + + this.helpers = { + ...new RequestHelpers(this, workflow, node, additionalData).exported, + ...new SSHTunnelHelpers().exported, + }; + } + + async getCredentials(type: string) { + return await getCredentials(this.workflow, this.node, type, this.additionalData, this.mode); + } + + getCurrentNodeParameter( + parameterPath: string, + options?: IGetNodeParameterOptions, + ): NodeParameterValueType | object | undefined { + const nodeParameters = this.additionalData.currentNodeParameters; + + if (parameterPath.charAt(0) === '&') { + parameterPath = `${this.path.split('.').slice(1, -1).join('.')}.${parameterPath.slice(1)}`; + } + + let returnData = get(nodeParameters, parameterPath); + + // This is outside the try/catch because it throws errors with proper messages + if (options?.extractValue) { + const nodeType = this.workflow.nodeTypes.getByNameAndVersion( + this.node.type, + this.node.typeVersion, + ); + returnData = extractValue( + returnData, + parameterPath, + this.node, + nodeType, + ) as NodeParameterValueType; + } + + return returnData; + } + + getCurrentNodeParameters() { + return this.additionalData.currentNodeParameters; + } + + getNodeParameter( + parameterName: string, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + fallbackValue?: any, + options?: IGetNodeParameterOptions, + ): NodeParameterValueType | object { + const runExecutionData: IRunExecutionData | null = null; + const itemIndex = 0; + const runIndex = 0; + const connectionInputData: INodeExecutionData[] = []; + + return getNodeParameter( + this.workflow, + runExecutionData, + runIndex, + connectionInputData, + this.node, + parameterName, + itemIndex, + this.mode, + getAdditionalKeys(this.additionalData, this.mode, runExecutionData), + undefined, + fallbackValue, + options, + ); + } +} diff --git a/packages/core/src/node-execution-context/webhook-context.ts b/packages/core/src/node-execution-context/webhook-context.ts index 56e1f4767d..6b2d8c0d89 100644 --- a/packages/core/src/node-execution-context/webhook-context.ts +++ b/packages/core/src/node-execution-context/webhook-context.ts @@ -74,38 +74,23 @@ export class WebhookContext extends NodeExecutionContext implements IWebhookFunc } getBodyData() { - if (this.additionalData.httpRequest === undefined) { - throw new ApplicationError('Request is missing'); - } - return this.additionalData.httpRequest.body as 
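// Editor's sketch of how the class below replaces the removed factory function — the parameter
// path and field names are hypothetical, only the constructor signature and method names come
// from the diff: callers that previously went through
// NodeExecuteFunctions.getLoadOptionsFunctions(workflow, node, path, additionalData) now build
// the context directly, and a leading '&' in getCurrentNodeParameter() resolves the lookup
// relative to the context's `path` instead of from the parameter root.
import { LoadOptionsContext } from 'n8n-core';
import type { INode, IWorkflowExecuteAdditionalData, Workflow } from 'n8n-workflow';

function buildLoadOptionsContext(
	workflow: Workflow,
	node: INode,
	additionalData: IWorkflowExecuteAdditionalData,
) {
	// 'parameters.filters.conditions' is an illustrative path, not one taken from the patch.
	const context = new LoadOptionsContext(workflow, node, additionalData, 'parameters.filters.conditions');

	// Absolute lookup: reads additionalData.currentNodeParameters.resource
	const resource = context.getCurrentNodeParameter('resource');

	// Relative lookup: '&field' is rewritten against the path, e.g. to 'filters.field'
	const field = context.getCurrentNodeParameter('&field');

	return { context, resource, field };
}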
IDataObject; + return this.assertHttpRequest().body as IDataObject; } getHeaderData() { - if (this.additionalData.httpRequest === undefined) { - throw new ApplicationError('Request is missing'); - } - return this.additionalData.httpRequest.headers; + return this.assertHttpRequest().headers; } getParamsData(): object { - if (this.additionalData.httpRequest === undefined) { - throw new ApplicationError('Request is missing'); - } - return this.additionalData.httpRequest.params; + return this.assertHttpRequest().params; } getQueryData(): object { - if (this.additionalData.httpRequest === undefined) { - throw new ApplicationError('Request is missing'); - } - return this.additionalData.httpRequest.query; + return this.assertHttpRequest().query; } getRequestObject(): Request { - if (this.additionalData.httpRequest === undefined) { - throw new ApplicationError('Request is missing'); - } - return this.additionalData.httpRequest; + return this.assertHttpRequest(); } getResponseObject(): Response { @@ -115,6 +100,14 @@ export class WebhookContext extends NodeExecutionContext implements IWebhookFunc return this.additionalData.httpResponse; } + private assertHttpRequest() { + const { httpRequest } = this.additionalData; + if (httpRequest === undefined) { + throw new ApplicationError('Request is missing'); + } + return httpRequest; + } + getNodeWebhookUrl(name: string): string | undefined { return getNodeWebhookUrl( name, From d1153f51e80911cbc8f34ba5f038f349b75295c3 Mon Sep 17 00:00:00 2001 From: Danny Martini Date: Mon, 4 Nov 2024 13:04:22 +0100 Subject: [PATCH 08/21] fix(core): Don't send a `executionFinished` event to the browser with no run data if the execution has already been cleaned up (#11502) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: कारतोफ्फेलस्क्रिप्ट™ --- .../cli/src/__tests__/workflow-runner.test.ts | 14 ++++++++++++++ packages/cli/src/workflow-runner.ts | 12 +++++++++++- packages/core/src/WorkflowExecute.ts | 15 ++++++++------- 3 files changed, 33 insertions(+), 8 deletions(-) diff --git a/packages/cli/src/__tests__/workflow-runner.test.ts b/packages/cli/src/__tests__/workflow-runner.test.ts index f828880c74..4774746ba7 100644 --- a/packages/cli/src/__tests__/workflow-runner.test.ts +++ b/packages/cli/src/__tests__/workflow-runner.test.ts @@ -4,6 +4,7 @@ import Container from 'typedi'; import { ActiveExecutions } from '@/active-executions'; import config from '@/config'; import type { User } from '@/databases/entities/user'; +import { ExecutionNotFoundError } from '@/errors/execution-not-found-error'; import { Telemetry } from '@/telemetry'; import { WorkflowRunner } from '@/workflow-runner'; import { mockInstance } from '@test/mocking'; @@ -64,6 +65,19 @@ test('processError should return early in Bull stalled edge case', async () => { expect(watchedWorkflowExecuteAfter).toHaveBeenCalledTimes(0); }); +test('processError should return early if the error is `ExecutionNotFoundError`', async () => { + const workflow = await createWorkflow({}, owner); + const execution = await createExecution({ status: 'success', finished: true }, workflow); + await runner.processError( + new ExecutionNotFoundError(execution.id), + new Date(), + 'webhook', + execution.id, + new WorkflowHooks(hookFunctions, 'webhook', execution.id, workflow), + ); + expect(watchedWorkflowExecuteAfter).toHaveBeenCalledTimes(0); +}); + test('processError should process error', async () => { const workflow = await createWorkflow({}, owner); const execution = await 
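// Editor's sketch of the webhook-context refactor above, assuming a generic class and plain Error
// rather than the n8n types: the five repeated `if (... === undefined) throw` blocks collapse into
// one private assertion helper, so each getter becomes a one-liner and the error message lives in
// a single place.
class RequestHolder {
	constructor(private readonly request?: { body: unknown; headers: Record<string, string> }) {}

	getBodyData() {
		return this.assertRequest().body;
	}

	getHeaderData() {
		return this.assertRequest().headers;
	}

	private assertRequest() {
		const { request } = this;
		if (request === undefined) {
			throw new Error('Request is missing');
		}
		return request; // narrowed to non-undefined for every caller
	}
}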
createExecution( diff --git a/packages/cli/src/workflow-runner.ts b/packages/cli/src/workflow-runner.ts index 02e0b94afd..38ead8896e 100644 --- a/packages/cli/src/workflow-runner.ts +++ b/packages/cli/src/workflow-runner.ts @@ -35,6 +35,7 @@ import * as WorkflowHelpers from '@/workflow-helpers'; import { generateFailedExecutionFromError } from '@/workflow-helpers'; import { WorkflowStaticDataService } from '@/workflows/workflow-static-data.service'; +import { ExecutionNotFoundError } from './errors/execution-not-found-error'; import { EventService } from './events/event.service'; @Service() @@ -57,12 +58,21 @@ export class WorkflowRunner { /** The process did error */ async processError( - error: ExecutionError, + error: ExecutionError | ExecutionNotFoundError, startedAt: Date, executionMode: WorkflowExecuteMode, executionId: string, hooks?: WorkflowHooks, ) { + // This means the execution was probably cancelled and has already + // been cleaned up. + // + // FIXME: This is a quick fix. The proper fix would be to not remove + // the execution from the active executions while it's still running. + if (error instanceof ExecutionNotFoundError) { + return; + } + ErrorReporter.error(error, { executionId }); const isQueueMode = config.getEnv('executions.mode') === 'queue'; diff --git a/packages/core/src/WorkflowExecute.ts b/packages/core/src/WorkflowExecute.ts index 23b88abd5b..f8c32351bb 100644 --- a/packages/core/src/WorkflowExecute.ts +++ b/packages/core/src/WorkflowExecute.ts @@ -45,6 +45,7 @@ import { NodeExecutionOutput, sleep, ErrorReporterProxy, + ExecutionCancelledError, } from 'n8n-workflow'; import PCancelable from 'p-cancelable'; @@ -154,10 +155,6 @@ export class WorkflowExecute { return this.processRunExecutionData(workflow); } - static isAbortError(e?: ExecutionBaseError) { - return e?.message === 'AbortError'; - } - forceInputNodeExecution(workflow: Workflow): boolean { return workflow.settings.executionOrder !== 'v1'; } @@ -1479,7 +1476,7 @@ export class WorkflowExecute { // Add the execution data again so that it can get restarted this.runExecutionData.executionData!.nodeExecutionStack.unshift(executionData); // Only execute the nodeExecuteAfter hook if the node did not get aborted - if (!WorkflowExecute.isAbortError(executionError)) { + if (!this.isCancelled) { await this.executeHook('nodeExecuteAfter', [ executionNode.name, taskData, @@ -1827,7 +1824,7 @@ export class WorkflowExecute { return await this.processSuccessExecution( startedAt, workflow, - new WorkflowOperationError('Workflow has been canceled or timed out'), + new ExecutionCancelledError(this.additionalData.executionId ?? 
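// Editor's sketch of the cancellation change in WorkflowExecute, assuming a heavily simplified
// class: instead of string-matching on `error.message === 'AbortError'`, cancellation is read
// straight from the AbortController's signal, so post-execution hooks are skipped whenever the
// run was aborted, regardless of which error object happens to be in flight.
class CancellableRun {
	private readonly abortController = new AbortController();

	cancel() {
		this.abortController.abort();
	}

	private get isCancelled() {
		return this.abortController.signal.aborted;
	}

	async finish(runHook: () => Promise<void>) {
		if (!this.isCancelled) {
			await runHook(); // only fire 'workflowExecuteAfter'-style hooks for non-cancelled runs
		}
	}
}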
'unknown'), closeFunction, ); } @@ -1928,7 +1925,7 @@ export class WorkflowExecute { this.moveNodeMetadata(); // Prevent from running the hook if the error is an abort error as it was already handled - if (!WorkflowExecute.isAbortError(executionError)) { + if (!this.isCancelled) { await this.executeHook('workflowExecuteAfter', [fullRunData, newStaticData]); } @@ -1959,4 +1956,8 @@ export class WorkflowExecute { return fullRunData; } + + private get isCancelled() { + return this.abortController.signal.aborted; + } } From e10968b26f8ea7b13561a1b1ecc44272bea9f347 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E0=A4=95=E0=A4=BE=E0=A4=B0=E0=A4=A4=E0=A5=8B=E0=A4=AB?= =?UTF-8?q?=E0=A5=8D=E0=A4=AB=E0=A5=87=E0=A4=B2=E0=A4=B8=E0=A5=8D=E0=A4=95?= =?UTF-8?q?=E0=A5=8D=E0=A4=B0=E0=A4=BF=E0=A4=AA=E0=A5=8D=E0=A4=9F=E2=84=A2?= Date: Mon, 4 Nov 2024 13:07:06 +0100 Subject: [PATCH 09/21] fix(editor): Respect user's dark-mode preference even before the application is loaded (no-changelog) (#11530) --- packages/editor-ui/index.html | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/editor-ui/index.html b/packages/editor-ui/index.html index 8176735fdd..25bbde030f 100644 --- a/packages/editor-ui/index.html +++ b/packages/editor-ui/index.html @@ -5,6 +5,7 @@ + diff --git a/packages/editor-ui/src/views/CredentialsView.vue b/packages/editor-ui/src/views/CredentialsView.vue index cdb11456ea..8d29191941 100644 --- a/packages/editor-ui/src/views/CredentialsView.vue +++ b/packages/editor-ui/src/views/CredentialsView.vue @@ -2,8 +2,10 @@ import { ref, computed, onMounted, watch } from 'vue'; import { useRoute, useRouter } from 'vue-router'; import type { ICredentialsResponse, ICredentialTypeMap } from '@/Interface'; -import type { IResource } from '@/components/layouts/ResourcesListLayout.vue'; -import ResourcesListLayout from '@/components/layouts/ResourcesListLayout.vue'; +import ResourcesListLayout, { + type IResource, + type IFilters, +} from '@/components/layouts/ResourcesListLayout.vue'; import CredentialCard from '@/components/CredentialCard.vue'; import type { ICredentialType } from 'n8n-workflow'; import { @@ -43,10 +45,10 @@ const router = useRouter(); const telemetry = useTelemetry(); const i18n = useI18n(); -const filters = ref({ +const filters = ref({ search: '', homeProject: '', - type: '', + type: [], }); const loading = ref(false); @@ -123,13 +125,11 @@ watch( }, ); -const onFilter = ( - resource: ICredentialsResponse, - filtersToApply: { type: string[]; search: string }, - matches: boolean, -): boolean => { +const onFilter = (resource: IResource, newFilters: IFilters, matches: boolean): boolean => { + const iResource = resource as ICredentialsResponse; + const filtersToApply = newFilters as IFilters & { type: string[] }; if (filtersToApply.type.length > 0) { - matches = matches && filtersToApply.type.includes(resource.type); + matches = matches && filtersToApply.type.includes(iResource.type); } if (filtersToApply.search) { @@ -137,8 +137,8 @@ const onFilter = ( matches = matches || - (credentialTypesById.value[resource.type] && - credentialTypesById.value[resource.type].displayName.toLowerCase().includes(searchString)); + (credentialTypesById.value[iResource.type] && + credentialTypesById.value[iResource.type].displayName.toLowerCase().includes(searchString)); } return matches; diff --git a/packages/editor-ui/src/views/WorkflowsView.vue b/packages/editor-ui/src/views/WorkflowsView.vue index c629f4e318..7025202181 100644 --- a/packages/editor-ui/src/views/WorkflowsView.vue +++ 
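// Editor's note on the index.html change above: the line actually added to the <head> is not
// reproduced in this hunk, so the snippet below is only an assumption about the kind of pre-load
// bootstrap the commit message describes — the storage key 'theme' and the 'data-theme' attribute
// are hypothetical, not taken from the patch. The idea is to resolve the stored theme or the OS
// preference before the Vue bundle loads, so the first paint already honours dark mode.
const stored = window.localStorage.getItem('theme'); // hypothetical key
const prefersDark = window.matchMedia('(prefers-color-scheme: dark)').matches;
const theme = stored ?? (prefersDark ? 'dark' : 'light');
document.documentElement.setAttribute('data-theme', theme); // hypothetical attribute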
b/packages/editor-ui/src/views/WorkflowsView.vue @@ -1,10 +1,13 @@
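// Editor's sketch of the CredentialsView.vue filter typing shown above, assuming trimmed-down
// local interfaces instead of the real editor-ui types: the view now types its filter state as
// the layout's IFilters plus a credential-specific `type: string[]`, and the filter callback
// narrows the generic IResource back to a credential before reading `.type`.
interface IResource {
	id: string;
	name: string;
}

interface ICredentialsResponse extends IResource {
	type: string;
}

type IFilters = { search: string; homeProject: string } & Record<string, unknown>;

function onFilter(resource: IResource, newFilters: IFilters, matches: boolean): boolean {
	const credential = resource as ICredentialsResponse;
	const filters = newFilters as IFilters & { type: string[] };

	if (filters.type.length > 0) {
		matches = matches && filters.type.includes(credential.type);
	}
	if (filters.search) {
		// Simplified: the real view also matches against the credential type's display name.
		matches = matches || credential.name.toLowerCase().includes(filters.search.toLowerCase());
	}
	return matches;
}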