diff --git a/.github/workflows/chromatic.yml b/.github/workflows/chromatic.yml index 7c5682076a..feb39f1f4f 100644 --- a/.github/workflows/chromatic.yml +++ b/.github/workflows/chromatic.yml @@ -65,6 +65,7 @@ jobs: continue-on-error: true with: workingDir: packages/design-system + onlyChanged: true projectToken: ${{ secrets.CHROMATIC_PROJECT_TOKEN }} exitZeroOnChanges: false diff --git a/.github/workflows/release-publish.yml b/.github/workflows/release-publish.yml index e8d5fdc714..9196a7fccb 100644 --- a/.github/workflows/release-publish.yml +++ b/.github/workflows/release-publish.yml @@ -42,7 +42,7 @@ jobs: uses: actions/cache/save@v4.0.0 with: path: ./packages/**/dist - key: ${{ github.sha }}-base:build + key: ${{ github.sha }}-release:build - name: Dry-run publishing run: pnpm publish -r --no-git-checks --dry-run @@ -126,7 +126,7 @@ jobs: body: ${{github.event.pull_request.body}} create-sentry-release: - name: Create release on Sentry + name: Create a Sentry Release needs: [publish-to-npm, publish-to-docker-hub] runs-on: ubuntu-latest if: github.event.pull_request.merged == true @@ -136,18 +136,19 @@ jobs: SENTRY_ORG: ${{ secrets.SENTRY_ORG }} steps: + - uses: actions/checkout@v4.1.1 - name: Restore cached build artifacts uses: actions/cache/restore@v4.0.0 with: path: ./packages/**/dist - key: ${{ github.sha }}:db-tests + key: ${{ github.sha }}-release:build - name: Create a frontend release uses: getsentry/action-release@v1.7.0 continue-on-error: true with: projects: ${{ secrets.SENTRY_FRONTEND_PROJECT }} - version: {{ needs.publish-to-npm.outputs.release }} + version: ${{ needs.publish-to-npm.outputs.release }} sourcemaps: packages/editor-ui/dist - name: Create a backend release @@ -155,7 +156,7 @@ jobs: continue-on-error: true with: projects: ${{ secrets.SENTRY_BACKEND_PROJECT }} - version: {{ needs.publish-to-npm.outputs.release }} + version: ${{ needs.publish-to-npm.outputs.release }} sourcemaps: packages/cli/dist packages/core/dist 
packages/nodes-base/dist packages/@n8n/n8n-nodes-langchain/dist trigger-release-note: diff --git a/CHANGELOG.md b/CHANGELOG.md index 4a380fa531..f85ab264be 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,39 @@ +# [1.67.0](https://github.com/n8n-io/n8n/compare/n8n@1.66.0...n8n@1.67.0) (2024-11-06) + + +### Bug Fixes + +* Bring back nodes panel telemetry events ([#11456](https://github.com/n8n-io/n8n/issues/11456)) ([130c942](https://github.com/n8n-io/n8n/commit/130c942f633788d1b2f937d6fea342d4450c6e3d)) +* **core:** Account for double quotes in instance base URL ([#11495](https://github.com/n8n-io/n8n/issues/11495)) ([c5191e6](https://github.com/n8n-io/n8n/commit/c5191e697a9a9ebfa2b67587cd01b5835ebf6ea8)) +* **core:** Do not delete waiting executions when saving of successful executions is disabled ([#11458](https://github.com/n8n-io/n8n/issues/11458)) ([e8757e5](https://github.com/n8n-io/n8n/commit/e8757e58f69e091ac3d2a2f8e8c8e33ac57c1e47)) +* **core:** Don't send a `executionFinished` event to the browser with no run data if the execution has already been cleaned up ([#11502](https://github.com/n8n-io/n8n/issues/11502)) ([d1153f5](https://github.com/n8n-io/n8n/commit/d1153f51e80911cbc8f34ba5f038f349b75295c3)) +* **core:** Include `projectId` in range query middleware ([#11590](https://github.com/n8n-io/n8n/issues/11590)) ([a6070af](https://github.com/n8n-io/n8n/commit/a6070afdda29631fd36e5213f52bf815268bcda4)) +* **core:** Save exeution progress for waiting executions, even when progress saving is disabled ([#11535](https://github.com/n8n-io/n8n/issues/11535)) ([6b9353c](https://github.com/n8n-io/n8n/commit/6b9353c80f61ab36945fff434d98242dc1cab7b3)) +* **core:** Use the correct docs URL for regular nodes when used as tools ([#11529](https://github.com/n8n-io/n8n/issues/11529)) ([a092b8e](https://github.com/n8n-io/n8n/commit/a092b8e972e1253d92df416f19096a045858e7c1)) +* **Edit Image Node:** Fix Text operation by setting Arial as default font 
([#11125](https://github.com/n8n-io/n8n/issues/11125)) ([60c1ace](https://github.com/n8n-io/n8n/commit/60c1ace64be29d651ce7b777fbb576598e38b9d7)) +* **editor:** Auto focus first fields on SignIn, SignUp and ForgotMyPassword views ([#11445](https://github.com/n8n-io/n8n/issues/11445)) ([5b5bd72](https://github.com/n8n-io/n8n/commit/5b5bd7291dde17880b7699f7e6832938599ffd8f)) +* **editor:** Do not overwrite the webhookId in the new canvas ([#11562](https://github.com/n8n-io/n8n/issues/11562)) ([dfd785b](https://github.com/n8n-io/n8n/commit/dfd785bc0894257eb6e62b0dd8f71248c27aae53)) +* **editor:** Ensure Enter key on Cancel button correctly cancels node rename ([#11563](https://github.com/n8n-io/n8n/issues/11563)) ([be05ae3](https://github.com/n8n-io/n8n/commit/be05ae36e7790156cb48b317fc254ae46a3b2d8c)) +* **editor:** Fix emitting `n8nReady` notification via `postmessage` on new canvas ([#11558](https://github.com/n8n-io/n8n/issues/11558)) ([463d101](https://github.com/n8n-io/n8n/commit/463d101f3592e6df4afd66c4d0fde0cb4aec34cc)) +* **editor:** Fix run index input for RunData view in sub-nodes ([#11538](https://github.com/n8n-io/n8n/issues/11538)) ([434d31c](https://github.com/n8n-io/n8n/commit/434d31ce928342d52b6ab8b78639afd7829216d4)) +* **editor:** Fix selected credential being overwritten in NDV ([#11496](https://github.com/n8n-io/n8n/issues/11496)) ([a26c0e2](https://github.com/n8n-io/n8n/commit/a26c0e2c3c7da87bfaba9737a967aa0070810d85)) +* **editor:** Keep workflow pristine after load on new canvas ([#11579](https://github.com/n8n-io/n8n/issues/11579)) ([7254359](https://github.com/n8n-io/n8n/commit/7254359855b89769613cd5cc24dbb4f45a7cc76f)) +* Show Pinned data in demo mode ([#11490](https://github.com/n8n-io/n8n/issues/11490)) ([ca2a583](https://github.com/n8n-io/n8n/commit/ca2a583b5cbb0cba3ecb694261806de16547aa91)) +* Toast not aligned to the bottom when AI assistant disable ([#11549](https://github.com/n8n-io/n8n/issues/11549)) 
([e80f7e0](https://github.com/n8n-io/n8n/commit/e80f7e0a02a972379f73af6a44de11768081086e)) + + +### Features + +* Add Rapid7 InsightVm credentials ([#11462](https://github.com/n8n-io/n8n/issues/11462)) ([46eceab](https://github.com/n8n-io/n8n/commit/46eceabc27ac219b11b85c16c533a2cff848c5dd)) +* **AI Transform Node:** UX improvements ([#11280](https://github.com/n8n-io/n8n/issues/11280)) ([8a48407](https://github.com/n8n-io/n8n/commit/8a484077af3d3e1fe2d1b90b1ea9edf4ba41fcb6)) +* **Anthropic Chat Model Node:** Add support for Haiku 3.5 ([#11551](https://github.com/n8n-io/n8n/issues/11551)) ([8b39825](https://github.com/n8n-io/n8n/commit/8b398256a81594a52f20f8eb8adf8ff205209bc1)) +* **Convert to File Node:** Add delimiter convert to csv ([#11556](https://github.com/n8n-io/n8n/issues/11556)) ([63d454b](https://github.com/n8n-io/n8n/commit/63d454b776c092ff8c6c521a7e083774adb8f649)) +* **editor:** Update panning and selection keybindings on new canvas ([#11534](https://github.com/n8n-io/n8n/issues/11534)) ([5e2e205](https://github.com/n8n-io/n8n/commit/5e2e205394adf76faf02aee2d4f21df71848e1d4)) +* **Gmail Trigger Node:** Add filter option to include drafts ([#11441](https://github.com/n8n-io/n8n/issues/11441)) ([7a2be77](https://github.com/n8n-io/n8n/commit/7a2be77f384a32ede3acad8fe24fb89227c058bf)) +* **Intercom Node:** Update credential to new style ([#11485](https://github.com/n8n-io/n8n/issues/11485)) ([b137e13](https://github.com/n8n-io/n8n/commit/b137e13845f0714ebf7421c837f5ab104b66709b)) + + + # [1.66.0](https://github.com/n8n-io/n8n/compare/n8n@1.65.0...n8n@1.66.0) (2024-10-31) diff --git a/cypress/e2e/2-credentials.cy.ts b/cypress/e2e/2-credentials.cy.ts index 8ce3bc4080..71c3083856 100644 --- a/cypress/e2e/2-credentials.cy.ts +++ b/cypress/e2e/2-credentials.cy.ts @@ -26,6 +26,22 @@ const nodeDetailsView = new NDV(); const NEW_CREDENTIAL_NAME = 'Something else'; const NEW_CREDENTIAL_NAME2 = 'Something else entirely'; +function createNotionCredential() { + 
workflowPage.actions.addNodeToCanvas(NOTION_NODE_NAME); + workflowPage.actions.openNode(NOTION_NODE_NAME); + workflowPage.getters.nodeCredentialsSelect().click(); + getVisibleSelect().find('li').last().click(); + credentialsModal.actions.fillCredentialsForm(); + cy.get('body').type('{esc}'); + workflowPage.actions.deleteNode(NOTION_NODE_NAME); +} + +function deleteSelectedCredential() { + workflowPage.getters.nodeCredentialsEditButton().click(); + credentialsModal.getters.deleteButton().click(); + cy.get('.el-message-box').find('button').contains('Yes').click(); +} + describe('Credentials', () => { beforeEach(() => { cy.visit(credentialsPage.url); @@ -229,6 +245,40 @@ describe('Credentials', () => { .should('have.value', NEW_CREDENTIAL_NAME); }); + it('should set a default credential when adding nodes', () => { + workflowPage.actions.visit(); + + createNotionCredential(); + + workflowPage.actions.addNodeToCanvas(NOTION_NODE_NAME, true, true); + workflowPage.getters + .nodeCredentialsSelect() + .find('input') + .should('have.value', NEW_NOTION_ACCOUNT_NAME); + + deleteSelectedCredential(); + }); + + it('should set a default credential when editing a node', () => { + workflowPage.actions.visit(); + + createNotionCredential(); + + workflowPage.actions.addNodeToCanvas(HTTP_REQUEST_NODE_NAME, true, true); + nodeDetailsView.getters.parameterInput('authentication').click(); + getVisibleSelect().find('li').contains('Predefined').click(); + + nodeDetailsView.getters.parameterInput('nodeCredentialType').click(); + getVisibleSelect().find('li').contains('Notion API').click(); + + workflowPage.getters + .nodeCredentialsSelect() + .find('input') + .should('have.value', NEW_NOTION_ACCOUNT_NAME); + + deleteSelectedCredential(); + }); + it('should setup generic authentication for HTTP node', () => { workflowPage.actions.visit(); workflowPage.actions.addNodeToCanvas(SCHEDULE_TRIGGER_NODE_NAME); diff --git a/cypress/e2e/45-ai-assistant.cy.ts b/cypress/e2e/45-ai-assistant.cy.ts index 
3b4f61f660..9c69269b33 100644 --- a/cypress/e2e/45-ai-assistant.cy.ts +++ b/cypress/e2e/45-ai-assistant.cy.ts @@ -4,6 +4,7 @@ import { clickCreateNewCredential, openCredentialSelect } from '../composables/n import { GMAIL_NODE_NAME, SCHEDULE_TRIGGER_NODE_NAME } from '../constants'; import { CredentialsModal, CredentialsPage, NDV, WorkflowPage } from '../pages'; import { AIAssistant } from '../pages/features/ai-assistant'; +import { NodeCreator } from '../pages/features/node-creator'; import { getVisibleSelect } from '../utils'; const wf = new WorkflowPage(); @@ -11,6 +12,7 @@ const ndv = new NDV(); const aiAssistant = new AIAssistant(); const credentialsPage = new CredentialsPage(); const credentialsModal = new CredentialsModal(); +const nodeCreatorFeature = new NodeCreator(); describe('AI Assistant::disabled', () => { beforeEach(() => { @@ -280,6 +282,20 @@ describe('AI Assistant::enabled', () => { wf.getters.isWorkflowSaved(); aiAssistant.getters.placeholderMessage().should('not.exist'); }); + + it('should send message via enter even with global NodeCreator panel opened', () => { + cy.intercept('POST', '/rest/ai/chat', { + statusCode: 200, + fixture: 'aiAssistant/responses/simple_message_response.json', + }).as('chatRequest'); + + wf.actions.addInitialNodeToCanvas(SCHEDULE_TRIGGER_NODE_NAME); + aiAssistant.actions.openChat(); + nodeCreatorFeature.actions.openNodeCreator(); + aiAssistant.getters.chatInput().type('Hello{Enter}'); + + aiAssistant.getters.placeholderMessage().should('not.exist'); + }); }); describe('AI Assistant Credential Help', () => { diff --git a/cypress/e2e/5-ndv.cy.ts b/cypress/e2e/5-ndv.cy.ts index f2ccccb6ab..70a62bb244 100644 --- a/cypress/e2e/5-ndv.cy.ts +++ b/cypress/e2e/5-ndv.cy.ts @@ -795,4 +795,46 @@ describe('NDV', () => { .find('[data-test-id=run-data-schema-item]') .should('contain.text', 'onlyOnItem3'); }); + + it('should keep search expanded after Test step node run', () => { + cy.createFixtureWorkflow('Test_ndv_search.json'); + 
workflowPage.actions.zoomToFit(); + workflowPage.actions.executeWorkflow(); + workflowPage.actions.openNode('Edit Fields'); + ndv.getters.outputPanel().should('be.visible'); + ndv.getters.outputPanel().findChildByTestId('ndv-search').click().type('US'); + ndv.getters.outputTableRow(1).find('mark').should('have.text', 'US'); + + ndv.actions.execute(); + ndv.getters + .outputPanel() + .findChildByTestId('ndv-search') + .should('be.visible') + .should('have.value', 'US'); + }); + + it('should not show items count when seaching in schema view', () => { + cy.createFixtureWorkflow('Test_ndv_search.json'); + workflowPage.actions.zoomToFit(); + workflowPage.actions.openNode('Edit Fields'); + ndv.getters.outputPanel().should('be.visible'); + ndv.actions.execute(); + ndv.actions.switchOutputMode('Schema'); + ndv.getters.outputPanel().find('[data-test-id=ndv-search]').click().type('US'); + ndv.getters.outputPanel().find('[data-test-id=ndv-items-count]').should('not.exist'); + }); + + it('should show additional tooltip when seaching in schema view if no matches', () => { + cy.createFixtureWorkflow('Test_ndv_search.json'); + workflowPage.actions.zoomToFit(); + workflowPage.actions.openNode('Edit Fields'); + ndv.getters.outputPanel().should('be.visible'); + ndv.actions.execute(); + ndv.actions.switchOutputMode('Schema'); + ndv.getters.outputPanel().find('[data-test-id=ndv-search]').click().type('foo'); + ndv.getters + .outputPanel() + .contains('To search field contents rather than just names, use Table or JSON view') + .should('exist'); + }); }); diff --git a/cypress/fixtures/Test_ndv_search.json b/cypress/fixtures/Test_ndv_search.json new file mode 100644 index 0000000000..996b558e5a --- /dev/null +++ b/cypress/fixtures/Test_ndv_search.json @@ -0,0 +1,135 @@ +{ + "name": "NDV search bugs (introduced by schema view?)", + "nodes": [ + { + "parameters": {}, + "id": "55635c7b-92ee-4d2d-a0c0-baff9ab071da", + "name": "When clicking ‘Test workflow’", + "type": 
"n8n-nodes-base.manualTrigger", + "position": [ + 800, + 380 + ], + "typeVersion": 1 + }, + { + "parameters": { + "operation": "getAllPeople" + }, + "id": "4737af43-e49b-4c92-b76f-32605c047114", + "name": "Customer Datastore (n8n training)", + "type": "n8n-nodes-base.n8nTrainingCustomerDatastore", + "typeVersion": 1, + "position": [ + 1020, + 380 + ] + }, + { + "parameters": { + "assignments": { + "assignments": [] + }, + "includeOtherFields": true, + "options": {} + }, + "id": "8cc9b374-1856-4f3f-9315-08e6e27840d8", + "name": "Edit Fields", + "type": "n8n-nodes-base.set", + "typeVersion": 3.4, + "position": [ + 1240, + 380 + ] + } + ], + "pinData": { + "Customer Datastore (n8n training)": [ + { + "json": { + "id": "23423532", + "name": "Jay Gatsby", + "email": "gatsby@west-egg.com", + "notes": "Keeps asking about a green light??", + "country": "US", + "created": "1925-04-10" + } + }, + { + "json": { + "id": "23423533", + "name": "José Arcadio Buendía", + "email": "jab@macondo.co", + "notes": "Lots of people named after him. 
Very confusing", + "country": "CO", + "created": "1967-05-05" + } + }, + { + "json": { + "id": "23423534", + "name": "Max Sendak", + "email": "info@in-and-out-of-weeks.org", + "notes": "Keeps rolling his terrible eyes", + "country": "US", + "created": "1963-04-09" + } + }, + { + "json": { + "id": "23423535", + "name": "Zaphod Beeblebrox", + "email": "captain@heartofgold.com", + "notes": "Felt like I was talking to more than one person", + "country": null, + "created": "1979-10-12" + } + }, + { + "json": { + "id": "23423536", + "name": "Edmund Pevensie", + "email": "edmund@narnia.gov", + "notes": "Passionate sailor", + "country": "UK", + "created": "1950-10-16" + } + } + ] + }, + "connections": { + "When clicking ‘Test workflow’": { + "main": [ + [ + { + "node": "Customer Datastore (n8n training)", + "type": "main", + "index": 0 + } + ] + ] + }, + "Customer Datastore (n8n training)": { + "main": [ + [ + { + "node": "Edit Fields", + "type": "main", + "index": 0 + } + ] + ] + } + }, + "active": false, + "settings": { + "executionOrder": "v1" + }, + "versionId": "20178044-fb64-4443-88dd-e941517520d0", + "meta": { + "templateCredsSetupCompleted": true, + "instanceId": "be251a83c052a9862eeac953816fbb1464f89dfbf79d7ac490a8e336a8cc8bfd" + }, + "id": "aBVnTRON9Y2cSmse", + "tags": [] +} \ No newline at end of file diff --git a/package.json b/package.json index 20c7ff181e..c7d4512b2e 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "n8n-monorepo", - "version": "1.66.0", + "version": "1.67.0", "private": true, "engines": { "node": ">=20.15", @@ -46,6 +46,7 @@ "@types/jest": "^29.5.3", "@types/node": "*", "@types/supertest": "^6.0.2", + "cross-env": "^7.0.3", "jest": "^29.6.2", "jest-environment-jsdom": "^29.6.2", "jest-expect-message": "^1.1.3", diff --git a/packages/@n8n/chat/package.json b/packages/@n8n/chat/package.json index 5dc5881181..a6098892f0 100644 --- a/packages/@n8n/chat/package.json +++ b/packages/@n8n/chat/package.json @@ -1,11 +1,11 @@ { 
"name": "@n8n/chat", - "version": "0.29.0", + "version": "0.30.0", "scripts": { "dev": "pnpm run storybook", "build": "pnpm build:vite && pnpm build:bundle", - "build:vite": "vite build", - "build:bundle": "INCLUDE_VUE=true vite build", + "build:vite": "cross-env vite build", + "build:bundle": "cross-env INCLUDE_VUE=true vite build", "preview": "vite preview", "test:dev": "vitest", "test": "vitest run", diff --git a/packages/@n8n/config/package.json b/packages/@n8n/config/package.json index d1dddf3e70..4867390bf7 100644 --- a/packages/@n8n/config/package.json +++ b/packages/@n8n/config/package.json @@ -1,6 +1,6 @@ { "name": "@n8n/config", - "version": "1.16.0", + "version": "1.17.0", "scripts": { "clean": "rimraf dist .turbo", "dev": "pnpm watch", diff --git a/packages/@n8n/config/src/configs/runners.config.ts b/packages/@n8n/config/src/configs/runners.config.ts index c7be197963..5a6969ba6f 100644 --- a/packages/@n8n/config/src/configs/runners.config.ts +++ b/packages/@n8n/config/src/configs/runners.config.ts @@ -10,9 +10,8 @@ export type TaskRunnerMode = 'internal_childprocess' | 'internal_launcher' | 'ex @Config export class TaskRunnersConfig { - // Defaults to true for now - @Env('N8N_RUNNERS_DISABLED') - disabled: boolean = true; + @Env('N8N_RUNNERS_ENABLED') + enabled: boolean = false; // Defaults to true for now @Env('N8N_RUNNERS_MODE') @@ -50,4 +49,8 @@ export class TaskRunnersConfig { /** How many concurrent tasks can a runner execute at a time */ @Env('N8N_RUNNERS_MAX_CONCURRENCY') maxConcurrency: number = 5; + + /** Should the output of deduplication be asserted for correctness */ + @Env('N8N_RUNNERS_ASSERT_DEDUPLICATION_OUTPUT') + assertDeduplicationOutput: boolean = false; } diff --git a/packages/@n8n/config/test/config.test.ts b/packages/@n8n/config/test/config.test.ts index bc10028f36..eeb98269de 100644 --- a/packages/@n8n/config/test/config.test.ts +++ b/packages/@n8n/config/test/config.test.ts @@ -222,7 +222,7 @@ describe('GlobalConfig', () => { }, 
}, taskRunners: { - disabled: true, + enabled: false, mode: 'internal_childprocess', path: '/runners', authToken: '', @@ -233,6 +233,7 @@ describe('GlobalConfig', () => { launcherRunner: 'javascript', maxOldSpaceSize: '', maxConcurrency: 5, + assertDeduplicationOutput: false, }, sentry: { backendDsn: '', diff --git a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ToolsAgent/execute.ts b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ToolsAgent/execute.ts index 84d775d0f5..25e6e78984 100644 --- a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ToolsAgent/execute.ts +++ b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ToolsAgent/execute.ts @@ -206,10 +206,28 @@ export async function toolsAgentExecute(this: IExecuteFunctions): Promise; + let parserInput: string; + + if (finalResponse instanceof Object) { + if ('output' in finalResponse) { + try { + // If the output is an object, we will try to parse it as JSON + // this is because parser expects stringified JSON object like { "output": { .... 
} } + // so we try to parse the output before wrapping it and then stringify it + parserInput = JSON.stringify({ output: jsonParse(finalResponse.output) }); + } catch (error) { + // If parsing of the output fails, we will use the raw output + parserInput = finalResponse.output; + } + } else { + // If the output is not an object, we will stringify it as it is + parserInput = JSON.stringify(finalResponse); + } + } else { + parserInput = finalResponse; + } + + const returnValues = (await outputParser.parse(parserInput)) as Record; return handleParsedStepOutput(returnValues); } return handleAgentFinishOutput(steps); diff --git a/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserAutofixing/OutputParserAutofixing.node.ts b/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserAutofixing/OutputParserAutofixing.node.ts index d4743fb043..4f385e1770 100644 --- a/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserAutofixing/OutputParserAutofixing.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserAutofixing/OutputParserAutofixing.node.ts @@ -1,5 +1,6 @@ import type { BaseLanguageModel } from '@langchain/core/language_models/base'; -import { NodeConnectionType } from 'n8n-workflow'; +import { PromptTemplate } from '@langchain/core/prompts'; +import { NodeConnectionType, NodeOperationError } from 'n8n-workflow'; import type { ISupplyDataFunctions, INodeType, @@ -7,6 +8,7 @@ import type { SupplyData, } from 'n8n-workflow'; +import { NAIVE_FIX_PROMPT } from './prompt'; import { N8nOutputFixingParser, type N8nStructuredOutputParser, @@ -65,6 +67,27 @@ export class OutputParserAutofixing implements INodeType { default: '', }, getConnectionHintNoticeField([NodeConnectionType.AiChain, NodeConnectionType.AiAgent]), + { + displayName: 'Options', + name: 'options', + type: 'collection', + placeholder: 'Add Option', + default: {}, + options: [ + { + displayName: 'Retry Prompt', + name: 'prompt', + type: 'string', + default: 
NAIVE_FIX_PROMPT, + typeOptions: { + rows: 10, + }, + hint: 'Should include "{error}", "{instructions}", and "{completion}" placeholders', + description: + 'Prompt template used for fixing the output. Uses placeholders: "{instructions}" for parsing rules, "{completion}" for the failed attempt, and "{error}" for the validation error message.', + }, + ], + }, ], }; @@ -77,8 +100,20 @@ export class OutputParserAutofixing implements INodeType { NodeConnectionType.AiOutputParser, itemIndex, )) as N8nStructuredOutputParser; + const prompt = this.getNodeParameter('options.prompt', itemIndex, NAIVE_FIX_PROMPT) as string; - const parser = new N8nOutputFixingParser(this, model, outputParser); + if (prompt.length === 0 || !prompt.includes('{error}')) { + throw new NodeOperationError( + this.getNode(), + 'Auto-fixing parser prompt has to contain {error} placeholder', + ); + } + const parser = new N8nOutputFixingParser( + this, + model, + outputParser, + PromptTemplate.fromTemplate(prompt), + ); return { response: parser, diff --git a/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserAutofixing/prompt.ts b/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserAutofixing/prompt.ts new file mode 100644 index 0000000000..9e4431a68c --- /dev/null +++ b/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserAutofixing/prompt.ts @@ -0,0 +1,16 @@ +export const NAIVE_FIX_PROMPT = `Instructions: +-------------- +{instructions} +-------------- +Completion: +-------------- +{completion} +-------------- + +Above, the Completion did not satisfy the constraints given in the Instructions. +Error: +-------------- +{error} +-------------- + +Please try again. 
Please only respond with an answer that satisfies the constraints laid out in the Instructions:`; diff --git a/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserAutofixing/test/OutputParserAutofixing.node.test.ts b/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserAutofixing/test/OutputParserAutofixing.node.test.ts index 32d25d4f73..9fcae1a8fa 100644 --- a/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserAutofixing/test/OutputParserAutofixing.node.test.ts +++ b/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserAutofixing/test/OutputParserAutofixing.node.test.ts @@ -1,15 +1,19 @@ /* eslint-disable @typescript-eslint/unbound-method */ /* eslint-disable @typescript-eslint/no-unsafe-call */ import type { BaseLanguageModel } from '@langchain/core/language_models/base'; +import { OutputParserException } from '@langchain/core/output_parsers'; import type { MockProxy } from 'jest-mock-extended'; import { mock } from 'jest-mock-extended'; import { normalizeItems } from 'n8n-core'; import type { IExecuteFunctions, IWorkflowDataProxyData } from 'n8n-workflow'; -import { ApplicationError, NodeConnectionType } from 'n8n-workflow'; +import { ApplicationError, NodeConnectionType, NodeOperationError } from 'n8n-workflow'; -import { N8nOutputFixingParser } from '../../../../utils/output_parsers/N8nOutputParser'; -import type { N8nStructuredOutputParser } from '../../../../utils/output_parsers/N8nOutputParser'; +import type { + N8nOutputFixingParser, + N8nStructuredOutputParser, +} from '../../../../utils/output_parsers/N8nOutputParser'; import { OutputParserAutofixing } from '../OutputParserAutofixing.node'; +import { NAIVE_FIX_PROMPT } from '../prompt'; describe('OutputParserAutofixing', () => { let outputParser: OutputParserAutofixing; @@ -34,6 +38,13 @@ describe('OutputParserAutofixing', () => { throw new ApplicationError('Unexpected connection type'); }); + thisArg.getNodeParameter.mockReset(); + 
thisArg.getNodeParameter.mockImplementation((parameterName) => { + if (parameterName === 'options.prompt') { + return NAIVE_FIX_PROMPT; + } + throw new ApplicationError('Not implemented'); + }); }); afterEach(() => { @@ -48,73 +59,132 @@ describe('OutputParserAutofixing', () => { }); } - it('should successfully parse valid output without needing to fix it', async () => { - const validOutput = { name: 'Alice', age: 25 }; + describe('Configuration', () => { + it('should throw error when prompt template does not contain {error} placeholder', async () => { + thisArg.getNodeParameter.mockImplementation((parameterName) => { + if (parameterName === 'options.prompt') { + return 'Invalid prompt without error placeholder'; + } + throw new ApplicationError('Not implemented'); + }); - mockStructuredOutputParser.parse.mockResolvedValueOnce(validOutput); + await expect(outputParser.supplyData.call(thisArg, 0)).rejects.toThrow( + new NodeOperationError( + thisArg.getNode(), + 'Auto-fixing parser prompt has to contain {error} placeholder', + ), + ); + }); - const { response } = (await outputParser.supplyData.call(thisArg, 0)) as { - response: N8nOutputFixingParser; - }; + it('should throw error when prompt template is empty', async () => { + thisArg.getNodeParameter.mockImplementation((parameterName) => { + if (parameterName === 'options.prompt') { + return ''; + } + throw new ApplicationError('Not implemented'); + }); - // Ensure the response contains the output-fixing parser - expect(response).toBeDefined(); - expect(response).toBeInstanceOf(N8nOutputFixingParser); + await expect(outputParser.supplyData.call(thisArg, 0)).rejects.toThrow( + new NodeOperationError( + thisArg.getNode(), + 'Auto-fixing parser prompt has to contain {error} placeholder', + ), + ); + }); - const result = await response.parse('{"name": "Alice", "age": 25}'); + it('should use default prompt when none specified', async () => { + thisArg.getNodeParameter.mockImplementation((parameterName) => { + if 
(parameterName === 'options.prompt') { + return NAIVE_FIX_PROMPT; + } + throw new ApplicationError('Not implemented'); + }); - // Validate that the parser succeeds without retry - expect(result).toEqual(validOutput); - expect(mockStructuredOutputParser.parse).toHaveBeenCalledTimes(1); // Only one call to parse + const { response } = (await outputParser.supplyData.call(thisArg, 0)) as { + response: N8nOutputFixingParser; + }; + + expect(response).toBeDefined(); + }); }); - it('should throw an error when both structured parser and fixing parser fail', async () => { - mockStructuredOutputParser.parse - .mockRejectedValueOnce(new Error('Invalid JSON')) // First attempt fails - .mockRejectedValueOnce(new Error('Fixing attempt failed')); // Second attempt fails + describe('Parsing', () => { + it('should successfully parse valid output without needing to fix it', async () => { + const validOutput = { name: 'Alice', age: 25 }; - const { response } = (await outputParser.supplyData.call(thisArg, 0)) as { - response: N8nOutputFixingParser; - }; + mockStructuredOutputParser.parse.mockResolvedValueOnce(validOutput); - response.getRetryChain = getMockedRetryChain('{}'); + const { response } = (await outputParser.supplyData.call(thisArg, 0)) as { + response: N8nOutputFixingParser; + }; - await expect(response.parse('Invalid JSON string')).rejects.toThrow('Fixing attempt failed'); - expect(mockStructuredOutputParser.parse).toHaveBeenCalledTimes(2); - }); + const result = await response.parse('{"name": "Alice", "age": 25}'); - it('should reject on the first attempt and succeed on retry with the parsed content', async () => { - const validOutput = { name: 'Bob', age: 28 }; + expect(result).toEqual(validOutput); + expect(mockStructuredOutputParser.parse).toHaveBeenCalledTimes(1); + }); - mockStructuredOutputParser.parse.mockRejectedValueOnce(new Error('Invalid JSON')); + it('should not retry on non-OutputParserException errors', async () => { + const error = new Error('Some other 
error'); + mockStructuredOutputParser.parse.mockRejectedValueOnce(error); - const { response } = (await outputParser.supplyData.call(thisArg, 0)) as { - response: N8nOutputFixingParser; - }; + const { response } = (await outputParser.supplyData.call(thisArg, 0)) as { + response: N8nOutputFixingParser; + }; - response.getRetryChain = getMockedRetryChain(JSON.stringify(validOutput)); + await expect(response.parse('Invalid JSON string')).rejects.toThrow(error); + expect(mockStructuredOutputParser.parse).toHaveBeenCalledTimes(1); + }); - mockStructuredOutputParser.parse.mockResolvedValueOnce(validOutput); + it('should retry on OutputParserException and succeed', async () => { + const validOutput = { name: 'Bob', age: 28 }; - const result = await response.parse('Invalid JSON string'); + mockStructuredOutputParser.parse + .mockRejectedValueOnce(new OutputParserException('Invalid JSON')) + .mockResolvedValueOnce(validOutput); - expect(result).toEqual(validOutput); - expect(mockStructuredOutputParser.parse).toHaveBeenCalledTimes(2); // First fails, second succeeds - }); + const { response } = (await outputParser.supplyData.call(thisArg, 0)) as { + response: N8nOutputFixingParser; + }; - it('should handle non-JSON formatted response from fixing parser', async () => { - mockStructuredOutputParser.parse.mockRejectedValueOnce(new Error('Invalid JSON')); + response.getRetryChain = getMockedRetryChain(JSON.stringify(validOutput)); - const { response } = (await outputParser.supplyData.call(thisArg, 0)) as { - response: N8nOutputFixingParser; - }; + const result = await response.parse('Invalid JSON string'); - response.getRetryChain = getMockedRetryChain('This is not JSON'); + expect(result).toEqual(validOutput); + expect(mockStructuredOutputParser.parse).toHaveBeenCalledTimes(2); + }); - mockStructuredOutputParser.parse.mockRejectedValueOnce(new Error('Unexpected token')); + it('should handle failed retry attempt', async () => { + mockStructuredOutputParser.parse + 
.mockRejectedValueOnce(new OutputParserException('Invalid JSON')) + .mockRejectedValueOnce(new Error('Still invalid JSON')); - // Expect the structured parser to throw an error on invalid JSON from retry - await expect(response.parse('Invalid JSON string')).rejects.toThrow('Unexpected token'); - expect(mockStructuredOutputParser.parse).toHaveBeenCalledTimes(2); // First fails, second tries and fails + const { response } = (await outputParser.supplyData.call(thisArg, 0)) as { + response: N8nOutputFixingParser; + }; + + response.getRetryChain = getMockedRetryChain('Still not valid JSON'); + + await expect(response.parse('Invalid JSON string')).rejects.toThrow('Still invalid JSON'); + expect(mockStructuredOutputParser.parse).toHaveBeenCalledTimes(2); + }); + + it('should throw non-OutputParserException errors immediately without retry', async () => { + const customError = new Error('Database connection error'); + const retryChainSpy = jest.fn(); + + mockStructuredOutputParser.parse.mockRejectedValueOnce(customError); + + const { response } = (await outputParser.supplyData.call(thisArg, 0)) as { + response: N8nOutputFixingParser; + }; + + response.getRetryChain = retryChainSpy; + + await expect(response.parse('Some input')).rejects.toThrow('Database connection error'); + expect(mockStructuredOutputParser.parse).toHaveBeenCalledTimes(1); + expect(retryChainSpy).not.toHaveBeenCalled(); + }); }); }); diff --git a/packages/@n8n/nodes-langchain/package.json b/packages/@n8n/nodes-langchain/package.json index 5910094890..38acd88c9a 100644 --- a/packages/@n8n/nodes-langchain/package.json +++ b/packages/@n8n/nodes-langchain/package.json @@ -1,6 +1,6 @@ { "name": "@n8n/n8n-nodes-langchain", - "version": "1.66.0", + "version": "1.67.0", "description": "", "main": "index.js", "scripts": { diff --git a/packages/@n8n/nodes-langchain/utils/output_parsers/N8nOutputFixingParser.ts b/packages/@n8n/nodes-langchain/utils/output_parsers/N8nOutputFixingParser.ts index eec3b0c187..de07bfc7cf 
100644 --- a/packages/@n8n/nodes-langchain/utils/output_parsers/N8nOutputFixingParser.ts +++ b/packages/@n8n/nodes-langchain/utils/output_parsers/N8nOutputFixingParser.ts @@ -1,12 +1,12 @@ import type { Callbacks } from '@langchain/core/callbacks/manager'; import type { BaseLanguageModel } from '@langchain/core/language_models/base'; import type { AIMessage } from '@langchain/core/messages'; -import { BaseOutputParser } from '@langchain/core/output_parsers'; +import { BaseOutputParser, OutputParserException } from '@langchain/core/output_parsers'; +import type { PromptTemplate } from '@langchain/core/prompts'; import type { ISupplyDataFunctions } from 'n8n-workflow'; import { NodeConnectionType } from 'n8n-workflow'; import type { N8nStructuredOutputParser } from './N8nStructuredOutputParser'; -import { NAIVE_FIX_PROMPT } from './prompt'; import { logAiEvent } from '../helpers'; export class N8nOutputFixingParser extends BaseOutputParser { @@ -16,12 +16,13 @@ export class N8nOutputFixingParser extends BaseOutputParser { private context: ISupplyDataFunctions, private model: BaseLanguageModel, private outputParser: N8nStructuredOutputParser, + private fixPromptTemplate: PromptTemplate, ) { super(); } getRetryChain() { - return NAIVE_FIX_PROMPT.pipe(this.model); + return this.fixPromptTemplate.pipe(this.model); } /** @@ -47,11 +48,14 @@ export class N8nOutputFixingParser extends BaseOutputParser { return response; } catch (error) { + if (!(error instanceof OutputParserException)) { + throw error; + } try { // Second attempt: use retry chain to fix the output const result = (await this.getRetryChain().invoke({ completion, - error, + error: error.message, instructions: this.getFormatInstructions(), })) as AIMessage; diff --git a/packages/@n8n/task-runner/package.json b/packages/@n8n/task-runner/package.json index 4b5478a97d..8350667099 100644 --- a/packages/@n8n/task-runner/package.json +++ b/packages/@n8n/task-runner/package.json @@ -1,6 +1,6 @@ { "name": 
"@n8n/task-runner", - "version": "1.4.0", + "version": "1.5.0", "scripts": { "clean": "rimraf dist .turbo", "start": "node dist/start.js", diff --git a/packages/@n8n/task-runner/src/__tests__/node-types.test.ts b/packages/@n8n/task-runner/src/__tests__/node-types.test.ts index c102c80df3..c535bb0147 100644 --- a/packages/@n8n/task-runner/src/__tests__/node-types.test.ts +++ b/packages/@n8n/task-runner/src/__tests__/node-types.test.ts @@ -63,4 +63,35 @@ describe('TaskRunnerNodeTypes', () => { }); }); }); + + describe('addNodeTypeDescriptions', () => { + it('should add new node types', () => { + const nodeTypes = new TaskRunnerNodeTypes(TYPES); + + const nodeTypeDescriptions = [ + { name: 'new-type', version: 1 }, + { name: 'new-type', version: 2 }, + ] as INodeTypeDescription[]; + + nodeTypes.addNodeTypeDescriptions(nodeTypeDescriptions); + + expect(nodeTypes.getByNameAndVersion('new-type', 1)).toEqual({ + description: { name: 'new-type', version: 1 }, + }); + expect(nodeTypes.getByNameAndVersion('new-type', 2)).toEqual({ + description: { name: 'new-type', version: 2 }, + }); + }); + }); + + describe('onlyUnknown', () => { + it('should return only unknown node types', () => { + const nodeTypes = new TaskRunnerNodeTypes(TYPES); + + const candidate = { name: 'unknown', version: 1 }; + + expect(nodeTypes.onlyUnknown([candidate])).toEqual([candidate]); + expect(nodeTypes.onlyUnknown([SINGLE_VERSIONED])).toEqual([]); + }); + }); }); diff --git a/packages/@n8n/task-runner/src/data-request/data-request-response-reconstruct.ts b/packages/@n8n/task-runner/src/data-request/data-request-response-reconstruct.ts new file mode 100644 index 0000000000..83a291a491 --- /dev/null +++ b/packages/@n8n/task-runner/src/data-request/data-request-response-reconstruct.ts @@ -0,0 +1,29 @@ +import type { IExecuteData, INodeExecutionData } from 'n8n-workflow'; + +import type { DataRequestResponse } from '@/runner-types'; + +/** + * Reconstructs data from a DataRequestResponse to the initial + 
* data structures. + */ +export class DataRequestResponseReconstruct { + /** + * Reconstructs `connectionInputData` from a DataRequestResponse + */ + reconstructConnectionInputData( + inputData: DataRequestResponse['inputData'], + ): INodeExecutionData[] { + return inputData?.main?.[0] ?? []; + } + + /** + * Reconstructs `executeData` from a DataRequestResponse + */ + reconstructExecuteData(response: DataRequestResponse): IExecuteData { + return { + data: response.inputData, + node: response.node, + source: response.connectionInputSource, + }; + } +} diff --git a/packages/@n8n/task-runner/src/index.ts b/packages/@n8n/task-runner/src/index.ts index bc770ea08e..5fcc6e078b 100644 --- a/packages/@n8n/task-runner/src/index.ts +++ b/packages/@n8n/task-runner/src/index.ts @@ -1,3 +1,4 @@ export * from './task-runner'; export * from './runner-types'; export * from './message-types'; +export * from './data-request/data-request-response-reconstruct'; diff --git a/packages/@n8n/task-runner/src/js-task-runner/__tests__/js-task-runner.test.ts b/packages/@n8n/task-runner/src/js-task-runner/__tests__/js-task-runner.test.ts index cd0863b13e..621a9c81a7 100644 --- a/packages/@n8n/task-runner/src/js-task-runner/__tests__/js-task-runner.test.ts +++ b/packages/@n8n/task-runner/src/js-task-runner/__tests__/js-task-runner.test.ts @@ -3,15 +3,21 @@ import type { CodeExecutionMode, IDataObject } from 'n8n-workflow'; import fs from 'node:fs'; import { builtinModules } from 'node:module'; +import type { JsRunnerConfig } from '@/config/js-runner-config'; +import { MainConfig } from '@/config/main-config'; +import { ExecutionError } from '@/js-task-runner/errors/execution-error'; import { ValidationError } from '@/js-task-runner/errors/validation-error'; -import type { DataRequestResponse, JSExecSettings } from '@/js-task-runner/js-task-runner'; +import { JsTaskRunner } from '@/js-task-runner/js-task-runner'; +import type {
DataRequestResponse } from '@/runner-types'; import type { Task } from '@/task-runner'; -import { newCodeTaskData, newTaskWithSettings, withPairedItem, wrapIntoJson } from './test-data'; -import type { JsRunnerConfig } from '../../config/js-runner-config'; -import { MainConfig } from '../../config/main-config'; -import { ExecutionError } from '../errors/execution-error'; +import { + newDataRequestResponse, + newTaskWithSettings, + withPairedItem, + wrapIntoJson, +} from './test-data'; jest.mock('ws'); @@ -68,7 +74,7 @@ describe('JsTaskRunner', () => { nodeMode: 'runOnceForAllItems', ...settings, }), - taskData: newCodeTaskData(inputItems.map(wrapIntoJson)), + taskData: newDataRequestResponse(inputItems.map(wrapIntoJson)), runner, }); }; @@ -91,7 +97,7 @@ describe('JsTaskRunner', () => { nodeMode: 'runOnceForEachItem', ...settings, }), - taskData: newCodeTaskData(inputItems.map(wrapIntoJson)), + taskData: newDataRequestResponse(inputItems.map(wrapIntoJson)), runner, }); }; @@ -108,7 +114,7 @@ describe('JsTaskRunner', () => { await execTaskWithParams({ task, - taskData: newCodeTaskData([wrapIntoJson({})]), + taskData: newDataRequestResponse([wrapIntoJson({})]), }); expect(defaultTaskRunner.makeRpcCall).toHaveBeenCalledWith(task.taskId, 'logNodeOutput', [ @@ -243,7 +249,7 @@ describe('JsTaskRunner', () => { code: 'return { val: $env.VAR1 }', nodeMode: 'runOnceForAllItems', }), - taskData: newCodeTaskData(inputItems.map(wrapIntoJson), { + taskData: newDataRequestResponse(inputItems.map(wrapIntoJson), { envProviderState: { isEnvAccessBlocked: false, isProcessAvailable: true, @@ -262,7 +268,7 @@ describe('JsTaskRunner', () => { code: 'return { val: $env.VAR1 }', nodeMode: 'runOnceForAllItems', }), - taskData: newCodeTaskData(inputItems.map(wrapIntoJson), { + taskData: newDataRequestResponse(inputItems.map(wrapIntoJson), { envProviderState: { isEnvAccessBlocked: true, isProcessAvailable: true, @@ -279,7 +285,7 @@ describe('JsTaskRunner', () => { code: 'return 
Object.values($env).concat(Object.keys($env))', nodeMode: 'runOnceForAllItems', }), - taskData: newCodeTaskData(inputItems.map(wrapIntoJson), { + taskData: newDataRequestResponse(inputItems.map(wrapIntoJson), { envProviderState: { isEnvAccessBlocked: false, isProcessAvailable: true, @@ -298,7 +304,7 @@ describe('JsTaskRunner', () => { code: 'return { val: $env.N8N_RUNNERS_N8N_URI }', nodeMode: 'runOnceForAllItems', }), - taskData: newCodeTaskData(inputItems.map(wrapIntoJson), { + taskData: newDataRequestResponse(inputItems.map(wrapIntoJson), { envProviderState: undefined, }), }); @@ -313,7 +319,7 @@ describe('JsTaskRunner', () => { code: 'return { val: Buffer.from("test-buffer").toString() }', nodeMode: 'runOnceForAllItems', }), - taskData: newCodeTaskData(inputItems.map(wrapIntoJson), { + taskData: newDataRequestResponse(inputItems.map(wrapIntoJson), { envProviderState: undefined, }), }); @@ -325,7 +331,7 @@ describe('JsTaskRunner', () => { code: 'return { val: Buffer.from("test-buffer").toString() }', nodeMode: 'runOnceForEachItem', }), - taskData: newCodeTaskData(inputItems.map(wrapIntoJson), { + taskData: newDataRequestResponse(inputItems.map(wrapIntoJson), { envProviderState: undefined, }), }); @@ -771,7 +777,7 @@ describe('JsTaskRunner', () => { code: 'unknown', nodeMode, }), - taskData: newCodeTaskData([wrapIntoJson({ a: 1 })]), + taskData: newDataRequestResponse([wrapIntoJson({ a: 1 })]), }), ).rejects.toThrow(ExecutionError); }, @@ -793,7 +799,7 @@ describe('JsTaskRunner', () => { jest.spyOn(runner, 'sendOffers').mockImplementation(() => {}); jest .spyOn(runner, 'requestData') - .mockResolvedValue(newCodeTaskData([wrapIntoJson({ a: 1 })])); + .mockResolvedValue(newDataRequestResponse([wrapIntoJson({ a: 1 })])); await runner.receivedSettings(taskId, task.settings); diff --git a/packages/@n8n/task-runner/src/js-task-runner/__tests__/test-data.ts b/packages/@n8n/task-runner/src/js-task-runner/__tests__/test-data.ts index 6de3e6d2b1..224f630807 100644 --- 
a/packages/@n8n/task-runner/src/js-task-runner/__tests__/test-data.ts +++ b/packages/@n8n/task-runner/src/js-task-runner/__tests__/test-data.ts @@ -2,7 +2,8 @@ import type { IDataObject, INode, INodeExecutionData, ITaskData } from 'n8n-work import { NodeConnectionType } from 'n8n-workflow'; import { nanoid } from 'nanoid'; -import type { DataRequestResponse, JSExecSettings } from '@/js-task-runner/js-task-runner'; +import type { JSExecSettings } from '@/js-task-runner/js-task-runner'; +import type { DataRequestResponse } from '@/runner-types'; import type { Task } from '@/task-runner'; /** @@ -46,10 +47,10 @@ export const newTaskData = (opts: Partial & Pick }); /** - * Creates a new all code task data with the given options + * Creates a new data request response with the given options */ -export const newCodeTaskData = ( - codeNodeInputData: INodeExecutionData[], +export const newDataRequestResponse = ( + inputData: INodeExecutionData[], opts: Partial = {}, ): DataRequestResponse => { const codeNode = newNode({ @@ -83,9 +84,8 @@ export const newCodeTaskData = ( nodes: [manualTriggerNode, codeNode], }, inputData: { - main: [codeNodeInputData], + main: [inputData], }, - connectionInputData: codeNodeInputData, node: codeNode, runExecutionData: { startData: {}, @@ -95,7 +95,7 @@ export const newCodeTaskData = ( newTaskData({ source: [], data: { - main: [codeNodeInputData], + main: [inputData], }, }), ], @@ -137,14 +137,13 @@ export const newCodeTaskData = ( var: 'value', }, }, - executeData: { - node: codeNode, - data: { - main: [codeNodeInputData], - }, - source: { - main: [{ previousNode: manualTriggerNode.name }], - }, + connectionInputSource: { + main: [ + { + previousNode: 'Trigger', + previousNodeOutput: 0, + }, + ], }, ...opts, }; diff --git a/packages/@n8n/task-runner/src/js-task-runner/built-ins-parser/__tests__/built-ins-parser.test.ts b/packages/@n8n/task-runner/src/js-task-runner/built-ins-parser/__tests__/built-ins-parser.test.ts index 
399d9e6e2b..366a9188de 100644 --- a/packages/@n8n/task-runner/src/js-task-runner/built-ins-parser/__tests__/built-ins-parser.test.ts +++ b/packages/@n8n/task-runner/src/js-task-runner/built-ins-parser/__tests__/built-ins-parser.test.ts @@ -1,8 +1,13 @@ import { getAdditionalKeys } from 'n8n-core'; -import type { IDataObject, INodeType, IWorkflowExecuteAdditionalData } from 'n8n-workflow'; +import type { + IDataObject, + IExecuteData, + INodeType, + IWorkflowExecuteAdditionalData, +} from 'n8n-workflow'; import { Workflow, WorkflowDataProxy } from 'n8n-workflow'; -import { newCodeTaskData } from '../../__tests__/test-data'; +import { newDataRequestResponse } from '../../__tests__/test-data'; import { BuiltInsParser } from '../built-ins-parser'; import { BuiltInsParserState } from '../built-ins-parser-state'; @@ -159,7 +164,12 @@ describe('BuiltInsParser', () => { describe('WorkflowDataProxy built-ins', () => { it('should have a known list of built-ins', () => { - const data = newCodeTaskData([]); + const data = newDataRequestResponse([]); + const executeData: IExecuteData = { + data: {}, + node: data.node, + source: data.connectionInputSource, + }; const dataProxy = new WorkflowDataProxy( new Workflow({ ...data.workflow, @@ -179,7 +189,7 @@ describe('BuiltInsParser', () => { data.runIndex, 0, data.activeNodeName, - data.connectionInputData, + [], data.siblingParameters, data.mode, getAdditionalKeys( @@ -187,7 +197,7 @@ describe('BuiltInsParser', () => { data.mode, data.runExecutionData, ), - data.executeData, + executeData, data.defaultReturnRunIndex, data.selfData, data.contextNodeName, diff --git a/packages/@n8n/task-runner/src/js-task-runner/js-task-runner.ts b/packages/@n8n/task-runner/src/js-task-runner/js-task-runner.ts index e0bebe0521..c64d58636b 100644 --- a/packages/@n8n/task-runner/src/js-task-runner/js-task-runner.ts +++ b/packages/@n8n/task-runner/src/js-task-runner/js-task-runner.ts @@ -1,27 +1,25 @@ import { getAdditionalKeys } from 'n8n-core'; 
-import { - WorkflowDataProxy, - // type IWorkflowDataProxyAdditionalKeys, - Workflow, -} from 'n8n-workflow'; +import { WorkflowDataProxy, Workflow } from 'n8n-workflow'; import type { CodeExecutionMode, - INode, - ITaskDataConnections, IWorkflowExecuteAdditionalData, - WorkflowParameters, IDataObject, - IExecuteData, INodeExecutionData, INodeParameters, - IRunExecutionData, WorkflowExecuteMode, + WorkflowParameters, + ITaskDataConnections, + INode, + IRunExecutionData, EnvProviderState, + IExecuteData, + INodeTypeDescription, } from 'n8n-workflow'; import * as a from 'node:assert'; import { runInNewContext, type Context } from 'node:vm'; -import type { TaskResultData } from '@/runner-types'; +import type { MainConfig } from '@/config/main-config'; +import type { DataRequestResponse, PartialAdditionalData, TaskResultData } from '@/runner-types'; import { type Task, TaskRunner } from '@/task-runner'; import { BuiltInsParser } from './built-ins-parser/built-ins-parser'; @@ -32,7 +30,7 @@ import { makeSerializable } from './errors/serializable-error'; import type { RequireResolver } from './require-resolver'; import { createRequireResolver } from './require-resolver'; import { validateRunForAllItemsOutput, validateRunForEachItemOutput } from './result-validation'; -import type { MainConfig } from '../config/main-config'; +import { DataRequestResponseReconstruct } from '../data-request/data-request-response-reconstruct'; export interface JSExecSettings { code: string; @@ -44,34 +42,19 @@ export interface JSExecSettings { mode: WorkflowExecuteMode; } -export interface PartialAdditionalData { - executionId?: string; - restartExecutionId?: string; - restApiUrl: string; - instanceBaseUrl: string; - formWaitingBaseUrl: string; - webhookBaseUrl: string; - webhookWaitingBaseUrl: string; - webhookTestBaseUrl: string; - currentNodeParameters?: INodeParameters; - executionTimeoutTimestamp?: number; - userId?: string; - variables: IDataObject; -} - -export interface 
DataRequestResponse { +export interface JsTaskData { workflow: Omit; inputData: ITaskDataConnections; + connectionInputData: INodeExecutionData[]; node: INode; runExecutionData: IRunExecutionData; runIndex: number; itemIndex: number; activeNodeName: string; - connectionInputData: INodeExecutionData[]; siblingParameters: INodeParameters; mode: WorkflowExecuteMode; - envProviderState?: EnvProviderState; + envProviderState: EnvProviderState; executeData?: IExecuteData; defaultReturnRunIndex: number; selfData: IDataObject; @@ -88,6 +71,8 @@ export class JsTaskRunner extends TaskRunner { private readonly builtInsParser = new BuiltInsParser(); + private readonly taskDataReconstruct = new DataRequestResponseReconstruct(); + constructor(config: MainConfig, name = 'JS Task Runner') { super({ taskType: 'javascript', @@ -114,11 +99,15 @@ export class JsTaskRunner extends TaskRunner { ? neededBuiltInsResult.result : BuiltInsParserState.newNeedsAllDataState(); - const data = await this.requestData( + const dataResponse = await this.requestData( task.taskId, neededBuiltIns.toDataRequestParams(), ); + const data = this.reconstructTaskData(dataResponse); + + await this.requestNodeTypeIfNeeded(neededBuiltIns, data.workflow, task.taskId); + const workflowParams = data.workflow; const workflow = new Workflow({ ...workflowParams, @@ -177,7 +166,7 @@ export class JsTaskRunner extends TaskRunner { private async runForAllItems( taskId: string, settings: JSExecSettings, - data: DataRequestResponse, + data: JsTaskData, workflow: Workflow, customConsole: CustomConsole, ): Promise { @@ -224,7 +213,7 @@ export class JsTaskRunner extends TaskRunner { private async runForEachItem( taskId: string, settings: JSExecSettings, - data: DataRequestResponse, + data: JsTaskData, workflow: Workflow, customConsole: CustomConsole, ): Promise { @@ -291,7 +280,7 @@ export class JsTaskRunner extends TaskRunner { return returnData; } - private createDataProxy(data: DataRequestResponse, workflow: Workflow, 
itemIndex: number) { + private createDataProxy(data: JsTaskData, workflow: Workflow, itemIndex: number) { return new WorkflowDataProxy( workflow, data.runExecutionData, @@ -335,4 +324,43 @@ export class JsTaskRunner extends TaskRunner { return new ExecutionError({ message: JSON.stringify(error) }); } + + private reconstructTaskData(response: DataRequestResponse): JsTaskData { + return { + ...response, + connectionInputData: this.taskDataReconstruct.reconstructConnectionInputData( + response.inputData, + ), + executeData: this.taskDataReconstruct.reconstructExecuteData(response), + }; + } + + private async requestNodeTypeIfNeeded( + neededBuiltIns: BuiltInsParserState, + workflow: JsTaskData['workflow'], + taskId: string, + ) { + /** + * We request node types only when we know a task needs all nodes, because + * needing all nodes means that the task relies on paired item functionality, + * which is the same requirement for needing node types. + */ + if (neededBuiltIns.needsAllNodes) { + const uniqueNodeTypes = new Map( + workflow.nodes.map((node) => [ + `${node.type}|${node.typeVersion}`, + { name: node.type, version: node.typeVersion }, + ]), + ); + + const unknownNodeTypes = this.nodeTypes.onlyUnknown([...uniqueNodeTypes.values()]); + + const nodeTypes = await this.requestNodeTypes( + taskId, + unknownNodeTypes, + ); + + this.nodeTypes.addNodeTypeDescriptions(nodeTypes); + } + } } diff --git a/packages/@n8n/task-runner/src/message-types.ts b/packages/@n8n/task-runner/src/message-types.ts index 95c5f5b72f..b5f17f965e 100644 --- a/packages/@n8n/task-runner/src/message-types.ts +++ b/packages/@n8n/task-runner/src/message-types.ts @@ -1,6 +1,11 @@ import type { INodeTypeBaseDescription } from 'n8n-workflow'; -import type { RPC_ALLOW_LIST, TaskDataRequestParams, TaskResultData } from './runner-types'; +import type { + NeededNodeType, + RPC_ALLOW_LIST, + TaskDataRequestParams, + TaskResultData, +} from './runner-types'; export namespace BrokerMessage { export namespace 
ToRunner { @@ -47,6 +52,8 @@ export namespace BrokerMessage { export interface NodeTypes { type: 'broker:nodetypes'; + taskId: string; + requestId: string; nodeTypes: INodeTypeBaseDescription[]; } @@ -87,6 +94,13 @@ export namespace BrokerMessage { requestParams: TaskDataRequestParams; } + export interface NodeTypesRequest { + type: 'broker:nodetypesrequest'; + taskId: string; + requestId: string; + requestParams: NeededNodeType[]; + } + export interface RPC { type: 'broker:rpc'; callId: string; @@ -95,7 +109,7 @@ export namespace BrokerMessage { params: unknown[]; } - export type All = TaskReady | TaskDone | TaskError | TaskDataRequest | RPC; + export type All = TaskReady | TaskDone | TaskError | TaskDataRequest | NodeTypesRequest | RPC; } } @@ -120,6 +134,13 @@ export namespace RequesterMessage { data: unknown; } + export interface NodeTypesResponse { + type: 'requester:nodetypesresponse'; + taskId: string; + requestId: string; + nodeTypes: INodeTypeBaseDescription[]; + } + export interface RPCResponse { type: 'requester:rpcresponse'; taskId: string; @@ -134,7 +155,13 @@ export namespace RequesterMessage { taskType: string; } - export type All = TaskSettings | TaskCancel | RPCResponse | TaskDataResponse | TaskRequest; + export type All = + | TaskSettings + | TaskCancel + | RPCResponse + | TaskDataResponse + | NodeTypesResponse + | TaskRequest; } } @@ -183,6 +210,25 @@ export namespace RunnerMessage { requestParams: TaskDataRequestParams; } + export interface NodeTypesRequest { + type: 'runner:nodetypesrequest'; + taskId: string; + requestId: string; + + /** + * Which node types should be included in the runner's node types request. + * + * Node types are needed only when the script relies on paired item functionality. + * If so, we need only the node types not already cached in the runner. + * + * TODO: In future we can trim this down to only node types in the paired item chain, + * rather than assuming we need all node types in the workflow. 
+ * + * @example [{ name: 'n8n-nodes-base.httpRequest', version: 1 }] + */ + requestParams: NeededNodeType[]; + } + export interface RPC { type: 'runner:rpc'; callId: string; @@ -199,6 +245,7 @@ export namespace RunnerMessage { | TaskRejected | TaskOffer | RPC - | TaskDataRequest; + | TaskDataRequest + | NodeTypesRequest; } } diff --git a/packages/@n8n/task-runner/src/node-types.ts b/packages/@n8n/task-runner/src/node-types.ts index 046321bff9..8f910134b5 100644 --- a/packages/@n8n/task-runner/src/node-types.ts +++ b/packages/@n8n/task-runner/src/node-types.ts @@ -7,6 +7,8 @@ import { type IVersionedNodeType, } from 'n8n-workflow'; +import type { NeededNodeType } from './runner-types'; + type VersionedTypes = Map; export const DEFAULT_NODETYPE_VERSION = 1; @@ -61,4 +63,30 @@ export class TaskRunnerNodeTypes implements INodeTypes { getKnownTypes(): IDataObject { throw new ApplicationError('Unimplemented `getKnownTypes`', { level: 'error' }); } + + addNodeTypeDescriptions(nodeTypeDescriptions: INodeTypeDescription[]) { + const newNodeTypes = this.parseNodeTypes(nodeTypeDescriptions); + + for (const [name, newVersions] of newNodeTypes.entries()) { + if (!this.nodeTypesByVersion.has(name)) { + this.nodeTypesByVersion.set(name, newVersions); + } else { + const existingVersions = this.nodeTypesByVersion.get(name)!; + for (const [version, nodeType] of newVersions.entries()) { + existingVersions.set(version, nodeType); + } + } + } + } + + /** Filter out node type versions that are already registered. 
*/ + onlyUnknown(nodeTypes: NeededNodeType[]) { + return nodeTypes.filter(({ name, version }) => { + const existingVersions = this.nodeTypesByVersion.get(name); + + if (!existingVersions) return true; + + return !existingVersions.has(version); + }); + } } diff --git a/packages/@n8n/task-runner/src/runner-types.ts b/packages/@n8n/task-runner/src/runner-types.ts index c55b50bf4c..4649c2cc2f 100644 --- a/packages/@n8n/task-runner/src/runner-types.ts +++ b/packages/@n8n/task-runner/src/runner-types.ts @@ -8,6 +8,7 @@ import type { INodeParameters, IRunExecutionData, ITaskDataConnections, + ITaskDataConnectionsSource, IWorkflowExecuteAdditionalData, Workflow, WorkflowExecuteMode, @@ -29,17 +30,16 @@ export interface TaskDataRequestParams { export interface DataRequestResponse { workflow: Omit; inputData: ITaskDataConnections; + connectionInputSource: ITaskDataConnectionsSource | null; node: INode; runExecutionData: IRunExecutionData; runIndex: number; itemIndex: number; activeNodeName: string; - connectionInputData: INodeExecutionData[]; siblingParameters: INodeParameters; mode: WorkflowExecuteMode; envProviderState: EnvProviderState; - executeData?: IExecuteData; defaultReturnRunIndex: number; selfData: IDataObject; contextNodeName: string; @@ -112,3 +112,6 @@ export const RPC_ALLOW_LIST = [ 'helpers.httpRequest', 'logNodeOutput', ] as const; + +/** Node types needed for the runner to execute a task. 
*/ +export type NeededNodeType = { name: string; version: number }; diff --git a/packages/@n8n/task-runner/src/task-runner.ts b/packages/@n8n/task-runner/src/task-runner.ts index 81de93e6b0..4bd8661daa 100644 --- a/packages/@n8n/task-runner/src/task-runner.ts +++ b/packages/@n8n/task-runner/src/task-runner.ts @@ -1,4 +1,4 @@ -import { ApplicationError, type INodeTypeDescription } from 'n8n-workflow'; +import { ApplicationError } from 'n8n-workflow'; import { nanoid } from 'nanoid'; import { type MessageEvent, WebSocket } from 'ws'; @@ -25,6 +25,12 @@ interface DataRequest { reject: (error: unknown) => void; } +interface NodeTypesRequest { + requestId: string; + resolve: (data: unknown) => void; + reject: (error: unknown) => void; +} + interface RPCCall { callId: string; resolve: (data: unknown) => void; @@ -58,6 +64,8 @@ export abstract class TaskRunner { dataRequests: Map = new Map(); + nodeTypesRequests: Map = new Map(); + rpcCalls: Map = new Map(); nodeTypes: TaskRunnerNodeTypes = new TaskRunnerNodeTypes([]); @@ -168,15 +176,11 @@ export abstract class TaskRunner { this.handleRpcResponse(message.callId, message.status, message.data); break; case 'broker:nodetypes': - this.setNodeTypes(message.nodeTypes as unknown as INodeTypeDescription[]); + this.processNodeTypesResponse(message.requestId, message.nodeTypes); break; } } - setNodeTypes(nodeTypes: INodeTypeDescription[]) { - this.nodeTypes = new TaskRunnerNodeTypes(nodeTypes); - } - processDataResponse(requestId: string, data: unknown) { const request = this.dataRequests.get(requestId); if (!request) { @@ -187,6 +191,16 @@ export abstract class TaskRunner { request.resolve(data); } + processNodeTypesResponse(requestId: string, nodeTypes: unknown) { + const request = this.nodeTypesRequests.get(requestId); + + if (!request) return; + + // Deleting of the request is handled in `requestNodeTypes`, using a + // `finally` wrapped around the return + request.resolve(nodeTypes); + } + hasOpenTasks() { return 
Object.values(this.runningTasks).length < this.maxConcurrency; } @@ -282,6 +296,34 @@ export abstract class TaskRunner { throw new ApplicationError('Unimplemented'); } + async requestNodeTypes( + taskId: Task['taskId'], + requestParams: RunnerMessage.ToBroker.NodeTypesRequest['requestParams'], + ) { + const requestId = nanoid(); + + const nodeTypesPromise = new Promise((resolve, reject) => { + this.nodeTypesRequests.set(requestId, { + requestId, + resolve: resolve as (data: unknown) => void, + reject, + }); + }); + + this.send({ + type: 'runner:nodetypesrequest', + taskId, + requestId, + requestParams, + }); + + try { + return await nodeTypesPromise; + } finally { + this.nodeTypesRequests.delete(requestId); + } + } + async requestData( taskId: Task['taskId'], requestParams: RunnerMessage.ToBroker.TaskDataRequest['requestParams'], diff --git a/packages/cli/package.json b/packages/cli/package.json index 0404b8c116..5c589116ee 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -1,6 +1,6 @@ { "name": "n8n", - "version": "1.66.0", + "version": "1.67.0", "description": "n8n Workflow Automation Tool", "main": "dist/index", "types": "dist/index.d.ts", diff --git a/packages/cli/src/commands/start.ts b/packages/cli/src/commands/start.ts index bb8b56d32b..42b5df13e6 100644 --- a/packages/cli/src/commands/start.ts +++ b/packages/cli/src/commands/start.ts @@ -221,7 +221,7 @@ export class Start extends BaseCommand { } const { taskRunners: taskRunnerConfig } = this.globalConfig; - if (!taskRunnerConfig.disabled) { + if (taskRunnerConfig.enabled) { const { TaskRunnerModule } = await import('@/runners/task-runner-module'); const taskRunnerModule = Container.get(TaskRunnerModule); await taskRunnerModule.start(); diff --git a/packages/cli/src/commands/worker.ts b/packages/cli/src/commands/worker.ts index 730c6f6e80..0291a9e416 100644 --- a/packages/cli/src/commands/worker.ts +++ b/packages/cli/src/commands/worker.ts @@ -113,7 +113,7 @@ export class Worker 
extends BaseCommand { ); const { taskRunners: taskRunnerConfig } = this.globalConfig; - if (!taskRunnerConfig.disabled) { + if (taskRunnerConfig.enabled) { const { TaskRunnerModule } = await import('@/runners/task-runner-module'); const taskRunnerModule = Container.get(TaskRunnerModule); await taskRunnerModule.start(); diff --git a/packages/cli/src/constants.ts b/packages/cli/src/constants.ts index 04512e8be9..be26616fb6 100644 --- a/packages/cli/src/constants.ts +++ b/packages/cli/src/constants.ts @@ -127,6 +127,9 @@ export const TIME = { * Eventually this will superseed `TIME` above */ export const Time = { + milliseconds: { + toMinutes: 1 / (60 * 1000), + }, seconds: { toMilliseconds: 1000, }, diff --git a/packages/cli/src/databases/dsl/table.ts b/packages/cli/src/databases/dsl/table.ts index 78ed98272c..f598b674d7 100644 --- a/packages/cli/src/databases/dsl/table.ts +++ b/packages/cli/src/databases/dsl/table.ts @@ -50,8 +50,8 @@ export class CreateTable extends TableOperation { ref: { tableName: string; columnName: string; - onDelete?: 'CASCADE'; - onUpdate?: 'CASCADE'; + onDelete?: 'RESTRICT' | 'CASCADE' | 'NO ACTION' | 'SET NULL'; + onUpdate?: 'RESTRICT' | 'CASCADE' | 'NO ACTION' | 'SET NULL'; name?: string; }, ) { diff --git a/packages/cli/src/databases/entities/index.ts b/packages/cli/src/databases/entities/index.ts index 39f67b3252..b73b348d8a 100644 --- a/packages/cli/src/databases/entities/index.ts +++ b/packages/cli/src/databases/entities/index.ts @@ -20,6 +20,7 @@ import { Settings } from './settings'; import { SharedCredentials } from './shared-credentials'; import { SharedWorkflow } from './shared-workflow'; import { TagEntity } from './tag-entity'; +import { TestDefinition } from './test-definition'; import { User } from './user'; import { Variables } from './variables'; import { WebhookEntity } from './webhook-entity'; @@ -58,4 +59,5 @@ export const entities = { ProjectRelation, ApiKey, ProcessedData, + TestDefinition, }; diff --git 
a/packages/cli/src/databases/entities/test-definition.ts b/packages/cli/src/databases/entities/test-definition.ts new file mode 100644 index 0000000000..5395bd0c4c --- /dev/null +++ b/packages/cli/src/databases/entities/test-definition.ts @@ -0,0 +1,61 @@ +import { + Column, + Entity, + Generated, + Index, + ManyToOne, + OneToOne, + PrimaryColumn, + RelationId, +} from '@n8n/typeorm'; +import { Length } from 'class-validator'; + +import { AnnotationTagEntity } from '@/databases/entities/annotation-tag-entity.ee'; +import { WorkflowEntity } from '@/databases/entities/workflow-entity'; + +import { WithTimestamps } from './abstract-entity'; + +/** + * Entity representing a Test Definition + * It combines: + * - the workflow under test + * - the workflow used to evaluate the results of test execution + * - the annotation tag used as a filter to select test cases from previous executions of the workflow under test + */ +@Entity() +@Index(['workflow']) +@Index(['evaluationWorkflow']) +export class TestDefinition extends WithTimestamps { + @Generated() + @PrimaryColumn() + id: number; + + @Column({ length: 255 }) + @Length(1, 255, { message: 'Test name must be $constraint1 to $constraint2 characters long.'
}) + name: string; + + /** + * Relation to the workflow under test + */ + @ManyToOne('WorkflowEntity', 'tests') + workflow: WorkflowEntity; + + @RelationId((test: TestDefinition) => test.workflow) + workflowId: string; + + /** + * Relation to the workflow used to evaluate the results of test execution + */ + @ManyToOne('WorkflowEntity', 'evaluationTests') + evaluationWorkflow: WorkflowEntity; + + @RelationId((test: TestDefinition) => test.evaluationWorkflow) + evaluationWorkflowId: string; + + /** + * Relation to the annotation tag associated with the test + * This tag will be used to select the test cases to run from previous executions + */ + @OneToOne('AnnotationTagEntity', 'test') + annotationTag: AnnotationTagEntity; +} diff --git a/packages/cli/src/databases/migrations/common/1730386903556-CreateTestDefinitionTable.ts b/packages/cli/src/databases/migrations/common/1730386903556-CreateTestDefinitionTable.ts new file mode 100644 index 0000000000..d71353ee73 --- /dev/null +++ b/packages/cli/src/databases/migrations/common/1730386903556-CreateTestDefinitionTable.ts @@ -0,0 +1,37 @@ +import type { MigrationContext, ReversibleMigration } from '@/databases/types'; + +const testEntityTableName = 'test_definition'; + +export class CreateTestDefinitionTable1730386903556 implements ReversibleMigration { + async up({ schemaBuilder: { createTable, column } }: MigrationContext) { + await createTable(testEntityTableName) + .withColumns( + column('id').int.notNull.primary.autoGenerate, + column('name').varchar(255).notNull, + column('workflowId').varchar(36).notNull, + column('evaluationWorkflowId').varchar(36), + column('annotationTagId').varchar(16), + ) + .withIndexOn('workflowId') + .withIndexOn('evaluationWorkflowId') + .withForeignKey('workflowId', { + tableName: 'workflow_entity', + columnName: 'id', + onDelete: 'CASCADE', + }) + .withForeignKey('evaluationWorkflowId', { + tableName: 'workflow_entity', + columnName: 'id', + onDelete: 'SET NULL', + }) + 
.withForeignKey('annotationTagId', { + tableName: 'annotation_tag_entity', + columnName: 'id', + onDelete: 'SET NULL', + }).withTimestamps; + } + + async down({ schemaBuilder: { dropTable } }: MigrationContext) { + await dropTable(testEntityTableName); + } +} diff --git a/packages/cli/src/databases/migrations/mysqldb/index.ts b/packages/cli/src/databases/migrations/mysqldb/index.ts index ff40fd9dc0..ebe7cf76c0 100644 --- a/packages/cli/src/databases/migrations/mysqldb/index.ts +++ b/packages/cli/src/databases/migrations/mysqldb/index.ts @@ -68,6 +68,7 @@ import { CreateProcessedDataTable1726606152711 } from '../common/1726606152711-C import { SeparateExecutionCreationFromStart1727427440136 } from '../common/1727427440136-SeparateExecutionCreationFromStart'; import { AddMissingPrimaryKeyOnAnnotationTagMapping1728659839644 } from '../common/1728659839644-AddMissingPrimaryKeyOnAnnotationTagMapping'; import { UpdateProcessedDataValueColumnToText1729607673464 } from '../common/1729607673464-UpdateProcessedDataValueColumnToText'; +import { CreateTestDefinitionTable1730386903556 } from '../common/1730386903556-CreateTestDefinitionTable'; export const mysqlMigrations: Migration[] = [ InitialMigration1588157391238, @@ -138,4 +139,5 @@ export const mysqlMigrations: Migration[] = [ CreateProcessedDataTable1726606152711, AddMissingPrimaryKeyOnAnnotationTagMapping1728659839644, UpdateProcessedDataValueColumnToText1729607673464, + CreateTestDefinitionTable1730386903556, ]; diff --git a/packages/cli/src/databases/migrations/postgresdb/index.ts b/packages/cli/src/databases/migrations/postgresdb/index.ts index f3ac7e0474..731ddc2680 100644 --- a/packages/cli/src/databases/migrations/postgresdb/index.ts +++ b/packages/cli/src/databases/migrations/postgresdb/index.ts @@ -68,6 +68,7 @@ import { CreateProcessedDataTable1726606152711 } from '../common/1726606152711-C import { SeparateExecutionCreationFromStart1727427440136 } from 
'../common/1727427440136-SeparateExecutionCreationFromStart'; import { AddMissingPrimaryKeyOnAnnotationTagMapping1728659839644 } from '../common/1728659839644-AddMissingPrimaryKeyOnAnnotationTagMapping'; import { UpdateProcessedDataValueColumnToText1729607673464 } from '../common/1729607673464-UpdateProcessedDataValueColumnToText'; +import { CreateTestDefinitionTable1730386903556 } from '../common/1730386903556-CreateTestDefinitionTable'; export const postgresMigrations: Migration[] = [ InitialMigration1587669153312, @@ -138,4 +139,5 @@ export const postgresMigrations: Migration[] = [ CreateProcessedDataTable1726606152711, AddMissingPrimaryKeyOnAnnotationTagMapping1728659839644, UpdateProcessedDataValueColumnToText1729607673464, + CreateTestDefinitionTable1730386903556, ]; diff --git a/packages/cli/src/databases/migrations/sqlite/index.ts b/packages/cli/src/databases/migrations/sqlite/index.ts index e53b5f43bd..8004894ccf 100644 --- a/packages/cli/src/databases/migrations/sqlite/index.ts +++ b/packages/cli/src/databases/migrations/sqlite/index.ts @@ -65,6 +65,7 @@ import { CreateAnnotationTables1724753530828 } from '../common/1724753530828-Cre import { CreateProcessedDataTable1726606152711 } from '../common/1726606152711-CreateProcessedDataTable'; import { SeparateExecutionCreationFromStart1727427440136 } from '../common/1727427440136-SeparateExecutionCreationFromStart'; import { UpdateProcessedDataValueColumnToText1729607673464 } from '../common/1729607673464-UpdateProcessedDataValueColumnToText'; +import { CreateTestDefinitionTable1730386903556 } from '../common/1730386903556-CreateTestDefinitionTable'; const sqliteMigrations: Migration[] = [ InitialMigration1588102412422, @@ -132,6 +133,7 @@ const sqliteMigrations: Migration[] = [ CreateProcessedDataTable1726606152711, AddMissingPrimaryKeyOnAnnotationTagMapping1728659839644, UpdateProcessedDataValueColumnToText1729607673464, + CreateTestDefinitionTable1730386903556, ]; export { sqliteMigrations }; diff --git 
a/packages/cli/src/executions/__tests__/parse-range-query.middleware.test.ts b/packages/cli/src/executions/__tests__/parse-range-query.middleware.test.ts index 8b3395b226..3c8fa93053 100644 --- a/packages/cli/src/executions/__tests__/parse-range-query.middleware.test.ts +++ b/packages/cli/src/executions/__tests__/parse-range-query.middleware.test.ts @@ -108,6 +108,22 @@ describe('`parseRangeQuery` middleware', () => { expect(nextFn).toBeCalledTimes(1); }); + test('should parse `projectId` field', () => { + const req = mock({ + query: { + filter: '{ "projectId": "123" }', + limit: undefined, + firstId: undefined, + lastId: undefined, + }, + }); + + parseRangeQuery(req, res, nextFn); + + expect(req.rangeQuery.projectId).toBe('123'); + expect(nextFn).toBeCalledTimes(1); + }); + test('should delete invalid fields', () => { const req = mock({ query: { diff --git a/packages/cli/src/executions/execution.service.ts b/packages/cli/src/executions/execution.service.ts index 5f4ec0c535..60dadfdc1b 100644 --- a/packages/cli/src/executions/execution.service.ts +++ b/packages/cli/src/executions/execution.service.ts @@ -66,6 +66,7 @@ export const schemaGetExecutionsQueryFilter = { startedBefore: { type: 'date-time' }, annotationTags: { type: 'array', items: { type: 'string' } }, vote: { type: 'string' }, + projectId: { type: 'string' }, }, $defs: { metadata: { diff --git a/packages/cli/src/generic-helpers.ts b/packages/cli/src/generic-helpers.ts index e5978bb34a..378619a4e9 100644 --- a/packages/cli/src/generic-helpers.ts +++ b/packages/cli/src/generic-helpers.ts @@ -3,6 +3,7 @@ import { validate } from 'class-validator'; import type { AnnotationTagEntity } from '@/databases/entities/annotation-tag-entity.ee'; import type { CredentialsEntity } from '@/databases/entities/credentials-entity'; import type { TagEntity } from '@/databases/entities/tag-entity'; +import type { TestDefinition } from '@/databases/entities/test-definition'; import type { User } from 
'@/databases/entities/user'; import type { WorkflowEntity } from '@/databases/entities/workflow-entity'; @@ -12,6 +13,7 @@ import { BadRequestError } from './errors/response-errors/bad-request.error'; export async function validateEntity( entity: | WorkflowEntity + | TestDefinition | CredentialsEntity | TagEntity | AnnotationTagEntity diff --git a/packages/cli/src/node-types.ts b/packages/cli/src/node-types.ts index 26b1b61e36..553aedd620 100644 --- a/packages/cli/src/node-types.ts +++ b/packages/cli/src/node-types.ts @@ -1,3 +1,4 @@ +import type { NeededNodeType } from '@n8n/task-runner'; import type { Dirent } from 'fs'; import { readdir } from 'fs/promises'; import { loadClassInIsolation } from 'n8n-core'; @@ -149,4 +150,22 @@ export class NodeTypes implements INodeTypes { dirent.name.toLowerCase().startsWith('v') ); } + + getNodeTypeDescriptions(nodeTypes: NeededNodeType[]): INodeTypeDescription[] { + return nodeTypes.map(({ name: nodeTypeName, version: nodeTypeVersion }) => { + const nodeType = this.getNode(nodeTypeName); + + if (!nodeType) throw new ApplicationError(`Unknown node type: ${nodeTypeName}`); + + const { description } = NodeHelpers.getVersionedNodeType(nodeType.type, nodeTypeVersion); + + const descriptionCopy = { ...description }; + + descriptionCopy.name = descriptionCopy.name.startsWith('n8n-nodes') + ? 
descriptionCopy.name + : `n8n-nodes-base.${descriptionCopy.name}`; // nodes-base nodes are unprefixed + + return descriptionCopy; + }); + } } diff --git a/packages/cli/src/runners/__tests__/task-broker.test.ts b/packages/cli/src/runners/__tests__/task-broker.test.ts index 715c5d8eb8..614d04c3b5 100644 --- a/packages/cli/src/runners/__tests__/task-broker.test.ts +++ b/packages/cli/src/runners/__tests__/task-broker.test.ts @@ -1,5 +1,6 @@ import type { RunnerMessage, TaskResultData } from '@n8n/task-runner'; import { mock } from 'jest-mock-extended'; +import type { INodeTypeBaseDescription } from 'n8n-workflow'; import { TaskRejectError } from '../errors'; import { TaskBroker } from '../task-broker.service'; @@ -11,7 +12,7 @@ describe('TaskBroker', () => { let taskBroker: TaskBroker; beforeEach(() => { - taskBroker = new TaskBroker(mock(), mock()); + taskBroker = new TaskBroker(mock()); jest.restoreAllMocks(); }); @@ -76,13 +77,6 @@ describe('TaskBroker', () => { const messageCallback = jest.fn(); taskBroker.registerRunner(runner, messageCallback); - - expect(messageCallback).toBeCalledWith({ - type: 'broker:nodetypes', - // We're mocking the node types service, so this will - // be undefined. 
- nodeType: undefined, - }); }); }); @@ -560,5 +554,68 @@ describe('TaskBroker', () => { params: rpcParams, }); }); + + it('should handle `runner:nodetypesrequest` message', async () => { + const runnerId = 'runner1'; + const taskId = 'task1'; + const requesterId = 'requester1'; + const requestId = 'request1'; + const requestParams = [ + { + name: 'n8n-nodes-base.someNode', + version: 1, + }, + ]; + + const message: RunnerMessage.ToBroker.NodeTypesRequest = { + type: 'runner:nodetypesrequest', + taskId, + requestId, + requestParams, + }; + + const requesterMessageCallback = jest.fn(); + + taskBroker.registerRunner(mock({ id: runnerId }), jest.fn()); + taskBroker.setTasks({ + [taskId]: { id: taskId, runnerId, requesterId, taskType: 'test' }, + }); + taskBroker.registerRequester(requesterId, requesterMessageCallback); + + await taskBroker.onRunnerMessage(runnerId, message); + + expect(requesterMessageCallback).toHaveBeenCalledWith({ + type: 'broker:nodetypesrequest', + taskId, + requestId, + requestParams, + }); + }); + }); + + describe('onRequesterMessage', () => { + it('should handle `requester:nodetypesresponse` message', async () => { + const runnerId = 'runner1'; + const taskId = 'task1'; + const requesterId = 'requester1'; + const requestId = 'request1'; + const nodeTypes = [mock(), mock()]; + + const runnerMessageCallback = jest.fn(); + + taskBroker.registerRunner(mock({ id: runnerId }), runnerMessageCallback); + taskBroker.setTasks({ + [taskId]: { id: taskId, runnerId, requesterId, taskType: 'test' }, + }); + + await taskBroker.handleRequesterNodeTypesResponse(taskId, requestId, nodeTypes); + + expect(runnerMessageCallback).toHaveBeenCalledWith({ + type: 'broker:nodetypes', + taskId, + requestId, + nodeTypes, + }); + }); }); }); diff --git a/packages/cli/src/runners/__tests__/task-runner-process.test.ts b/packages/cli/src/runners/__tests__/task-runner-process.test.ts index eb04e3ab8e..fbab9ee1e3 100644 --- 
a/packages/cli/src/runners/__tests__/task-runner-process.test.ts +++ b/packages/cli/src/runners/__tests__/task-runner-process.test.ts @@ -22,7 +22,7 @@ require('child_process').spawn = spawnMock; describe('TaskRunnerProcess', () => { const logger = mockInstance(Logger); const runnerConfig = mockInstance(TaskRunnersConfig); - runnerConfig.disabled = false; + runnerConfig.enabled = true; runnerConfig.mode = 'internal_childprocess'; const authService = mock(); let taskRunnerProcess = new TaskRunnerProcess(logger, runnerConfig, authService); diff --git a/packages/cli/src/runners/runner-types.ts b/packages/cli/src/runners/runner-types.ts index 8fcfe968d3..b373d3051e 100644 --- a/packages/cli/src/runners/runner-types.ts +++ b/packages/cli/src/runners/runner-types.ts @@ -5,18 +5,6 @@ import type WebSocket from 'ws'; import type { TaskRunner } from './task-broker.service'; import type { AuthlessRequest } from '../requests'; -/** - * Specifies what data should be included for a task data request. 
- */ -export interface TaskDataRequestParams { - dataOfNodes: string[] | 'all'; - prevNode: boolean; - /** Whether input data for the node should be included */ - input: boolean; - /** Whether env provider's state should be included */ - env: boolean; -} - export interface DisconnectAnalyzer { determineDisconnectReason(runnerId: TaskRunner['id']): Promise; } diff --git a/packages/cli/src/runners/runner-ws-server.ts b/packages/cli/src/runners/runner-ws-server.ts index baaac82bf3..c691462558 100644 --- a/packages/cli/src/runners/runner-ws-server.ts +++ b/packages/cli/src/runners/runner-ws-server.ts @@ -70,7 +70,7 @@ export class TaskRunnerWsServer { this.sendMessage.bind(this, id) as MessageCallback, ); - this.logger.info(`Runner "${message.name}"(${id}) has been registered`); + this.logger.info(`Runner "${message.name}" (${id}) has been registered`); return; } diff --git a/packages/cli/src/runners/task-broker.service.ts b/packages/cli/src/runners/task-broker.service.ts index 7d00cf232e..daa5b48c07 100644 --- a/packages/cli/src/runners/task-broker.service.ts +++ b/packages/cli/src/runners/task-broker.service.ts @@ -8,7 +8,6 @@ import { ApplicationError } from 'n8n-workflow'; import { nanoid } from 'nanoid'; import { Service } from 'typedi'; -import { LoadNodesAndCredentials } from '@/load-nodes-and-credentials'; import { Logger } from '@/logging/logger.service'; import { TaskRejectError } from './errors'; @@ -79,19 +78,7 @@ export class TaskBroker { private pendingTaskRequests: TaskRequest[] = []; - constructor( - private readonly logger: Logger, - private readonly loadNodesAndCredentials: LoadNodesAndCredentials, - ) { - this.loadNodesAndCredentials.addPostProcessor(this.updateNodeTypes); - } - - updateNodeTypes = async () => { - await this.messageAllRunners({ - type: 'broker:nodetypes', - nodeTypes: this.loadNodesAndCredentials.types.nodes, - }); - }; + constructor(private readonly logger: Logger) {} expireTasks() { const now = process.hrtime.bigint(); @@ -105,10 
+92,6 @@ export class TaskBroker { registerRunner(runner: TaskRunner, messageCallback: MessageCallback) { this.knownRunners.set(runner.id, { runner, messageCallback }); void this.knownRunners.get(runner.id)!.messageCallback({ type: 'broker:runnerregistered' }); - void this.knownRunners.get(runner.id)!.messageCallback({ - type: 'broker:nodetypes', - nodeTypes: this.loadNodesAndCredentials.types.nodes, - }); } deregisterRunner(runnerId: string, error: Error) { @@ -145,14 +128,6 @@ export class TaskBroker { await this.knownRunners.get(runnerId)?.messageCallback(message); } - private async messageAllRunners(message: BrokerMessage.ToRunner.All) { - await Promise.allSettled( - [...this.knownRunners.values()].map(async (runner) => { - await runner.messageCallback(message); - }), - ); - } - private async messageRequester(requesterId: string, message: BrokerMessage.ToRequester.All) { await this.requesters.get(requesterId)?.(message); } @@ -187,7 +162,9 @@ export class TaskBroker { case 'runner:taskdatarequest': await this.handleDataRequest(message.taskId, message.requestId, message.requestParams); break; - + case 'runner:nodetypesrequest': + await this.handleNodeTypesRequest(message.taskId, message.requestId, message.requestParams); + break; case 'runner:rpc': await this.handleRpcRequest(message.taskId, message.callId, message.name, message.params); break; @@ -249,6 +226,23 @@ export class TaskBroker { }); } + async handleNodeTypesRequest( + taskId: Task['id'], + requestId: RunnerMessage.ToBroker.NodeTypesRequest['requestId'], + requestParams: RunnerMessage.ToBroker.NodeTypesRequest['requestParams'], + ) { + const task = this.tasks.get(taskId); + if (!task) { + return; + } + await this.messageRequester(task.requesterId, { + type: 'broker:nodetypesrequest', + taskId, + requestId, + requestParams, + }); + } + async handleResponse( taskId: Task['id'], requestId: RunnerMessage.ToBroker.TaskDataRequest['requestId'], @@ -284,6 +278,13 @@ export class TaskBroker { case 
'requester:taskdataresponse': await this.handleRequesterDataResponse(message.taskId, message.requestId, message.data); break; + case 'requester:nodetypesresponse': + await this.handleRequesterNodeTypesResponse( + message.taskId, + message.requestId, + message.nodeTypes, + ); + break; case 'requester:rpcresponse': await this.handleRequesterRpcResponse( message.taskId, @@ -322,6 +323,21 @@ export class TaskBroker { }); } + async handleRequesterNodeTypesResponse( + taskId: Task['id'], + requestId: RequesterMessage.ToBroker.NodeTypesResponse['requestId'], + nodeTypes: RequesterMessage.ToBroker.NodeTypesResponse['nodeTypes'], + ) { + const runner = await this.getRunnerOrFailTask(taskId); + + await this.messageRunner(runner.id, { + type: 'broker:nodetypes', + taskId, + requestId, + nodeTypes, + }); + } + handleRequesterAccept( taskId: Task['id'], settings: RequesterMessage.ToBroker.TaskSettings['settings'], diff --git a/packages/cli/src/runners/task-managers/__tests__/data-request-response-builder.test.ts b/packages/cli/src/runners/task-managers/__tests__/data-request-response-builder.test.ts index ea1492f64e..b8868983ed 100644 --- a/packages/cli/src/runners/task-managers/__tests__/data-request-response-builder.test.ts +++ b/packages/cli/src/runners/task-managers/__tests__/data-request-response-builder.test.ts @@ -1,42 +1,10 @@ -import type { TaskData } from '@n8n/task-runner'; +import type { PartialAdditionalData, TaskData } from '@n8n/task-runner'; import { mock } from 'jest-mock-extended'; -import type { IExecuteFunctions, IWorkflowExecuteAdditionalData } from 'n8n-workflow'; -import { type INode, type INodeExecutionData, type Workflow } from 'n8n-workflow'; +import type { Workflow } from 'n8n-workflow'; import { DataRequestResponseBuilder } from '../data-request-response-builder'; -const triggerNode: INode = mock({ - name: 'Trigger', -}); -const debugHelperNode: INode = mock({ - name: 'DebugHelper', -}); -const codeNode: INode = mock({ - name: 'Code', -}); -const 
workflow: TaskData['workflow'] = mock(); -const debugHelperNodeOutItems: INodeExecutionData[] = [ - { - json: { - uid: 'abb74fd4-bef2-4fae-9d53-ea24e9eb3032', - email: 'Dan.Schmidt31@yahoo.com', - firstname: 'Toni', - lastname: 'Schuster', - password: 'Q!D6C2', - }, - pairedItem: { - item: 0, - }, - }, -]; -const codeNodeInputItems: INodeExecutionData[] = debugHelperNodeOutItems; -const connectionInputData: TaskData['connectionInputData'] = codeNodeInputItems; -const envProviderState: TaskData['envProviderState'] = mock({ - env: {}, - isEnvAccessBlocked: false, - isProcessAvailable: true, -}); -const additionalData = mock({ +const additionalData = mock({ formWaitingBaseUrl: 'http://localhost:5678/form-waiting', instanceBaseUrl: 'http://localhost:5678/', restApiUrl: 'http://localhost:5678/rest', @@ -50,275 +18,57 @@ const additionalData = mock({ executionTimeoutTimestamp: undefined, restartExecutionId: undefined, }); -const executeFunctions = mock(); -/** - * Drawn with https://asciiflow.com/#/ - * Task data for an execution of the following WF: - * where ►► denotes the currently being executing node. 
- * ►► - * ┌───────────┐ ┌─────────────┐ ┌────────┐ - * │ Trigger ├──►│ DebugHelper ├───►│ Code │ - * └───────────┘ └─────────────┘ └────────┘ - */ -const taskData: TaskData = { - executeFunctions, - workflow, - connectionInputData, - inputData: { - main: [codeNodeInputItems], - }, - itemIndex: 0, - activeNodeName: codeNode.name, - contextNodeName: codeNode.name, - defaultReturnRunIndex: -1, - mode: 'manual', - envProviderState, - node: codeNode, - runExecutionData: { - startData: { - destinationNode: codeNode.name, - runNodeFilter: [triggerNode.name, debugHelperNode.name, codeNode.name], - }, - resultData: { - runData: { - [triggerNode.name]: [ - { - hints: [], - startTime: 1730313407328, - executionTime: 1, - source: [], - executionStatus: 'success', - data: { - main: [[]], - }, - }, - ], - [debugHelperNode.name]: [ - { - hints: [], - startTime: 1730313407330, - executionTime: 1, - source: [ - { - previousNode: triggerNode.name, - }, - ], - executionStatus: 'success', - data: { - main: [debugHelperNodeOutItems], - }, - }, - ], - }, - pinData: {}, - }, - executionData: { - contextData: {}, - nodeExecutionStack: [], - metadata: {}, - waitingExecution: { - [codeNode.name]: { - '0': { - main: [codeNodeInputItems], - }, - }, - }, - waitingExecutionSource: { - [codeNode.name]: { - '0': { - main: [ - { - previousNode: debugHelperNode.name, - }, - ], - }, - }, - }, - }, - }, - runIndex: 0, - selfData: {}, - siblingParameters: {}, - executeData: { - node: codeNode, - data: { - main: [codeNodeInputItems], - }, - source: { - main: [ - { - previousNode: debugHelperNode.name, - previousNodeOutput: 0, - }, - ], - }, - }, +const workflow: TaskData['workflow'] = mock({ + id: '1', + name: 'Test Workflow', + active: true, + connectionsBySourceNode: {}, + nodes: {}, + pinData: {}, + settings: {}, + staticData: {}, +}); + +const taskData = mock({ additionalData, -} as const; + workflow, +}); describe('DataRequestResponseBuilder', () => { - const allDataParam: 
DataRequestResponseBuilder['requestParams'] = { - dataOfNodes: 'all', - env: true, - input: true, - prevNode: true, - }; + const builder = new DataRequestResponseBuilder(); - const newRequestParam = (opts: Partial) => ({ - ...allDataParam, - ...opts, - }); + it('picks only specific properties for additional data', () => { + const result = builder.buildFromTaskData(taskData); - describe('all data', () => { - it('should build the runExecutionData as is when everything is requested', () => { - const dataRequestResponseBuilder = new DataRequestResponseBuilder(taskData, allDataParam); - - const { runExecutionData } = dataRequestResponseBuilder.build(); - - expect(runExecutionData).toStrictEqual(taskData.runExecutionData); + expect(result.additionalData).toStrictEqual({ + formWaitingBaseUrl: 'http://localhost:5678/form-waiting', + instanceBaseUrl: 'http://localhost:5678/', + restApiUrl: 'http://localhost:5678/rest', + variables: additionalData.variables, + webhookBaseUrl: 'http://localhost:5678/webhook', + webhookTestBaseUrl: 'http://localhost:5678/webhook-test', + webhookWaitingBaseUrl: 'http://localhost:5678/webhook-waiting', + executionId: '45844', + userId: '114984bc-44b3-4dd4-9b54-a4a8d34d51d5', + currentNodeParameters: undefined, + executionTimeoutTimestamp: undefined, + restartExecutionId: undefined, }); }); - describe('envProviderState', () => { - it("should filter out envProviderState when it's not requested", () => { - const dataRequestResponseBuilder = new DataRequestResponseBuilder( - taskData, - newRequestParam({ - env: false, - }), - ); + it('picks only specific properties for workflow', () => { + const result = builder.buildFromTaskData(taskData); - const result = dataRequestResponseBuilder.build(); - - expect(result.envProviderState).toStrictEqual({ - env: {}, - isEnvAccessBlocked: false, - isProcessAvailable: true, - }); - }); - }); - - describe('additionalData', () => { - it('picks only specific properties for additional data', () => { - const 
dataRequestResponseBuilder = new DataRequestResponseBuilder(taskData, allDataParam); - - const result = dataRequestResponseBuilder.build(); - - expect(result.additionalData).toStrictEqual({ - formWaitingBaseUrl: 'http://localhost:5678/form-waiting', - instanceBaseUrl: 'http://localhost:5678/', - restApiUrl: 'http://localhost:5678/rest', - webhookBaseUrl: 'http://localhost:5678/webhook', - webhookTestBaseUrl: 'http://localhost:5678/webhook-test', - webhookWaitingBaseUrl: 'http://localhost:5678/webhook-waiting', - executionId: '45844', - userId: '114984bc-44b3-4dd4-9b54-a4a8d34d51d5', - currentNodeParameters: undefined, - executionTimeoutTimestamp: undefined, - restartExecutionId: undefined, - variables: additionalData.variables, - }); - }); - }); - - describe('input data', () => { - const allExceptInputParam = newRequestParam({ - input: false, - }); - - it('drops input data from executeData', () => { - const result = new DataRequestResponseBuilder(taskData, allExceptInputParam).build(); - - expect(result.executeData).toStrictEqual({ - node: taskData.executeData!.node, - source: taskData.executeData!.source, - data: {}, - }); - }); - - it('drops input data from result', () => { - const result = new DataRequestResponseBuilder(taskData, allExceptInputParam).build(); - - expect(result.inputData).toStrictEqual({}); - }); - - it('drops input data from result', () => { - const result = new DataRequestResponseBuilder(taskData, allExceptInputParam).build(); - - expect(result.inputData).toStrictEqual({}); - }); - - it('drops input data from connectionInputData', () => { - const result = new DataRequestResponseBuilder(taskData, allExceptInputParam).build(); - - expect(result.connectionInputData).toStrictEqual([]); - }); - }); - - describe('nodes', () => { - it('should return empty run data when only Code node is requested', () => { - const result = new DataRequestResponseBuilder( - taskData, - newRequestParam({ dataOfNodes: ['Code'], prevNode: false }), - ).build(); - - 
expect(result.runExecutionData.resultData.runData).toStrictEqual({}); - expect(result.runExecutionData.resultData.pinData).toStrictEqual({}); - // executionData & startData contain only metadata --> returned as is - expect(result.runExecutionData.startData).toStrictEqual(taskData.runExecutionData.startData); - expect(result.runExecutionData.executionData).toStrictEqual( - taskData.runExecutionData.executionData, - ); - }); - - it('should return empty run data when only Code node is requested', () => { - const result = new DataRequestResponseBuilder( - taskData, - newRequestParam({ dataOfNodes: [codeNode.name], prevNode: false }), - ).build(); - - expect(result.runExecutionData.resultData.runData).toStrictEqual({}); - expect(result.runExecutionData.resultData.pinData).toStrictEqual({}); - // executionData & startData contain only metadata --> returned as is - expect(result.runExecutionData.startData).toStrictEqual(taskData.runExecutionData.startData); - expect(result.runExecutionData.executionData).toStrictEqual( - taskData.runExecutionData.executionData, - ); - }); - - it("should return only DebugHelper's data when only DebugHelper node is requested", () => { - const result = new DataRequestResponseBuilder( - taskData, - newRequestParam({ dataOfNodes: [debugHelperNode.name], prevNode: false }), - ).build(); - - expect(result.runExecutionData.resultData.runData).toStrictEqual({ - [debugHelperNode.name]: taskData.runExecutionData.resultData.runData[debugHelperNode.name], - }); - expect(result.runExecutionData.resultData.pinData).toStrictEqual({}); - // executionData & startData contain only metadata --> returned as is - expect(result.runExecutionData.startData).toStrictEqual(taskData.runExecutionData.startData); - expect(result.runExecutionData.executionData).toStrictEqual( - taskData.runExecutionData.executionData, - ); - }); - - it("should return DebugHelper's data when only prevNode node is requested", () => { - const result = new DataRequestResponseBuilder( - 
taskData, - newRequestParam({ dataOfNodes: [], prevNode: true }), - ).build(); - - expect(result.runExecutionData.resultData.runData).toStrictEqual({ - [debugHelperNode.name]: taskData.runExecutionData.resultData.runData[debugHelperNode.name], - }); - expect(result.runExecutionData.resultData.pinData).toStrictEqual({}); - // executionData & startData contain only metadata --> returned as is - expect(result.runExecutionData.startData).toStrictEqual(taskData.runExecutionData.startData); - expect(result.runExecutionData.executionData).toStrictEqual( - taskData.runExecutionData.executionData, - ); + expect(result.workflow).toStrictEqual({ + id: '1', + name: 'Test Workflow', + active: true, + connections: workflow.connectionsBySourceNode, + nodes: [], + pinData: workflow.pinData, + settings: workflow.settings, + staticData: workflow.staticData, }); }); }); diff --git a/packages/cli/src/runners/task-managers/__tests__/data-request-response-stripper.test.ts b/packages/cli/src/runners/task-managers/__tests__/data-request-response-stripper.test.ts new file mode 100644 index 0000000000..a37b9bdc7a --- /dev/null +++ b/packages/cli/src/runners/task-managers/__tests__/data-request-response-stripper.test.ts @@ -0,0 +1,300 @@ +import type { DataRequestResponse, TaskDataRequestParams } from '@n8n/task-runner'; +import { mock } from 'jest-mock-extended'; +import type { IWorkflowExecuteAdditionalData } from 'n8n-workflow'; +import { type INode, type INodeExecutionData } from 'n8n-workflow'; + +import { DataRequestResponseStripper } from '../data-request-response-stripper'; + +const triggerNode: INode = mock({ + name: 'Trigger', +}); +const debugHelperNode: INode = mock({ + name: 'DebugHelper', +}); +const codeNode: INode = mock({ + name: 'Code', +}); +const workflow: DataRequestResponse['workflow'] = mock(); +const debugHelperNodeOutItems: INodeExecutionData[] = [ + { + json: { + uid: 'abb74fd4-bef2-4fae-9d53-ea24e9eb3032', + email: 'Dan.Schmidt31@yahoo.com', + firstname: 'Toni', + 
lastname: 'Schuster', + password: 'Q!D6C2', + }, + pairedItem: { + item: 0, + }, + }, +]; +const codeNodeInputItems: INodeExecutionData[] = debugHelperNodeOutItems; +const envProviderState: DataRequestResponse['envProviderState'] = mock< + DataRequestResponse['envProviderState'] +>({ + env: {}, + isEnvAccessBlocked: false, + isProcessAvailable: true, +}); +const additionalData = mock({ + formWaitingBaseUrl: 'http://localhost:5678/form-waiting', + instanceBaseUrl: 'http://localhost:5678/', + restApiUrl: 'http://localhost:5678/rest', + variables: {}, + webhookBaseUrl: 'http://localhost:5678/webhook', + webhookTestBaseUrl: 'http://localhost:5678/webhook-test', + webhookWaitingBaseUrl: 'http://localhost:5678/webhook-waiting', + executionId: '45844', + userId: '114984bc-44b3-4dd4-9b54-a4a8d34d51d5', + currentNodeParameters: undefined, + executionTimeoutTimestamp: undefined, + restartExecutionId: undefined, +}); + +/** + * Drawn with https://asciiflow.com/#/ + * Task data for an execution of the following WF: + * where ►► denotes the currently being executing node. 
+ * ►► + * ┌───────────┐ ┌─────────────┐ ┌────────┐ + * │ Trigger ├──►│ DebugHelper ├───►│ Code │ + * └───────────┘ └─────────────┘ └────────┘ + */ +const taskData: DataRequestResponse = { + workflow, + inputData: { + main: [codeNodeInputItems], + }, + itemIndex: 0, + activeNodeName: codeNode.name, + contextNodeName: codeNode.name, + defaultReturnRunIndex: -1, + mode: 'manual', + envProviderState, + node: codeNode, + runExecutionData: { + startData: { + destinationNode: codeNode.name, + runNodeFilter: [triggerNode.name, debugHelperNode.name, codeNode.name], + }, + resultData: { + runData: { + [triggerNode.name]: [ + { + hints: [], + startTime: 1730313407328, + executionTime: 1, + source: [], + executionStatus: 'success', + data: { + main: [[]], + }, + }, + ], + [debugHelperNode.name]: [ + { + hints: [], + startTime: 1730313407330, + executionTime: 1, + source: [ + { + previousNode: triggerNode.name, + }, + ], + executionStatus: 'success', + data: { + main: [debugHelperNodeOutItems], + }, + }, + ], + }, + pinData: {}, + }, + executionData: { + contextData: {}, + nodeExecutionStack: [], + metadata: {}, + waitingExecution: { + [codeNode.name]: { + '0': { + main: [codeNodeInputItems], + }, + }, + }, + waitingExecutionSource: { + [codeNode.name]: { + '0': { + main: [ + { + previousNode: debugHelperNode.name, + }, + ], + }, + }, + }, + }, + }, + runIndex: 0, + selfData: {}, + siblingParameters: {}, + connectionInputSource: { + main: [ + { + previousNode: debugHelperNode.name, + previousNodeOutput: 0, + }, + ], + }, + additionalData, +} as const; + +describe('DataRequestResponseStripper', () => { + const allDataParam: TaskDataRequestParams = { + dataOfNodes: 'all', + env: true, + input: true, + prevNode: true, + }; + + const newRequestParam = (opts: Partial) => ({ + ...allDataParam, + ...opts, + }); + + describe('all data', () => { + it('should build the runExecutionData as is when everything is requested', () => { + const dataRequestResponseBuilder = new 
DataRequestResponseStripper(taskData, allDataParam); + + const { runExecutionData } = dataRequestResponseBuilder.strip(); + + expect(runExecutionData).toStrictEqual(taskData.runExecutionData); + }); + }); + + describe('envProviderState', () => { + it("should filter out envProviderState when it's not requested", () => { + const dataRequestResponseBuilder = new DataRequestResponseStripper( + taskData, + newRequestParam({ + env: false, + }), + ); + + const result = dataRequestResponseBuilder.strip(); + + expect(result.envProviderState).toStrictEqual({ + env: {}, + isEnvAccessBlocked: false, + isProcessAvailable: true, + }); + }); + }); + + describe('input data', () => { + const allExceptInputParam = newRequestParam({ + input: false, + }); + + it('drops input data from result', () => { + const result = new DataRequestResponseStripper(taskData, allExceptInputParam).strip(); + + expect(result.inputData).toStrictEqual({}); + }); + + it('drops input data from result', () => { + const result = new DataRequestResponseStripper(taskData, allExceptInputParam).strip(); + + expect(result.inputData).toStrictEqual({}); + }); + }); + + describe('nodes', () => { + it('should return empty run data when only Code node is requested', () => { + const result = new DataRequestResponseStripper( + taskData, + newRequestParam({ dataOfNodes: ['Code'], prevNode: false }), + ).strip(); + + expect(result.runExecutionData.resultData.runData).toStrictEqual({}); + expect(result.runExecutionData.resultData.pinData).toStrictEqual({}); + // executionData & startData contain only metadata --> returned as is + expect(result.runExecutionData.startData).toStrictEqual(taskData.runExecutionData.startData); + expect(result.runExecutionData.executionData).toStrictEqual( + taskData.runExecutionData.executionData, + ); + }); + + it('should return empty run data when only Code node is requested', () => { + const result = new DataRequestResponseStripper( + taskData, + newRequestParam({ dataOfNodes: 
[codeNode.name], prevNode: false }), + ).strip(); + + expect(result.runExecutionData.resultData.runData).toStrictEqual({}); + expect(result.runExecutionData.resultData.pinData).toStrictEqual({}); + // executionData & startData contain only metadata --> returned as is + expect(result.runExecutionData.startData).toStrictEqual(taskData.runExecutionData.startData); + expect(result.runExecutionData.executionData).toStrictEqual( + taskData.runExecutionData.executionData, + ); + }); + + it("should return only DebugHelper's data when only DebugHelper node is requested", () => { + const result = new DataRequestResponseStripper( + taskData, + newRequestParam({ dataOfNodes: [debugHelperNode.name], prevNode: false }), + ).strip(); + + expect(result.runExecutionData.resultData.runData).toStrictEqual({ + [debugHelperNode.name]: taskData.runExecutionData.resultData.runData[debugHelperNode.name], + }); + expect(result.runExecutionData.resultData.pinData).toStrictEqual({}); + // executionData & startData contain only metadata --> returned as is + expect(result.runExecutionData.startData).toStrictEqual(taskData.runExecutionData.startData); + expect(result.runExecutionData.executionData).toStrictEqual( + taskData.runExecutionData.executionData, + ); + }); + + it("should return DebugHelper's data when only prevNode node is requested", () => { + const result = new DataRequestResponseStripper( + taskData, + newRequestParam({ dataOfNodes: [], prevNode: true }), + ).strip(); + + expect(result.runExecutionData.resultData.runData).toStrictEqual({ + [debugHelperNode.name]: taskData.runExecutionData.resultData.runData[debugHelperNode.name], + }); + expect(result.runExecutionData.resultData.pinData).toStrictEqual({}); + // executionData & startData contain only metadata --> returned as is + expect(result.runExecutionData.startData).toStrictEqual(taskData.runExecutionData.startData); + expect(result.runExecutionData.executionData).toStrictEqual( + taskData.runExecutionData.executionData, + ); + 
}); + }); + + describe('passthrough properties', () => { + test.each>([ + ['workflow'], + ['connectionInputSource'], + ['node'], + ['runIndex'], + ['itemIndex'], + ['activeNodeName'], + ['siblingParameters'], + ['mode'], + ['defaultReturnRunIndex'], + ['selfData'], + ['contextNodeName'], + ['additionalData'], + ])("it doesn't change %s", (propertyName) => { + const dataRequestResponseBuilder = new DataRequestResponseStripper(taskData, allDataParam); + + const result = dataRequestResponseBuilder.strip(); + + expect(result[propertyName]).toBe(taskData[propertyName]); + }); + }); +}); diff --git a/packages/cli/src/runners/task-managers/data-request-response-builder.ts b/packages/cli/src/runners/task-managers/data-request-response-builder.ts index bc498c33a7..7df3b9e012 100644 --- a/packages/cli/src/runners/task-managers/data-request-response-builder.ts +++ b/packages/cli/src/runners/task-managers/data-request-response-builder.ts @@ -1,63 +1,30 @@ -import type { - DataRequestResponse, - BrokerMessage, - PartialAdditionalData, - TaskData, -} from '@n8n/task-runner'; -import type { - EnvProviderState, - IExecuteData, - INodeExecutionData, - IPinData, - IRunData, - IRunExecutionData, - ITaskDataConnections, - IWorkflowExecuteAdditionalData, - Workflow, - WorkflowParameters, -} from 'n8n-workflow'; +import type { DataRequestResponse, PartialAdditionalData, TaskData } from '@n8n/task-runner'; +import type { IWorkflowExecuteAdditionalData, Workflow, WorkflowParameters } from 'n8n-workflow'; /** - * Builds the response to a data request coming from a Task Runner. Tries to minimize - * the amount of data that is sent to the runner by only providing what is requested. + * Transforms TaskData to DataRequestResponse. The main purpose of the + * transformation is to make sure there is no duplication in the data + * (e.g. connectionInputData and executeData.data can be derived from + * inputData). 
*/ export class DataRequestResponseBuilder { - private requestedNodeNames = new Set(); - - constructor( - private readonly taskData: TaskData, - private readonly requestParams: BrokerMessage.ToRequester.TaskDataRequest['requestParams'], - ) { - this.requestedNodeNames = new Set(requestParams.dataOfNodes); - - if (this.requestParams.prevNode && this.requestParams.dataOfNodes !== 'all') { - this.requestedNodeNames.add(this.determinePrevNodeName()); - } - } - - /** - * Builds a response to the data request - */ - build(): DataRequestResponse { - const { taskData: td } = this; - + buildFromTaskData(taskData: TaskData): DataRequestResponse { return { - workflow: this.buildWorkflow(td.workflow), - connectionInputData: this.buildConnectionInputData(td.connectionInputData), - inputData: this.buildInputData(td.inputData), - itemIndex: td.itemIndex, - activeNodeName: td.activeNodeName, - contextNodeName: td.contextNodeName, - defaultReturnRunIndex: td.defaultReturnRunIndex, - mode: td.mode, - envProviderState: this.buildEnvProviderState(td.envProviderState), - node: td.node, // The current node being executed - runExecutionData: this.buildRunExecutionData(td.runExecutionData), - runIndex: td.runIndex, - selfData: td.selfData, - siblingParameters: td.siblingParameters, - executeData: this.buildExecuteData(td.executeData), - additionalData: this.buildAdditionalData(td.additionalData), + workflow: this.buildWorkflow(taskData.workflow), + inputData: taskData.inputData, + connectionInputSource: taskData.executeData?.source ?? 
null, + itemIndex: taskData.itemIndex, + activeNodeName: taskData.activeNodeName, + contextNodeName: taskData.contextNodeName, + defaultReturnRunIndex: taskData.defaultReturnRunIndex, + mode: taskData.mode, + envProviderState: taskData.envProviderState, + node: taskData.node, + runExecutionData: taskData.runExecutionData, + runIndex: taskData.runIndex, + selfData: taskData.selfData, + siblingParameters: taskData.siblingParameters, + additionalData: this.buildAdditionalData(taskData.additionalData), }; } @@ -80,86 +47,6 @@ export class DataRequestResponseBuilder { }; } - private buildExecuteData(executeData: IExecuteData | undefined): IExecuteData | undefined { - if (executeData === undefined) { - return undefined; - } - - return { - node: executeData.node, // The current node being executed - data: this.requestParams.input ? executeData.data : {}, - source: executeData.source, - }; - } - - private buildRunExecutionData(runExecutionData: IRunExecutionData): IRunExecutionData { - if (this.requestParams.dataOfNodes === 'all') { - return runExecutionData; - } - - return { - startData: runExecutionData.startData, - resultData: { - error: runExecutionData.resultData.error, - lastNodeExecuted: runExecutionData.resultData.lastNodeExecuted, - metadata: runExecutionData.resultData.metadata, - runData: this.buildRunData(runExecutionData.resultData.runData), - pinData: this.buildPinData(runExecutionData.resultData.pinData), - }, - executionData: runExecutionData.executionData - ? 
{ - // TODO: Figure out what these two are and can they be filtered - contextData: runExecutionData.executionData?.contextData, - nodeExecutionStack: runExecutionData.executionData.nodeExecutionStack, - - metadata: runExecutionData.executionData.metadata, - waitingExecution: runExecutionData.executionData.waitingExecution, - waitingExecutionSource: runExecutionData.executionData.waitingExecutionSource, - } - : undefined, - }; - } - - private buildRunData(runData: IRunData): IRunData { - return this.filterObjectByNodeNames(runData); - } - - private buildPinData(pinData: IPinData | undefined): IPinData | undefined { - return pinData ? this.filterObjectByNodeNames(pinData) : undefined; - } - - private buildEnvProviderState(envProviderState: EnvProviderState): EnvProviderState { - if (this.requestParams.env) { - // In case `isEnvAccessBlocked` = true, the provider state has already sanitized - // the environment variables and we can return it as is. - return envProviderState; - } - - return { - env: {}, - isEnvAccessBlocked: envProviderState.isEnvAccessBlocked, - isProcessAvailable: envProviderState.isProcessAvailable, - }; - } - - private buildInputData(inputData: ITaskDataConnections): ITaskDataConnections { - if (this.requestParams.input) { - return inputData; - } - - return {}; - } - - private buildConnectionInputData( - connectionInputData: INodeExecutionData[], - ): INodeExecutionData[] { - if (this.requestParams.input) { - return connectionInputData; - } - - return []; - } - private buildWorkflow(workflow: Workflow): Omit { return { id: workflow.id, @@ -172,37 +59,4 @@ export class DataRequestResponseBuilder { staticData: workflow.staticData, }; } - - /** - * Assuming the given `obj` is an object where the keys are node names, - * filters the object to only include the node names that are requested. 
- */
- private filterObjectByNodeNames<T extends Record<string, unknown>>(obj: T): T {
- if (this.requestParams.dataOfNodes === 'all') {
- return obj;
- }
-
- const filteredObj: T = {} as T;
-
- for (const nodeName in obj) {
- if (!Object.prototype.hasOwnProperty.call(obj, nodeName)) {
- continue;
- }
-
- if (this.requestedNodeNames.has(nodeName)) {
- filteredObj[nodeName] = obj[nodeName];
- }
- }
-
- return filteredObj;
- }
-
- private determinePrevNodeName(): string {
- const sourceData = this.taskData.executeData?.source?.main?.[0];
- if (!sourceData) {
- return '';
- }
-
- return sourceData.previousNode;
- }
 }
diff --git a/packages/cli/src/runners/task-managers/data-request-response-stripper.ts b/packages/cli/src/runners/task-managers/data-request-response-stripper.ts
new file mode 100644
index 0000000000..b924a87c5f
--- /dev/null
+++ b/packages/cli/src/runners/task-managers/data-request-response-stripper.ts
@@ -0,0 +1,131 @@
+import type { DataRequestResponse, BrokerMessage } from '@n8n/task-runner';
+import type {
+	EnvProviderState,
+	IPinData,
+	IRunData,
+	IRunExecutionData,
+	ITaskDataConnections,
+} from 'n8n-workflow';
+
+/**
+ * Strips data from data request response based on the specified parameters
+ */
+export class DataRequestResponseStripper {
+	private requestedNodeNames = new Set<string>();
+
+	constructor(
+		private readonly dataResponse: DataRequestResponse,
+		private readonly stripParams: BrokerMessage.ToRequester.TaskDataRequest['requestParams'],
+	) {
+		this.requestedNodeNames = new Set(stripParams.dataOfNodes);
+
+		if (this.stripParams.prevNode && this.stripParams.dataOfNodes !== 'all') {
+			this.requestedNodeNames.add(this.determinePrevNodeName());
+		}
+	}
+
+	/**
+	 * Strips data from the data request response according to the strip parameters
+	 */
+	strip(): DataRequestResponse {
+		const { dataResponse: dr } = this;
+
+		return {
+			...dr,
+			inputData: this.stripInputData(dr.inputData),
+			envProviderState: this.stripEnvProviderState(dr.envProviderState),
+			runExecutionData:
this.stripRunExecutionData(dr.runExecutionData), + }; + } + + private stripRunExecutionData(runExecutionData: IRunExecutionData): IRunExecutionData { + if (this.stripParams.dataOfNodes === 'all') { + return runExecutionData; + } + + return { + startData: runExecutionData.startData, + resultData: { + error: runExecutionData.resultData.error, + lastNodeExecuted: runExecutionData.resultData.lastNodeExecuted, + metadata: runExecutionData.resultData.metadata, + runData: this.stripRunData(runExecutionData.resultData.runData), + pinData: this.stripPinData(runExecutionData.resultData.pinData), + }, + executionData: runExecutionData.executionData + ? { + // TODO: Figure out what these two are and can they be stripped + contextData: runExecutionData.executionData?.contextData, + nodeExecutionStack: runExecutionData.executionData.nodeExecutionStack, + + metadata: runExecutionData.executionData.metadata, + waitingExecution: runExecutionData.executionData.waitingExecution, + waitingExecutionSource: runExecutionData.executionData.waitingExecutionSource, + } + : undefined, + }; + } + + private stripRunData(runData: IRunData): IRunData { + return this.filterObjectByNodeNames(runData); + } + + private stripPinData(pinData: IPinData | undefined): IPinData | undefined { + return pinData ? this.filterObjectByNodeNames(pinData) : undefined; + } + + private stripEnvProviderState(envProviderState: EnvProviderState): EnvProviderState { + if (this.stripParams.env) { + // In case `isEnvAccessBlocked` = true, the provider state has already sanitized + // the environment variables and we can return it as is. 
+ return envProviderState; + } + + return { + env: {}, + isEnvAccessBlocked: envProviderState.isEnvAccessBlocked, + isProcessAvailable: envProviderState.isProcessAvailable, + }; + } + + private stripInputData(inputData: ITaskDataConnections): ITaskDataConnections { + if (this.stripParams.input) { + return inputData; + } + + return {}; + } + + /** + * Assuming the given `obj` is an object where the keys are node names, + * filters the object to only include the node names that are requested. + */ + private filterObjectByNodeNames>(obj: T): T { + if (this.stripParams.dataOfNodes === 'all') { + return obj; + } + + const filteredObj: T = {} as T; + + for (const nodeName in obj) { + if (!Object.prototype.hasOwnProperty.call(obj, nodeName)) { + continue; + } + + if (this.requestedNodeNames.has(nodeName)) { + filteredObj[nodeName] = obj[nodeName]; + } + } + + return filteredObj; + } + + private determinePrevNodeName(): string { + const sourceData = this.dataResponse.connectionInputSource?.main?.[0]; + if (!sourceData) { + return ''; + } + + return sourceData.previousNode; + } +} diff --git a/packages/cli/src/runners/task-managers/local-task-manager.ts b/packages/cli/src/runners/task-managers/local-task-manager.ts index 623b9c912e..7d898aaebe 100644 --- a/packages/cli/src/runners/task-managers/local-task-manager.ts +++ b/packages/cli/src/runners/task-managers/local-task-manager.ts @@ -1,17 +1,20 @@ import type { RequesterMessage } from '@n8n/task-runner'; -import Container from 'typedi'; +import Container, { Service } from 'typedi'; + +import { NodeTypes } from '@/node-types'; import { TaskManager } from './task-manager'; import type { RequesterMessageCallback } from '../task-broker.service'; import { TaskBroker } from '../task-broker.service'; +@Service() export class LocalTaskManager extends TaskManager { taskBroker: TaskBroker; id: string = 'single-main'; - constructor() { - super(); + constructor(nodeTypes: NodeTypes) { + super(nodeTypes); this.registerRequester(); } 
diff --git a/packages/cli/src/runners/task-managers/task-manager.ts b/packages/cli/src/runners/task-managers/task-manager.ts index 617008a450..ffc86cdf62 100644 --- a/packages/cli/src/runners/task-managers/task-manager.ts +++ b/packages/cli/src/runners/task-managers/task-manager.ts @@ -1,5 +1,6 @@ +import { TaskRunnersConfig } from '@n8n/config'; import type { TaskResultData, RequesterMessage, BrokerMessage, TaskData } from '@n8n/task-runner'; -import { RPC_ALLOW_LIST } from '@n8n/task-runner'; +import { DataRequestResponseReconstruct, RPC_ALLOW_LIST } from '@n8n/task-runner'; import type { EnvProviderState, IExecuteFunctions, @@ -17,8 +18,13 @@ import type { } from 'n8n-workflow'; import { createResultOk, createResultError } from 'n8n-workflow'; import { nanoid } from 'nanoid'; +import * as a from 'node:assert/strict'; +import Container, { Service } from 'typedi'; + +import { NodeTypes } from '@/node-types'; import { DataRequestResponseBuilder } from './data-request-response-builder'; +import { DataRequestResponseStripper } from './data-request-response-stripper'; export type RequestAccept = (jobId: string) => void; export type RequestReject = (reason: string) => void; @@ -43,7 +49,8 @@ interface ExecuteFunctionObject { [name: string]: ((...args: unknown[]) => unknown) | ExecuteFunctionObject; } -export class TaskManager { +@Service() +export abstract class TaskManager { requestAcceptRejects: Map = new Map(); taskAcceptRejects: Map = new Map(); @@ -52,6 +59,12 @@ export class TaskManager { tasks: Map = new Map(); + private readonly runnerConfig = Container.get(TaskRunnersConfig); + + private readonly dataResponseBuilder = new DataRequestResponseBuilder(); + + constructor(private readonly nodeTypes: NodeTypes) {} + async startTask( additionalData: IWorkflowExecuteAdditionalData, taskType: string, @@ -173,6 +186,9 @@ export class TaskManager { case 'broker:taskdatarequest': this.sendTaskData(message.taskId, message.requestId, message.requestParams); break; + case 
'broker:nodetypesrequest': + this.sendNodeTypes(message.taskId, message.requestId, message.requestParams); + break; case 'broker:rpc': void this.handleRpc(message.taskId, message.callId, message.name, message.params); break; @@ -228,14 +244,45 @@ export class TaskManager { return; } - const dataRequestResponseBuilder = new DataRequestResponseBuilder(job.data, requestParams); - const requestedData = dataRequestResponseBuilder.build(); + const dataRequestResponse = this.dataResponseBuilder.buildFromTaskData(job.data); + + if (this.runnerConfig.assertDeduplicationOutput) { + const reconstruct = new DataRequestResponseReconstruct(); + a.deepStrictEqual( + reconstruct.reconstructConnectionInputData(dataRequestResponse.inputData), + job.data.connectionInputData, + ); + a.deepStrictEqual( + reconstruct.reconstructExecuteData(dataRequestResponse), + job.data.executeData, + ); + } + + const strippedData = new DataRequestResponseStripper( + dataRequestResponse, + requestParams, + ).strip(); this.sendMessage({ type: 'requester:taskdataresponse', taskId, requestId, - data: requestedData, + data: strippedData, + }); + } + + sendNodeTypes( + taskId: string, + requestId: string, + neededNodeTypes: BrokerMessage.ToRequester.NodeTypesRequest['requestParams'], + ) { + const nodeTypes = this.nodeTypes.getNodeTypeDescriptions(neededNodeTypes); + + this.sendMessage({ + type: 'requester:nodetypesresponse', + taskId, + requestId, + nodeTypes, }); } diff --git a/packages/cli/src/runners/task-runner-module.ts b/packages/cli/src/runners/task-runner-module.ts index 13521c599b..fe476ad341 100644 --- a/packages/cli/src/runners/task-runner-module.ts +++ b/packages/cli/src/runners/task-runner-module.ts @@ -26,7 +26,7 @@ export class TaskRunnerModule { constructor(private readonly runnerConfig: TaskRunnersConfig) {} async start() { - a.ok(!this.runnerConfig.disabled, 'Task runner is disabled'); + a.ok(this.runnerConfig.enabled, 'Task runner is disabled'); await this.loadTaskManager(); await 
this.loadTaskRunnerServer(); @@ -54,7 +54,7 @@ export class TaskRunnerModule { private async loadTaskManager() { const { TaskManager } = await import('@/runners/task-managers/task-manager'); const { LocalTaskManager } = await import('@/runners/task-managers/local-task-manager'); - this.taskManager = new LocalTaskManager(); + this.taskManager = Container.get(LocalTaskManager); Container.set(TaskManager, this.taskManager); } diff --git a/packages/cli/src/services/__tests__/ownership.service.test.ts b/packages/cli/src/services/__tests__/ownership.service.test.ts index ee6922dd76..6e79c83bb2 100644 --- a/packages/cli/src/services/__tests__/ownership.service.test.ts +++ b/packages/cli/src/services/__tests__/ownership.service.test.ts @@ -1,4 +1,5 @@ import { mock } from 'jest-mock-extended'; +import { v4 as uuid } from 'uuid'; import { Project } from '@/databases/entities/project'; import { ProjectRelation } from '@/databases/entities/project-relation'; @@ -13,12 +14,15 @@ import { OwnershipService } from '@/services/ownership.service'; import { mockCredential, mockProject } from '@test/mock-objects'; import { mockInstance } from '@test/mocking'; +import { CacheService } from '../cache/cache.service'; + describe('OwnershipService', () => { const userRepository = mockInstance(UserRepository); const sharedWorkflowRepository = mockInstance(SharedWorkflowRepository); const projectRelationRepository = mockInstance(ProjectRelationRepository); + const cacheService = mockInstance(CacheService); const ownershipService = new OwnershipService( - mock(), + cacheService, userRepository, mock(), projectRelationRepository, @@ -52,22 +56,22 @@ describe('OwnershipService', () => { }); }); - describe('getProjectOwnerCached()', () => { + describe('getPersonalProjectOwnerCached()', () => { test('should retrieve a project owner', async () => { - const mockProject = new Project(); - const mockOwner = new User(); - - const projectRelation = Object.assign(new ProjectRelation(), { - role: 
'project:personalOwner', - project: mockProject, - user: mockOwner, - }); + // ARRANGE + const project = new Project(); + const owner = new User(); + const projectRelation = new ProjectRelation(); + projectRelation.role = 'project:personalOwner'; + (projectRelation.project = project), (projectRelation.user = owner); projectRelationRepository.getPersonalProjectOwners.mockResolvedValueOnce([projectRelation]); + // ACT const returnedOwner = await ownershipService.getPersonalProjectOwnerCached('some-project-id'); - expect(returnedOwner).toBe(mockOwner); + // ASSERT + expect(returnedOwner).toBe(owner); }); test('should not throw if no project owner found, should return null instead', async () => { @@ -77,6 +81,29 @@ describe('OwnershipService', () => { expect(owner).toBeNull(); }); + + test('should not use the repository if the owner was found in the cache', async () => { + // ARRANGE + const project = new Project(); + project.id = uuid(); + const owner = new User(); + owner.id = uuid(); + const projectRelation = new ProjectRelation(); + projectRelation.role = 'project:personalOwner'; + (projectRelation.project = project), (projectRelation.user = owner); + + cacheService.getHashValue.mockResolvedValueOnce(owner); + userRepository.create.mockReturnValueOnce(owner); + + // ACT + const foundOwner = await ownershipService.getPersonalProjectOwnerCached(project.id); + + // ASSERT + expect(cacheService.getHashValue).toHaveBeenCalledTimes(1); + expect(cacheService.getHashValue).toHaveBeenCalledWith('project-owner', project.id); + expect(projectRelationRepository.getPersonalProjectOwners).not.toHaveBeenCalled(); + expect(foundOwner).toEqual(owner); + }); }); describe('getProjectOwnerCached()', () => { diff --git a/packages/cli/src/services/orchestration.service.ts b/packages/cli/src/services/orchestration.service.ts index 225badbf18..19da88e412 100644 --- a/packages/cli/src/services/orchestration.service.ts +++ b/packages/cli/src/services/orchestration.service.ts @@ -20,7 +20,7 
@@ export class OrchestrationService { private subscriber: Subscriber; - protected isInitialized = false; + isInitialized = false; private isMultiMainSetupLicensed = false; diff --git a/packages/cli/src/services/ownership.service.ts b/packages/cli/src/services/ownership.service.ts index 65e83e53eb..d0ad442bc1 100644 --- a/packages/cli/src/services/ownership.service.ts +++ b/packages/cli/src/services/ownership.service.ts @@ -45,13 +45,9 @@ export class OwnershipService { * Personal project ownership is **immutable**. */ async getPersonalProjectOwnerCached(projectId: string): Promise { - const cachedValue = await this.cacheService.getHashValue( - 'project-owner', - projectId, - ); + const cachedValue = await this.cacheService.getHashValue('project-owner', projectId); - if (cachedValue) this.userRepository.create(cachedValue); - if (cachedValue === null) return null; + if (cachedValue) return this.userRepository.create(cachedValue); const ownerRel = await this.projectRelationRepository.getPersonalProjectOwners([projectId]); const owner = ownerRel[0]?.user ?? 
null; diff --git a/packages/cli/src/services/pruning/__tests__/pruning.service.test.ts b/packages/cli/src/services/pruning/__tests__/pruning.service.test.ts index 5ea26ad540..64cecd1c06 100644 --- a/packages/cli/src/services/pruning/__tests__/pruning.service.test.ts +++ b/packages/cli/src/services/pruning/__tests__/pruning.service.test.ts @@ -1,4 +1,4 @@ -import type { GlobalConfig } from '@n8n/config'; +import type { PruningConfig } from '@n8n/config'; import { mock } from 'jest-mock-extended'; import type { InstanceSettings } from 'n8n-core'; @@ -8,9 +8,13 @@ import { mockLogger } from '@test/mocking'; import { PruningService } from '../pruning.service'; +jest.mock('@/db', () => ({ + connectionState: { migrated: true }, +})); + describe('PruningService', () => { describe('init', () => { - it('should start pruning if leader', () => { + it('should start pruning on main instance that is the leader', () => { const pruningService = new PruningService( mockLogger(), mock({ isLeader: true }), @@ -29,7 +33,7 @@ describe('PruningService', () => { expect(startPruningSpy).toHaveBeenCalled(); }); - it('should not start pruning if follower', () => { + it('should not start pruning on main instance that is a follower', () => { const pruningService = new PruningService( mockLogger(), mock({ isLeader: false }), @@ -48,7 +52,7 @@ describe('PruningService', () => { expect(startPruningSpy).not.toHaveBeenCalled(); }); - it('should register leadership events if multi-main setup is enabled', () => { + it('should register leadership events if main on multi-main setup', () => { const pruningService = new PruningService( mockLogger(), mock({ isLeader: true }), @@ -88,13 +92,10 @@ describe('PruningService', () => { isMultiMainSetupEnabled: true, multiMainSetup: mock(), }), - mock({ pruning: { isEnabled: true } }), + mock({ isEnabled: true }), ); - // @ts-expect-error Private method - const isEnabled = pruningService.isEnabled(); - - expect(isEnabled).toBe(true); + 
expect(pruningService.isEnabled).toBe(true); }); it('should return `false` based on config if leader main', () => { @@ -107,16 +108,13 @@ describe('PruningService', () => { isMultiMainSetupEnabled: true, multiMainSetup: mock(), }), - mock({ pruning: { isEnabled: false } }), + mock({ isEnabled: false }), ); - // @ts-expect-error Private method - const isEnabled = pruningService.isEnabled(); - - expect(isEnabled).toBe(false); + expect(pruningService.isEnabled).toBe(false); }); - it('should return `false` if non-main even if enabled', () => { + it('should return `false` if non-main even if config is enabled', () => { const pruningService = new PruningService( mockLogger(), mock({ isLeader: false, instanceType: 'worker' }), @@ -126,16 +124,13 @@ describe('PruningService', () => { isMultiMainSetupEnabled: true, multiMainSetup: mock(), }), - mock({ pruning: { isEnabled: true } }), + mock({ isEnabled: true }), ); - // @ts-expect-error Private method - const isEnabled = pruningService.isEnabled(); - - expect(isEnabled).toBe(false); + expect(pruningService.isEnabled).toBe(false); }); - it('should return `false` if follower main even if enabled', () => { + it('should return `false` if follower main even if config is enabled', () => { const pruningService = new PruningService( mockLogger(), mock({ isLeader: false, isFollower: true, instanceType: 'main' }), @@ -145,13 +140,10 @@ describe('PruningService', () => { isMultiMainSetupEnabled: true, multiMainSetup: mock(), }), - mock({ pruning: { isEnabled: true }, multiMainSetup: { enabled: true } }), + mock({ isEnabled: true }), ); - // @ts-expect-error Private method - const isEnabled = pruningService.isEnabled(); - - expect(isEnabled).toBe(false); + expect(pruningService.isEnabled).toBe(false); }); }); @@ -166,22 +158,25 @@ describe('PruningService', () => { isMultiMainSetupEnabled: true, multiMainSetup: mock(), }), - mock({ pruning: { isEnabled: false } }), + mock({ isEnabled: false }), + ); + + const 
scheduleRollingSoftDeletionsSpy = jest.spyOn( + pruningService, + // @ts-expect-error Private method + 'scheduleRollingSoftDeletions', ); // @ts-expect-error Private method - const setSoftDeletionInterval = jest.spyOn(pruningService, 'setSoftDeletionInterval'); - - // @ts-expect-error Private method - const scheduleHardDeletion = jest.spyOn(pruningService, 'scheduleHardDeletion'); + const scheduleNextHardDeletionSpy = jest.spyOn(pruningService, 'scheduleNextHardDeletion'); pruningService.startPruning(); - expect(setSoftDeletionInterval).not.toHaveBeenCalled(); - expect(scheduleHardDeletion).not.toHaveBeenCalled(); + expect(scheduleRollingSoftDeletionsSpy).not.toHaveBeenCalled(); + expect(scheduleNextHardDeletionSpy).not.toHaveBeenCalled(); }); - it('should start pruning if service is enabled', () => { + it('should start pruning if service is enabled and DB is migrated', () => { const pruningService = new PruningService( mockLogger(), mock({ isLeader: true, instanceType: 'main' }), @@ -191,23 +186,23 @@ describe('PruningService', () => { isMultiMainSetupEnabled: true, multiMainSetup: mock(), }), - mock({ pruning: { isEnabled: true } }), + mock({ isEnabled: true }), ); - const setSoftDeletionInterval = jest + const scheduleRollingSoftDeletionsSpy = jest // @ts-expect-error Private method - .spyOn(pruningService, 'setSoftDeletionInterval') + .spyOn(pruningService, 'scheduleRollingSoftDeletions') .mockImplementation(); - const scheduleHardDeletion = jest + const scheduleNextHardDeletionSpy = jest // @ts-expect-error Private method - .spyOn(pruningService, 'scheduleHardDeletion') + .spyOn(pruningService, 'scheduleNextHardDeletion') .mockImplementation(); pruningService.startPruning(); - expect(setSoftDeletionInterval).toHaveBeenCalled(); - expect(scheduleHardDeletion).toHaveBeenCalled(); + expect(scheduleRollingSoftDeletionsSpy).toHaveBeenCalled(); + expect(scheduleNextHardDeletionSpy).toHaveBeenCalled(); }); }); }); diff --git 
a/packages/cli/src/services/pruning/pruning.service.ts b/packages/cli/src/services/pruning/pruning.service.ts index 7314015091..34be37cf21 100644 --- a/packages/cli/src/services/pruning/pruning.service.ts +++ b/packages/cli/src/services/pruning/pruning.service.ts @@ -1,27 +1,37 @@ -import { GlobalConfig } from '@n8n/config'; +import { PruningConfig } from '@n8n/config'; import { BinaryDataService, InstanceSettings } from 'n8n-core'; -import { jsonStringify } from 'n8n-workflow'; +import { ensureError } from 'n8n-workflow'; +import { strict } from 'node:assert'; import { Service } from 'typedi'; -import { TIME } from '@/constants'; +import { Time } from '@/constants'; import { ExecutionRepository } from '@/databases/repositories/execution.repository'; +import { connectionState as dbConnectionState } from '@/db'; import { OnShutdown } from '@/decorators/on-shutdown'; import { Logger } from '@/logging/logger.service'; import { OrchestrationService } from '../orchestration.service'; +/** + * Responsible for pruning executions from the database and their associated binary data + * from the filesystem, on a rolling basis. By default we soft-delete execution rows + * every cycle and hard-delete them and their binary data every 4th cycle. + */ @Service() export class PruningService { - private hardDeletionBatchSize = 100; + /** Timer for soft-deleting executions on a rolling basis. */ + private softDeletionInterval: NodeJS.Timer | undefined; - private rates: Record = { - softDeletion: this.globalConfig.pruning.softDeleteInterval * TIME.MINUTE, - hardDeletion: this.globalConfig.pruning.hardDeleteInterval * TIME.MINUTE, + /** Timeout for next hard-deletion of soft-deleted executions. 
*/ + private hardDeletionTimeout: NodeJS.Timeout | undefined; + + private readonly rates = { + softDeletion: this.pruningConfig.softDeleteInterval * Time.minutes.toMilliseconds, + hardDeletion: this.pruningConfig.hardDeleteInterval * Time.minutes.toMilliseconds, }; - public softDeletionInterval: NodeJS.Timer | undefined; - - public hardDeletionTimeout: NodeJS.Timeout | undefined; + /** Max number of executions to hard-delete in a cycle. */ + private readonly batchSize = 100; private isShuttingDown = false; @@ -31,103 +41,68 @@ export class PruningService { private readonly executionRepository: ExecutionRepository, private readonly binaryDataService: BinaryDataService, private readonly orchestrationService: OrchestrationService, - private readonly globalConfig: GlobalConfig, + private readonly pruningConfig: PruningConfig, ) { this.logger = this.logger.scoped('pruning'); } - /** - * @important Requires `OrchestrationService` to be initialized. - */ init() { - const { isLeader } = this.instanceSettings; - const { isMultiMainSetupEnabled } = this.orchestrationService; + strict(this.instanceSettings.instanceRole !== 'unset', 'Instance role is not set'); - if (isLeader) this.startPruning(); + if (this.instanceSettings.isLeader) this.startPruning(); - if (isMultiMainSetupEnabled) { + if (this.orchestrationService.isMultiMainSetupEnabled) { this.orchestrationService.multiMainSetup.on('leader-takeover', () => this.startPruning()); this.orchestrationService.multiMainSetup.on('leader-stepdown', () => this.stopPruning()); } } - private isEnabled() { - const { instanceType, isFollower } = this.instanceSettings; - if (!this.globalConfig.pruning.isEnabled || instanceType !== 'main') { - return false; - } - - if (this.globalConfig.multiMainSetup.enabled && instanceType === 'main' && isFollower) { - return false; - } - - return true; + get isEnabled() { + return ( + this.pruningConfig.isEnabled && + this.instanceSettings.instanceType === 'main' && + this.instanceSettings.isLeader 
+ ); } - /** - * @important Call this method only after DB migrations have completed. - */ startPruning() { - if (!this.isEnabled()) return; + if (!this.isEnabled || !dbConnectionState.migrated || this.isShuttingDown) return; - if (this.isShuttingDown) { - this.logger.warn('Cannot start pruning while shutting down'); - return; - } - - this.logger.debug('Starting soft-deletion and hard-deletion timers'); - - this.setSoftDeletionInterval(); - this.scheduleHardDeletion(); + this.scheduleRollingSoftDeletions(); + this.scheduleNextHardDeletion(); } stopPruning() { - if (!this.isEnabled()) return; - - this.logger.debug('Removing soft-deletion and hard-deletion timers'); + if (!this.isEnabled) return; clearInterval(this.softDeletionInterval); clearTimeout(this.hardDeletionTimeout); } - private setSoftDeletionInterval(rateMs = this.rates.softDeletion) { - const when = [rateMs / TIME.MINUTE, 'min'].join(' '); - + private scheduleRollingSoftDeletions(rateMs = this.rates.softDeletion) { this.softDeletionInterval = setInterval( - async () => await this.softDeleteOnPruningCycle(), + async () => await this.softDelete(), this.rates.softDeletion, ); - this.logger.debug(`Soft-deletion scheduled every ${when}`); + this.logger.debug(`Soft-deletion every ${rateMs * Time.milliseconds.toMinutes} minutes`); } - private scheduleHardDeletion(rateMs = this.rates.hardDeletion) { - const when = [rateMs / TIME.MINUTE, 'min'].join(' '); - + private scheduleNextHardDeletion(rateMs = this.rates.hardDeletion) { this.hardDeletionTimeout = setTimeout(() => { - this.hardDeleteOnPruningCycle() - .then((rate) => this.scheduleHardDeletion(rate)) + this.hardDelete() + .then((rate) => this.scheduleNextHardDeletion(rate)) .catch((error) => { - this.scheduleHardDeletion(1 * TIME.SECOND); - - const errorMessage = - error instanceof Error - ? 
error.message - : jsonStringify(error, { replaceCircularRefs: true }); - - this.logger.error('Failed to hard-delete executions', { errorMessage }); + this.scheduleNextHardDeletion(1_000); + this.logger.error('Failed to hard-delete executions', { error: ensureError(error) }); }); }, rateMs); - this.logger.debug(`Hard-deletion scheduled for next ${when}`); + this.logger.debug(`Hard-deletion in next ${rateMs * Time.milliseconds.toMinutes} minutes`); } - /** - * Mark executions as deleted based on age and count, in a pruning cycle. - */ - async softDeleteOnPruningCycle() { - this.logger.debug('Starting soft-deletion of executions'); - + /** Soft-delete executions based on max age and/or max count. */ + async softDelete() { const result = await this.executionRepository.softDeletePrunableExecutions(); if (result.affected === 0) { @@ -145,10 +120,11 @@ export class PruningService { } /** - * Permanently remove all soft-deleted executions and their binary data, in a pruning cycle. - * @return Delay in ms after which the next cycle should be started + * Delete all soft-deleted executions and their binary data. + * + * @returns Delay in milliseconds until next hard-deletion */ - private async hardDeleteOnPruningCycle() { + private async hardDelete(): Promise { const ids = await this.executionRepository.findSoftDeletedExecutions(); const executionIds = ids.map((o) => o.executionId); @@ -160,8 +136,6 @@ export class PruningService { } try { - this.logger.debug('Starting hard-deletion of executions', { executionIds }); - await this.binaryDataService.deleteMany(ids); await this.executionRepository.deleteByIds(executionIds); @@ -170,16 +144,13 @@ export class PruningService { } catch (error) { this.logger.error('Failed to hard-delete executions', { executionIds, - error: error instanceof Error ? 
error.message : `${error}`, + error: ensureError(error), }); } - /** - * For next batch, speed up hard-deletion cycle in high-volume case - * to prevent high concurrency from causing duplicate deletions. - */ - const isHighVolume = executionIds.length >= this.hardDeletionBatchSize; + // if high volume, speed up next hard-deletion + if (executionIds.length >= this.batchSize) return 1 * Time.seconds.toMilliseconds; - return isHighVolume ? 1 * TIME.SECOND : this.rates.hardDeletion; + return this.rates.hardDeletion; } } diff --git a/packages/cli/src/sso/saml/__tests__/saml.service.ee.test.ts b/packages/cli/src/sso/saml/__tests__/saml.service.ee.test.ts index 5dda04dc18..9692cb9ec7 100644 --- a/packages/cli/src/sso/saml/__tests__/saml.service.ee.test.ts +++ b/packages/cli/src/sso/saml/__tests__/saml.service.ee.test.ts @@ -2,22 +2,29 @@ import type express from 'express'; import { mock } from 'jest-mock-extended'; import type { IdentityProviderInstance, ServiceProviderInstance } from 'samlify'; +import { SettingsRepository } from '@/databases/repositories/settings.repository'; import { Logger } from '@/logging/logger.service'; import { UrlService } from '@/services/url.service'; import * as samlHelpers from '@/sso/saml/saml-helpers'; import { SamlService } from '@/sso/saml/saml.service.ee'; import { mockInstance } from '@test/mocking'; +import { SAML_PREFERENCES_DB_KEY } from '../constants'; +import { InvalidSamlMetadataError } from '../errors/invalid-saml-metadata.error'; + describe('SamlService', () => { const logger = mockInstance(Logger); const urlService = mockInstance(UrlService); const samlService = new SamlService(logger, urlService); + const settingsRepository = mockInstance(SettingsRepository); + + beforeEach(() => { + jest.restoreAllMocks(); + }); describe('getAttributesFromLoginResponse', () => { test('throws when any attribute is missing', async () => { - // // ARRANGE - // jest .spyOn(samlService, 'getIdentityProviderInstance') .mockReturnValue(mock()); @@ 
-41,9 +48,7 @@ describe('SamlService', () => { ], }); - // // ACT & ASSERT - // await expect( samlService.getAttributesFromLoginResponse(mock(), 'post'), ).rejects.toThrowError( @@ -51,4 +56,74 @@ describe('SamlService', () => { ); }); }); + + describe('init', () => { + test('calls `reset` if an InvalidSamlMetadataError is thrown', async () => { + // ARRANGE + jest + .spyOn(samlService, 'loadFromDbAndApplySamlPreferences') + .mockRejectedValue(new InvalidSamlMetadataError()); + jest.spyOn(samlService, 'reset'); + + // ACT + await samlService.init(); + + // ASSERT + expect(samlService.reset).toHaveBeenCalledTimes(1); + }); + + test('calls `reset` if a SyntaxError is thrown', async () => { + // ARRANGE + jest + .spyOn(samlService, 'loadFromDbAndApplySamlPreferences') + .mockRejectedValue(new SyntaxError()); + jest.spyOn(samlService, 'reset'); + + // ACT + await samlService.init(); + + // ASSERT + expect(samlService.reset).toHaveBeenCalledTimes(1); + }); + + test('does not call reset and rethrows if another error is thrown', async () => { + // ARRANGE + jest + .spyOn(samlService, 'loadFromDbAndApplySamlPreferences') + .mockRejectedValue(new TypeError()); + jest.spyOn(samlService, 'reset'); + + // ACT & ASSERT + await expect(samlService.init()).rejects.toThrowError(TypeError); + expect(samlService.reset).toHaveBeenCalledTimes(0); + }); + + test('does not call reset if no error is thrown', async () => { + // ARRANGE + jest.spyOn(samlService, 'reset'); + + // ACT + await samlService.init(); + + // ASSERT + expect(samlService.reset).toHaveBeenCalledTimes(0); + }); + }); + + describe('reset', () => { + test('disables saml login and deletes the saml `features.saml` key in the db', async () => { + // ARRANGE + jest.spyOn(samlHelpers, 'setSamlLoginEnabled'); + jest.spyOn(settingsRepository, 'delete'); + + // ACT + await samlService.reset(); + + // ASSERT + expect(samlHelpers.setSamlLoginEnabled).toHaveBeenCalledTimes(1); + 
expect(samlHelpers.setSamlLoginEnabled).toHaveBeenCalledWith(false); + expect(settingsRepository.delete).toHaveBeenCalledTimes(1); + expect(settingsRepository.delete).toHaveBeenCalledWith({ key: SAML_PREFERENCES_DB_KEY }); + }); + }); }); diff --git a/packages/cli/src/sso/saml/errors/invalid-saml-metadata.error.ts b/packages/cli/src/sso/saml/errors/invalid-saml-metadata.error.ts new file mode 100644 index 0000000000..3d2ceaddb6 --- /dev/null +++ b/packages/cli/src/sso/saml/errors/invalid-saml-metadata.error.ts @@ -0,0 +1,7 @@ +import { ApplicationError } from 'n8n-workflow'; + +export class InvalidSamlMetadataError extends ApplicationError { + constructor() { + super('Invalid SAML metadata', { level: 'warning' }); + } +} diff --git a/packages/cli/src/sso/saml/saml.service.ee.ts b/packages/cli/src/sso/saml/saml.service.ee.ts index 6b07919730..ce69a39e6d 100644 --- a/packages/cli/src/sso/saml/saml.service.ee.ts +++ b/packages/cli/src/sso/saml/saml.service.ee.ts @@ -16,6 +16,7 @@ import { Logger } from '@/logging/logger.service'; import { UrlService } from '@/services/url.service'; import { SAML_PREFERENCES_DB_KEY } from './constants'; +import { InvalidSamlMetadataError } from './errors/invalid-saml-metadata.error'; import { createUserFromSamlAttributes, getMappedSamlAttributesFromFlowResult, @@ -81,11 +82,24 @@ export class SamlService { ) {} async init(): Promise { - // load preferences first but do not apply so as to not load samlify unnecessarily - await this.loadFromDbAndApplySamlPreferences(false); - if (isSamlLicensedAndEnabled()) { - await this.loadSamlify(); - await this.loadFromDbAndApplySamlPreferences(true); + try { + // load preferences first but do not apply so as to not load samlify unnecessarily + await this.loadFromDbAndApplySamlPreferences(false); + if (isSamlLicensedAndEnabled()) { + await this.loadSamlify(); + await this.loadFromDbAndApplySamlPreferences(true); + } + } catch (error) { + // If the SAML configuration has been corrupted in the 
database we'll + // delete the corrupted configuration and enable email logins again. + if (error instanceof InvalidSamlMetadataError || error instanceof SyntaxError) { + this.logger.warn( + `SAML initialization failed because of invalid metadata in database: ${error.message}. IMPORTANT: Disabling SAML and switching to email-based login for all users. Please review your configuration and re-enable SAML.`, + ); + await this.reset(); + } else { + throw error; + } } } @@ -98,7 +112,7 @@ export class SamlService { validate: async (response: string) => { const valid = await validateResponse(response); if (!valid) { - throw new ApplicationError('Invalid SAML response'); + throw new InvalidSamlMetadataError(); } }, }); @@ -230,7 +244,7 @@ export class SamlService { } else if (prefs.metadata) { const validationResult = await validateMetadata(prefs.metadata); if (!validationResult) { - throw new ApplicationError('Invalid SAML metadata'); + throw new InvalidSamlMetadataError(); } } this.getIdentityProviderInstance(true); @@ -371,4 +385,13 @@ export class SamlService { } return attributes; } + + /** + * Disables SAML, switches to email based logins and deletes the SAML + * configuration from the database. + */ + async reset() { + await setSamlLoginEnabled(false); + await Container.get(SettingsRepository).delete({ key: SAML_PREFERENCES_DB_KEY }); + } } diff --git a/packages/cli/src/sso/sso-helpers.ts b/packages/cli/src/sso/sso-helpers.ts index 925c7a1eb1..f600e72cfd 100644 --- a/packages/cli/src/sso/sso-helpers.ts +++ b/packages/cli/src/sso/sso-helpers.ts @@ -5,10 +5,10 @@ import type { AuthProviderType } from '@/databases/entities/auth-identity'; import { SettingsRepository } from '@/databases/repositories/settings.repository'; /** - * Only one authentication method can be active at a time. This function sets the current authentication method - * and saves it to the database. - * SSO methods should only switch to email and then to another method. 
Email can switch to any method. - * @param authenticationMethod + * Only one authentication method can be active at a time. This function sets + * the current authentication method and saves it to the database. + * SSO methods should only switch to email and then to another method. Email + * can switch to any method. */ export async function setCurrentAuthenticationMethod( authenticationMethod: AuthProviderType, diff --git a/packages/cli/src/subworkflows/subworkflow-policy-checker.service.ts b/packages/cli/src/subworkflows/subworkflow-policy-checker.service.ts index cf9b122e72..6c64fc0b3a 100644 --- a/packages/cli/src/subworkflows/subworkflow-policy-checker.service.ts +++ b/packages/cli/src/subworkflows/subworkflow-policy-checker.service.ts @@ -1,6 +1,5 @@ import { GlobalConfig } from '@n8n/config'; import { type Workflow, type INode, type WorkflowSettings } from 'n8n-workflow'; -import { strict as assert } from 'node:assert'; import { Service } from 'typedi'; import type { Project } from '@/databases/entities/project'; @@ -68,11 +67,9 @@ export class SubworkflowPolicyChecker { const owner = await this.ownershipService.getPersonalProjectOwnerCached(subworkflowProject.id); - assert(owner !== null); // only `null` if not personal - return { hasReadAccess, - ownerName: owner.firstName + ' ' + owner.lastName, + ownerName: owner ? 
owner.firstName + ' ' + owner.lastName : 'No owner (team project)', }; } diff --git a/packages/cli/test/integration/commands/worker.cmd.test.ts b/packages/cli/test/integration/commands/worker.cmd.test.ts index ce3280aa48..e17a8d2279 100644 --- a/packages/cli/test/integration/commands/worker.cmd.test.ts +++ b/packages/cli/test/integration/commands/worker.cmd.test.ts @@ -26,7 +26,7 @@ import { mockInstance } from '../../shared/mocking'; config.set('executions.mode', 'queue'); config.set('binaryDataManager.availableModes', 'filesystem'); -Container.get(TaskRunnersConfig).disabled = false; +Container.get(TaskRunnersConfig).enabled = true; mockInstance(LoadNodesAndCredentials); const binaryDataService = mockInstance(BinaryDataService); const externalHooks = mockInstance(ExternalHooks); diff --git a/packages/cli/test/integration/pruning.service.test.ts b/packages/cli/test/integration/pruning.service.test.ts index 76ee107903..4ea8455b94 100644 --- a/packages/cli/test/integration/pruning.service.test.ts +++ b/packages/cli/test/integration/pruning.service.test.ts @@ -1,4 +1,4 @@ -import { GlobalConfig } from '@n8n/config'; +import { PruningConfig } from '@n8n/config'; import { mock } from 'jest-mock-extended'; import { BinaryDataService, InstanceSettings } from 'n8n-core'; import type { ExecutionStatus } from 'n8n-workflow'; @@ -27,19 +27,19 @@ describe('softDeleteOnPruningCycle()', () => { const now = new Date(); const yesterday = new Date(Date.now() - TIME.DAY); let workflow: WorkflowEntity; - let globalConfig: GlobalConfig; + let pruningConfig: PruningConfig; beforeAll(async () => { await testDb.init(); - globalConfig = Container.get(GlobalConfig); + pruningConfig = Container.get(PruningConfig); pruningService = new PruningService( mockLogger(), instanceSettings, Container.get(ExecutionRepository), mockInstance(BinaryDataService), mock(), - globalConfig, + pruningConfig, ); workflow = await createWorkflow(); @@ -62,8 +62,8 @@ describe('softDeleteOnPruningCycle()', () => 
{ describe('when EXECUTIONS_DATA_PRUNE_MAX_COUNT is set', () => { beforeAll(() => { - globalConfig.pruning.maxAge = 336; - globalConfig.pruning.maxCount = 1; + pruningConfig.maxAge = 336; + pruningConfig.maxCount = 1; }); test('should mark as deleted based on EXECUTIONS_DATA_PRUNE_MAX_COUNT', async () => { @@ -73,7 +73,7 @@ describe('softDeleteOnPruningCycle()', () => { await createSuccessfulExecution(workflow), ]; - await pruningService.softDeleteOnPruningCycle(); + await pruningService.softDelete(); const result = await findAllExecutions(); expect(result).toEqual([ @@ -92,7 +92,7 @@ describe('softDeleteOnPruningCycle()', () => { await createSuccessfulExecution(workflow), ]; - await pruningService.softDeleteOnPruningCycle(); + await pruningService.softDelete(); const result = await findAllExecutions(); expect(result).toEqual([ @@ -113,7 +113,7 @@ describe('softDeleteOnPruningCycle()', () => { await createSuccessfulExecution(workflow), ]; - await pruningService.softDeleteOnPruningCycle(); + await pruningService.softDelete(); const result = await findAllExecutions(); expect(result).toEqual([ @@ -132,7 +132,7 @@ describe('softDeleteOnPruningCycle()', () => { await createSuccessfulExecution(workflow), ]; - await pruningService.softDeleteOnPruningCycle(); + await pruningService.softDelete(); const result = await findAllExecutions(); expect(result).toEqual([ @@ -150,7 +150,7 @@ describe('softDeleteOnPruningCycle()', () => { await annotateExecution(executions[0].id, { vote: 'up' }, [workflow.id]); - await pruningService.softDeleteOnPruningCycle(); + await pruningService.softDelete(); const result = await findAllExecutions(); expect(result).toEqual([ @@ -163,8 +163,8 @@ describe('softDeleteOnPruningCycle()', () => { describe('when EXECUTIONS_DATA_MAX_AGE is set', () => { beforeAll(() => { - globalConfig.pruning.maxAge = 1; - globalConfig.pruning.maxCount = 0; + pruningConfig.maxAge = 1; + pruningConfig.maxCount = 0; }); test('should mark as deleted based on 
EXECUTIONS_DATA_MAX_AGE', async () => { @@ -179,7 +179,7 @@ describe('softDeleteOnPruningCycle()', () => { ), ]; - await pruningService.softDeleteOnPruningCycle(); + await pruningService.softDelete(); const result = await findAllExecutions(); expect(result).toEqual([ @@ -203,7 +203,7 @@ describe('softDeleteOnPruningCycle()', () => { await createSuccessfulExecution(workflow), ]; - await pruningService.softDeleteOnPruningCycle(); + await pruningService.softDelete(); const result = await findAllExecutions(); expect(result).toEqual([ @@ -221,7 +221,7 @@ describe('softDeleteOnPruningCycle()', () => { ])('should prune %s executions', async (status, attributes) => { const execution = await createExecution({ status, ...attributes }, workflow); - await pruningService.softDeleteOnPruningCycle(); + await pruningService.softDelete(); const result = await findAllExecutions(); expect(result).toEqual([ @@ -239,7 +239,7 @@ describe('softDeleteOnPruningCycle()', () => { await createSuccessfulExecution(workflow), ]; - await pruningService.softDeleteOnPruningCycle(); + await pruningService.softDelete(); const result = await findAllExecutions(); expect(result).toEqual([ @@ -266,7 +266,7 @@ describe('softDeleteOnPruningCycle()', () => { await annotateExecution(executions[0].id, { vote: 'up' }, [workflow.id]); - await pruningService.softDeleteOnPruningCycle(); + await pruningService.softDelete(); const result = await findAllExecutions(); expect(result).toEqual([ diff --git a/packages/cli/test/integration/runners/task-runner-module.external.test.ts b/packages/cli/test/integration/runners/task-runner-module.external.test.ts index e8a7e54f1a..4974abfb39 100644 --- a/packages/cli/test/integration/runners/task-runner-module.external.test.ts +++ b/packages/cli/test/integration/runners/task-runner-module.external.test.ts @@ -18,14 +18,14 @@ describe('TaskRunnerModule in external mode', () => { describe('start', () => { it('should throw if the task runner is disabled', async () => { - 
runnerConfig.disabled = true; + runnerConfig.enabled = false; // Act await expect(module.start()).rejects.toThrow('Task runner is disabled'); }); it('should start the task runner', async () => { - runnerConfig.disabled = false; + runnerConfig.enabled = true; // Act await module.start(); diff --git a/packages/cli/test/integration/runners/task-runner-module.internal.test.ts b/packages/cli/test/integration/runners/task-runner-module.internal.test.ts index 922f7fee4b..444d576e87 100644 --- a/packages/cli/test/integration/runners/task-runner-module.internal.test.ts +++ b/packages/cli/test/integration/runners/task-runner-module.internal.test.ts @@ -18,14 +18,14 @@ describe('TaskRunnerModule in internal_childprocess mode', () => { describe('start', () => { it('should throw if the task runner is disabled', async () => { - runnerConfig.disabled = true; + runnerConfig.enabled = false; // Act await expect(module.start()).rejects.toThrow('Task runner is disabled'); }); it('should start the task runner', async () => { - runnerConfig.disabled = false; + runnerConfig.enabled = true; // Act await module.start(); diff --git a/packages/cli/test/integration/runners/task-runner-process.test.ts b/packages/cli/test/integration/runners/task-runner-process.test.ts index 219fbc8813..8c5289a5c7 100644 --- a/packages/cli/test/integration/runners/task-runner-process.test.ts +++ b/packages/cli/test/integration/runners/task-runner-process.test.ts @@ -10,7 +10,7 @@ import { retryUntil } from '@test-integration/retry-until'; describe('TaskRunnerProcess', () => { const authToken = 'token'; const runnerConfig = Container.get(TaskRunnersConfig); - runnerConfig.disabled = false; + runnerConfig.enabled = true; runnerConfig.mode = 'internal_childprocess'; runnerConfig.authToken = authToken; runnerConfig.port = 0; // Use any port diff --git a/packages/core/package.json b/packages/core/package.json index 32fa66a297..8628aa92ef 100644 --- a/packages/core/package.json +++ b/packages/core/package.json @@ 
-1,6 +1,6 @@ { "name": "n8n-core", - "version": "1.66.0", + "version": "1.67.0", "description": "Core functionality of n8n", "main": "dist/index", "types": "dist/index.d.ts", diff --git a/packages/core/src/NodeExecuteFunctions.ts b/packages/core/src/NodeExecuteFunctions.ts index e50d049f59..e645309644 100644 --- a/packages/core/src/NodeExecuteFunctions.ts +++ b/packages/core/src/NodeExecuteFunctions.ts @@ -76,7 +76,6 @@ import type { IPollFunctions, IRequestOptions, IRunExecutionData, - ISourceData, ITaskData, ITaskDataConnections, ITriggerFunctions, @@ -109,6 +108,7 @@ import type { AiEvent, ISupplyDataFunctions, WebhookType, + SchedulingFunctions, } from 'n8n-workflow'; import { NodeConnectionType, @@ -166,7 +166,14 @@ import { extractValue } from './ExtractValue'; import { InstanceSettings } from './InstanceSettings'; import type { ExtendedValidationResult, IResponseError } from './Interfaces'; // eslint-disable-next-line import/no-cycle -import { HookContext, PollContext, TriggerContext, WebhookContext } from './node-execution-context'; +import { + ExecuteSingleContext, + HookContext, + PollContext, + TriggerContext, + WebhookContext, +} from './node-execution-context'; +import { ScheduledTaskManager } from './ScheduledTaskManager'; import { getSecretsProxy } from './Secrets'; import { SSHClientsManager } from './SSHClientsManager'; @@ -3018,7 +3025,7 @@ const executionCancellationFunctions = ( }, }); -const getRequestHelperFunctions = ( +export const getRequestHelperFunctions = ( workflow: Workflow, node: INode, additionalData: IWorkflowExecuteAdditionalData, @@ -3338,11 +3345,19 @@ const getRequestHelperFunctions = ( }; }; -const getSSHTunnelFunctions = (): SSHTunnelFunctions => ({ +export const getSSHTunnelFunctions = (): SSHTunnelFunctions => ({ getSSHClient: async (credentials) => await Container.get(SSHClientsManager).getClient(credentials), }); +export const getSchedulingFunctions = (workflow: Workflow): SchedulingFunctions => { + const 
scheduledTaskManager = Container.get(ScheduledTaskManager); + return { + registerCron: (cronExpression, onTick) => + scheduledTaskManager.registerCron(workflow, cronExpression, onTick), + }; +}; + const getAllowedPaths = () => { const restrictFileAccessTo = process.env[RESTRICT_FILE_ACCESS_TO]; if (!restrictFileAccessTo) { @@ -3409,7 +3424,7 @@ export function isFilePathBlocked(filePath: string): boolean { return false; } -const getFileSystemHelperFunctions = (node: INode): FileSystemHelperFunctions => ({ +export const getFileSystemHelperFunctions = (node: INode): FileSystemHelperFunctions => ({ async createReadStream(filePath) { try { await fsAccess(filePath); @@ -3445,7 +3460,7 @@ const getFileSystemHelperFunctions = (node: INode): FileSystemHelperFunctions => }, }); -const getNodeHelperFunctions = ( +export const getNodeHelperFunctions = ( { executionId }: IWorkflowExecuteAdditionalData, workflowId: string, ): NodeHelperFunctions => ({ @@ -3453,7 +3468,7 @@ const getNodeHelperFunctions = ( await copyBinaryFile(workflowId, executionId!, filePath, fileName, mimeType), }); -const getBinaryHelperFunctions = ( +export const getBinaryHelperFunctions = ( { executionId }: IWorkflowExecuteAdditionalData, workflowId: string, ): BinaryHelperFunctions => ({ @@ -3471,7 +3486,7 @@ const getBinaryHelperFunctions = ( }, }); -const getCheckProcessedHelperFunctions = ( +export const getCheckProcessedHelperFunctions = ( workflow: Workflow, node: INode, ): DeduplicationHelperFunctions => ({ @@ -4180,145 +4195,19 @@ export function getExecuteSingleFunctions( mode: WorkflowExecuteMode, abortSignal?: AbortSignal, ): IExecuteSingleFunctions { - return ((workflow, runExecutionData, connectionInputData, inputData, node, itemIndex) => { - return { - ...getCommonWorkflowFunctions(workflow, node, additionalData), - ...executionCancellationFunctions(abortSignal), - continueOnFail: () => continueOnFail(node), - evaluateExpression: (expression: string, evaluateItemIndex: number | undefined) => 
{ - evaluateItemIndex = evaluateItemIndex === undefined ? itemIndex : evaluateItemIndex; - return workflow.expression.resolveSimpleParameterValue( - `=${expression}`, - {}, - runExecutionData, - runIndex, - evaluateItemIndex, - node.name, - connectionInputData, - mode, - getAdditionalKeys(additionalData, mode, runExecutionData), - executeData, - ); - }, - getContext(type: ContextType): IContextObject { - return NodeHelpers.getContext(runExecutionData, type, node); - }, - getCredentials: async (type) => - await getCredentials( - workflow, - node, - type, - additionalData, - mode, - executeData, - runExecutionData, - runIndex, - connectionInputData, - itemIndex, - ), - getInputData: (inputIndex = 0, inputName = 'main') => { - if (!inputData.hasOwnProperty(inputName)) { - // Return empty array because else it would throw error when nothing is connected to input - return { json: {} }; - } - - // TODO: Check if nodeType has input with that index defined - if (inputData[inputName].length < inputIndex) { - throw new ApplicationError('Could not get input index', { - extra: { inputIndex, inputName }, - }); - } - - const allItems = inputData[inputName][inputIndex]; - - if (allItems === null) { - throw new ApplicationError('Input index was not set', { - extra: { inputIndex, inputName }, - }); - } - - if (allItems[itemIndex] === null) { - throw new ApplicationError('Value of input with given index was not set', { - extra: { inputIndex, inputName, itemIndex }, - }); - } - - return allItems[itemIndex]; - }, - getInputSourceData: (inputIndex = 0, inputName = 'main') => { - if (executeData?.source === null) { - // Should never happen as n8n sets it automatically - throw new ApplicationError('Source data is missing'); - } - return executeData.source[inputName][inputIndex] as ISourceData; - }, - getItemIndex: () => itemIndex, - getMode: () => mode, - getExecuteData: () => executeData, - getNodeParameter: ( - parameterName: string, - fallbackValue?: any, - options?: 
IGetNodeParameterOptions, - ): NodeParameterValueType | object => { - return getNodeParameter( - workflow, - runExecutionData, - runIndex, - connectionInputData, - node, - parameterName, - itemIndex, - mode, - getAdditionalKeys(additionalData, mode, runExecutionData), - executeData, - fallbackValue, - options, - ); - }, - getWorkflowDataProxy: (): IWorkflowDataProxyData => { - const dataProxy = new WorkflowDataProxy( - workflow, - runExecutionData, - runIndex, - itemIndex, - node.name, - connectionInputData, - {}, - mode, - getAdditionalKeys(additionalData, mode, runExecutionData), - executeData, - ); - return dataProxy.getDataProxy(); - }, - helpers: { - createDeferredPromise, - returnJsonArray, - ...getRequestHelperFunctions( - workflow, - node, - additionalData, - runExecutionData, - connectionInputData, - ), - ...getBinaryHelperFunctions(additionalData, workflow.id), - - assertBinaryData: (propertyName, inputIndex = 0) => - assertBinaryData(inputData, node, itemIndex, propertyName, inputIndex), - getBinaryDataBuffer: async (propertyName, inputIndex = 0) => - await getBinaryDataBuffer(inputData, itemIndex, propertyName, inputIndex), - }, - logAiEvent: (eventName: AiEvent, msg: string) => { - return additionalData.logAiEvent(eventName, { - executionId: additionalData.executionId ?? 'unsaved-execution', - nodeName: node.name, - workflowName: workflow.name ?? 'Unnamed workflow', - nodeType: node.type, - workflowId: workflow.id ?? 
'unsaved-workflow', - msg, - }); - }, - }; - })(workflow, runExecutionData, connectionInputData, inputData, node, itemIndex); + return new ExecuteSingleContext( + workflow, + node, + additionalData, + mode, + runExecutionData, + runIndex, + connectionInputData, + inputData, + itemIndex, + executeData, + abortSignal, + ); } export function getCredentialTestFunctions(): ICredentialTestFunctions { diff --git a/packages/core/src/node-execution-context/__tests__/execute-single-context.test.ts b/packages/core/src/node-execution-context/__tests__/execute-single-context.test.ts new file mode 100644 index 0000000000..dcd8509c1d --- /dev/null +++ b/packages/core/src/node-execution-context/__tests__/execute-single-context.test.ts @@ -0,0 +1,301 @@ +import { mock } from 'jest-mock-extended'; +import type { + INode, + IWorkflowExecuteAdditionalData, + IRunExecutionData, + INodeExecutionData, + ITaskDataConnections, + IExecuteData, + Workflow, + WorkflowExecuteMode, + ICredentialsHelper, + Expression, + INodeType, + INodeTypes, + OnError, + ContextType, + IContextObject, + ICredentialDataDecryptedObject, + ISourceData, +} from 'n8n-workflow'; +import { ApplicationError, NodeHelpers } from 'n8n-workflow'; + +import { ExecuteSingleContext } from '../execute-single-context'; + +describe('ExecuteSingleContext', () => { + const testCredentialType = 'testCredential'; + const nodeType = mock({ + description: { + credentials: [ + { + name: testCredentialType, + required: true, + }, + ], + properties: [ + { + name: 'testParameter', + required: true, + }, + ], + }, + }); + const nodeTypes = mock(); + const expression = mock(); + const workflow = mock({ expression, nodeTypes }); + const node = mock({ + credentials: { + [testCredentialType]: { + id: 'testCredentialId', + }, + }, + }); + node.parameters = { + testParameter: 'testValue', + }; + const credentialsHelper = mock(); + const additionalData = mock({ credentialsHelper }); + const mode: WorkflowExecuteMode = 'manual'; + const 
runExecutionData = mock(); + const connectionInputData = mock(); + const inputData: ITaskDataConnections = { main: [[{ json: { test: 'data' } }]] }; + const executeData = mock(); + const runIndex = 0; + const itemIndex = 0; + const abortSignal = mock(); + + const executeSingleContext = new ExecuteSingleContext( + workflow, + node, + additionalData, + mode, + runExecutionData, + runIndex, + connectionInputData, + inputData, + itemIndex, + executeData, + abortSignal, + ); + + beforeEach(() => { + nodeTypes.getByNameAndVersion.mockReturnValue(nodeType); + expression.getParameterValue.mockImplementation((value) => value); + }); + + describe('getExecutionCancelSignal', () => { + it('should return the abort signal', () => { + expect(executeSingleContext.getExecutionCancelSignal()).toBe(abortSignal); + }); + }); + + describe('continueOnFail', () => { + afterEach(() => { + node.onError = undefined; + node.continueOnFail = false; + }); + + it('should return false for nodes by default', () => { + expect(executeSingleContext.continueOnFail()).toEqual(false); + }); + + it('should return true if node has continueOnFail set to true', () => { + node.continueOnFail = true; + expect(executeSingleContext.continueOnFail()).toEqual(true); + }); + + test.each([ + ['continueRegularOutput', true], + ['continueErrorOutput', true], + ['stopWorkflow', false], + ])('if node has onError set to %s, it should return %s', (onError, expected) => { + node.onError = onError as OnError; + expect(executeSingleContext.continueOnFail()).toEqual(expected); + }); + }); + + describe('evaluateExpression', () => { + it('should evaluate the expression correctly', () => { + const expression = '$json.test'; + const expectedResult = 'data'; + const resolveSimpleParameterValueSpy = jest.spyOn( + workflow.expression, + 'resolveSimpleParameterValue', + ); + resolveSimpleParameterValueSpy.mockReturnValue(expectedResult); + + expect(executeSingleContext.evaluateExpression(expression, itemIndex)).toEqual( + 
expectedResult, + ); + + expect(resolveSimpleParameterValueSpy).toHaveBeenCalledWith( + `=${expression}`, + {}, + runExecutionData, + runIndex, + itemIndex, + node.name, + connectionInputData, + mode, + expect.objectContaining({}), + executeData, + ); + + resolveSimpleParameterValueSpy.mockRestore(); + }); + }); + + describe('getContext', () => { + it('should return the context object', () => { + const contextType: ContextType = 'node'; + const expectedContext = mock(); + const getContextSpy = jest.spyOn(NodeHelpers, 'getContext'); + getContextSpy.mockReturnValue(expectedContext); + + expect(executeSingleContext.getContext(contextType)).toEqual(expectedContext); + + expect(getContextSpy).toHaveBeenCalledWith(runExecutionData, contextType, node); + + getContextSpy.mockRestore(); + }); + }); + + describe('getInputData', () => { + const inputIndex = 0; + const inputName = 'main'; + + afterEach(() => { + inputData[inputName] = [[{ json: { test: 'data' } }]]; + }); + + it('should return the input data correctly', () => { + const expectedData = { json: { test: 'data' } }; + + expect(executeSingleContext.getInputData(inputIndex, inputName)).toEqual(expectedData); + }); + + it('should return an empty object if the input name does not exist', () => { + const inputName = 'nonExistent'; + const expectedData = { json: {} }; + + expect(executeSingleContext.getInputData(inputIndex, inputName)).toEqual(expectedData); + }); + + it('should throw an error if the input index is out of range', () => { + const inputIndex = 1; + + expect(() => executeSingleContext.getInputData(inputIndex, inputName)).toThrow( + ApplicationError, + ); + }); + + it('should throw an error if the input index was not set', () => { + inputData.main[inputIndex] = null; + + expect(() => executeSingleContext.getInputData(inputIndex, inputName)).toThrow( + ApplicationError, + ); + }); + + it('should throw an error if the value of input with given index was not set', () => { + delete 
inputData.main[inputIndex]![itemIndex]; + + expect(() => executeSingleContext.getInputData(inputIndex, inputName)).toThrow( + ApplicationError, + ); + }); + }); + + describe('getItemIndex', () => { + it('should return the item index correctly', () => { + expect(executeSingleContext.getItemIndex()).toEqual(itemIndex); + }); + }); + + describe('getNodeParameter', () => { + beforeEach(() => { + nodeTypes.getByNameAndVersion.mockReturnValue(nodeType); + expression.getParameterValue.mockImplementation((value) => value); + }); + + it('should return parameter value when it exists', () => { + const parameter = executeSingleContext.getNodeParameter('testParameter'); + + expect(parameter).toBe('testValue'); + }); + + it('should return the fallback value when the parameter does not exist', () => { + const parameter = executeSingleContext.getNodeParameter('otherParameter', 'fallback'); + + expect(parameter).toBe('fallback'); + }); + }); + + describe('getCredentials', () => { + it('should get decrypted credentials', async () => { + nodeTypes.getByNameAndVersion.mockReturnValue(nodeType); + credentialsHelper.getDecrypted.mockResolvedValue({ secret: 'token' }); + + const credentials = + await executeSingleContext.getCredentials( + testCredentialType, + ); + + expect(credentials).toEqual({ secret: 'token' }); + }); + }); + + describe('getExecuteData', () => { + it('should return the execute data correctly', () => { + expect(executeSingleContext.getExecuteData()).toEqual(executeData); + }); + }); + + describe('getWorkflowDataProxy', () => { + it('should return the workflow data proxy correctly', () => { + const workflowDataProxy = executeSingleContext.getWorkflowDataProxy(); + expect(workflowDataProxy.isProxy).toBe(true); + expect(Object.keys(workflowDataProxy.$input)).toEqual([ + 'all', + 'context', + 'first', + 'item', + 'last', + 'params', + ]); + }); + }); + + describe('getInputSourceData', () => { + it('should return the input source data correctly', () => { + const 
inputSourceData = mock(); + executeData.source = { main: [inputSourceData] }; + + expect(executeSingleContext.getInputSourceData()).toEqual(inputSourceData); + }); + + it('should throw an error if the source data is missing', () => { + executeData.source = null; + + expect(() => executeSingleContext.getInputSourceData()).toThrow(ApplicationError); + }); + }); + + describe('logAiEvent', () => { + it('should log the AI event correctly', () => { + const eventName = 'ai-tool-called'; + const msg = 'test message'; + + executeSingleContext.logAiEvent(eventName, msg); + + expect(additionalData.logAiEvent).toHaveBeenCalledWith(eventName, { + executionId: additionalData.executionId, + nodeName: node.name, + workflowName: workflow.name, + nodeType: node.type, + workflowId: workflow.id, + msg, + }); + }); + }); +}); diff --git a/packages/core/src/node-execution-context/execute-single-context.ts b/packages/core/src/node-execution-context/execute-single-context.ts new file mode 100644 index 0000000000..2b03a81974 --- /dev/null +++ b/packages/core/src/node-execution-context/execute-single-context.ts @@ -0,0 +1,218 @@ +import type { + ICredentialDataDecryptedObject, + IGetNodeParameterOptions, + INode, + INodeExecutionData, + IRunExecutionData, + IExecuteSingleFunctions, + IWorkflowExecuteAdditionalData, + Workflow, + WorkflowExecuteMode, + ITaskDataConnections, + IExecuteData, + ContextType, + AiEvent, + ISourceData, +} from 'n8n-workflow'; +import { + ApplicationError, + createDeferredPromise, + NodeHelpers, + WorkflowDataProxy, +} from 'n8n-workflow'; + +// eslint-disable-next-line import/no-cycle +import { + assertBinaryData, + continueOnFail, + getAdditionalKeys, + getBinaryDataBuffer, + getBinaryHelperFunctions, + getCredentials, + getNodeParameter, + getRequestHelperFunctions, + returnJsonArray, +} from '@/NodeExecuteFunctions'; + +import { NodeExecutionContext } from './node-execution-context'; + +export class ExecuteSingleContext extends NodeExecutionContext implements 
IExecuteSingleFunctions { + readonly helpers: IExecuteSingleFunctions['helpers']; + + constructor( + workflow: Workflow, + node: INode, + additionalData: IWorkflowExecuteAdditionalData, + mode: WorkflowExecuteMode, + private readonly runExecutionData: IRunExecutionData, + private readonly runIndex: number, + private readonly connectionInputData: INodeExecutionData[], + private readonly inputData: ITaskDataConnections, + private readonly itemIndex: number, + private readonly executeData: IExecuteData, + private readonly abortSignal?: AbortSignal, + ) { + super(workflow, node, additionalData, mode); + + this.helpers = { + createDeferredPromise, + returnJsonArray, + ...getRequestHelperFunctions( + workflow, + node, + additionalData, + runExecutionData, + connectionInputData, + ), + ...getBinaryHelperFunctions(additionalData, workflow.id), + + assertBinaryData: (propertyName, inputIndex = 0) => + assertBinaryData(inputData, node, itemIndex, propertyName, inputIndex), + getBinaryDataBuffer: async (propertyName, inputIndex = 0) => + await getBinaryDataBuffer(inputData, itemIndex, propertyName, inputIndex), + }; + } + + getExecutionCancelSignal() { + return this.abortSignal; + } + + onExecutionCancellation(handler: () => unknown) { + const fn = () => { + this.abortSignal?.removeEventListener('abort', fn); + handler(); + }; + this.abortSignal?.addEventListener('abort', fn); + } + + continueOnFail() { + return continueOnFail(this.node); + } + + evaluateExpression(expression: string, evaluateItemIndex: number | undefined) { + evaluateItemIndex = evaluateItemIndex ?? 
this.itemIndex; + return this.workflow.expression.resolveSimpleParameterValue( + `=${expression}`, + {}, + this.runExecutionData, + this.runIndex, + evaluateItemIndex, + this.node.name, + this.connectionInputData, + this.mode, + getAdditionalKeys(this.additionalData, this.mode, this.runExecutionData), + this.executeData, + ); + } + + getContext(type: ContextType) { + return NodeHelpers.getContext(this.runExecutionData, type, this.node); + } + + getInputData(inputIndex = 0, inputName = 'main') { + if (!this.inputData.hasOwnProperty(inputName)) { + // Return empty array because else it would throw error when nothing is connected to input + return { json: {} }; + } + + // TODO: Check if nodeType has input with that index defined + if (this.inputData[inputName].length < inputIndex) { + throw new ApplicationError('Could not get input index', { + extra: { inputIndex, inputName }, + }); + } + + const allItems = this.inputData[inputName][inputIndex]; + + if (allItems === null || allItems === undefined) { + throw new ApplicationError('Input index was not set', { + extra: { inputIndex, inputName }, + }); + } + + const data = allItems[this.itemIndex]; + if (data === null || data === undefined) { + throw new ApplicationError('Value of input with given index was not set', { + extra: { inputIndex, inputName, itemIndex: this.itemIndex }, + }); + } + + return data; + } + + getItemIndex() { + return this.itemIndex; + } + + // eslint-disable-next-line @typescript-eslint/no-explicit-any + getNodeParameter(parameterName: string, fallbackValue?: any, options?: IGetNodeParameterOptions) { + return getNodeParameter( + this.workflow, + this.runExecutionData, + this.runIndex, + this.connectionInputData, + this.node, + parameterName, + this.itemIndex, + this.mode, + getAdditionalKeys(this.additionalData, this.mode, this.runExecutionData), + this.executeData, + fallbackValue, + options, + ); + } + + // TODO: extract out in a BaseExecutionContext + async getCredentials(type: string) { + 
return await getCredentials( + this.workflow, + this.node, + type, + this.additionalData, + this.mode, + this.executeData, + this.runExecutionData, + this.runIndex, + this.connectionInputData, + this.itemIndex, + ); + } + + getExecuteData() { + return this.executeData; + } + + getWorkflowDataProxy() { + return new WorkflowDataProxy( + this.workflow, + this.runExecutionData, + this.runIndex, + this.itemIndex, + this.node.name, + this.connectionInputData, + {}, + this.mode, + getAdditionalKeys(this.additionalData, this.mode, this.runExecutionData), + this.executeData, + ).getDataProxy(); + } + + getInputSourceData(inputIndex = 0, inputName = 'main'): ISourceData { + if (this.executeData?.source === null) { + // Should never happen as n8n sets it automatically + throw new ApplicationError('Source data is missing'); + } + return this.executeData.source[inputName][inputIndex] as ISourceData; + } + + logAiEvent(eventName: AiEvent, msg: string) { + return this.additionalData.logAiEvent(eventName, { + executionId: this.additionalData.executionId ?? 'unsaved-execution', + nodeName: this.node.name, + workflowName: this.workflow.name ?? 'Unnamed workflow', + nodeType: this.node.type, + workflowId: this.workflow.id ?? 
'unsaved-workflow', + msg, + }); + } +} diff --git a/packages/core/src/node-execution-context/helpers/__tests__/binary-helpers.test.ts b/packages/core/src/node-execution-context/helpers/__tests__/binary-helpers.test.ts deleted file mode 100644 index 302713954f..0000000000 --- a/packages/core/src/node-execution-context/helpers/__tests__/binary-helpers.test.ts +++ /dev/null @@ -1,136 +0,0 @@ -import FileType from 'file-type'; -import { IncomingMessage, type ClientRequest } from 'http'; -import { mock } from 'jest-mock-extended'; -import type { Workflow, IWorkflowExecuteAdditionalData, IBinaryData } from 'n8n-workflow'; -import type { Socket } from 'net'; -import { Container } from 'typedi'; - -import { BinaryDataService } from '@/BinaryData/BinaryData.service'; - -import { BinaryHelpers } from '../binary-helpers'; - -jest.mock('file-type'); - -describe('BinaryHelpers', () => { - let binaryDataService = mock(); - Container.set(BinaryDataService, binaryDataService); - const workflow = mock({ id: '123' }); - const additionalData = mock({ executionId: '456' }); - const binaryHelpers = new BinaryHelpers(workflow, additionalData); - - beforeEach(() => { - jest.clearAllMocks(); - - binaryDataService.store.mockImplementation( - async (_workflowId, _executionId, _buffer, value) => value, - ); - }); - - describe('getBinaryPath', () => { - it('should call getPath method of BinaryDataService', () => { - binaryHelpers.getBinaryPath('mock-binary-data-id'); - expect(binaryDataService.getPath).toHaveBeenCalledWith('mock-binary-data-id'); - }); - }); - - describe('getBinaryMetadata', () => { - it('should call getMetadata method of BinaryDataService', async () => { - await binaryHelpers.getBinaryMetadata('mock-binary-data-id'); - expect(binaryDataService.getMetadata).toHaveBeenCalledWith('mock-binary-data-id'); - }); - }); - - describe('getBinaryStream', () => { - it('should call getStream method of BinaryDataService', async () => { - await 
binaryHelpers.getBinaryStream('mock-binary-data-id'); - expect(binaryDataService.getAsStream).toHaveBeenCalledWith('mock-binary-data-id', undefined); - }); - }); - - describe('prepareBinaryData', () => { - it('should guess the mime type and file extension if not provided', async () => { - const buffer = Buffer.from('test'); - const fileTypeData = { mime: 'application/pdf', ext: 'pdf' }; - (FileType.fromBuffer as jest.Mock).mockResolvedValue(fileTypeData); - - const binaryData = await binaryHelpers.prepareBinaryData(buffer); - - expect(binaryData.mimeType).toEqual('application/pdf'); - expect(binaryData.fileExtension).toEqual('pdf'); - expect(binaryData.fileType).toEqual('pdf'); - expect(binaryData.fileName).toBeUndefined(); - expect(binaryData.directory).toBeUndefined(); - expect(binaryDataService.store).toHaveBeenCalledWith( - workflow.id, - additionalData.executionId!, - buffer, - binaryData, - ); - }); - - it('should use the provided mime type and file extension if provided', async () => { - const buffer = Buffer.from('test'); - const mimeType = 'application/octet-stream'; - - const binaryData = await binaryHelpers.prepareBinaryData(buffer, undefined, mimeType); - - expect(binaryData.mimeType).toEqual(mimeType); - expect(binaryData.fileExtension).toEqual('bin'); - expect(binaryData.fileType).toBeUndefined(); - expect(binaryData.fileName).toBeUndefined(); - expect(binaryData.directory).toBeUndefined(); - expect(binaryDataService.store).toHaveBeenCalledWith( - workflow.id, - additionalData.executionId!, - buffer, - binaryData, - ); - }); - - const mockSocket = mock({ readableHighWaterMark: 0 }); - - it('should use the contentDisposition.filename, responseUrl, and contentType properties to set the fileName, directory, and mimeType properties of the binaryData object', async () => { - const incomingMessage = new IncomingMessage(mockSocket); - incomingMessage.contentDisposition = { filename: 'test.txt', type: 'attachment' }; - incomingMessage.contentType = 
'text/plain'; - incomingMessage.responseUrl = 'https://example.com/test.txt'; - - const binaryData = await binaryHelpers.prepareBinaryData(incomingMessage); - - expect(binaryData.fileName).toEqual('test.txt'); - expect(binaryData.fileType).toEqual('text'); - expect(binaryData.directory).toBeUndefined(); - expect(binaryData.mimeType).toEqual('text/plain'); - expect(binaryData.fileExtension).toEqual('txt'); - }); - - it('should use the req.path property to set the fileName property of the binaryData object if contentDisposition.filename and responseUrl are not provided', async () => { - const incomingMessage = new IncomingMessage(mockSocket); - incomingMessage.contentType = 'text/plain'; - incomingMessage.req = mock({ path: '/test.txt' }); - - const binaryData = await binaryHelpers.prepareBinaryData(incomingMessage); - - expect(binaryData.fileName).toEqual('test.txt'); - expect(binaryData.directory).toBeUndefined(); - expect(binaryData.mimeType).toEqual('text/plain'); - expect(binaryData.fileExtension).toEqual('txt'); - }); - }); - - describe('setBinaryDataBuffer', () => { - it('should call store method of BinaryDataService', async () => { - const binaryData = mock(); - const bufferOrStream = mock(); - - await binaryHelpers.setBinaryDataBuffer(binaryData, bufferOrStream); - - expect(binaryDataService.store).toHaveBeenCalledWith( - workflow.id, - additionalData.executionId, - bufferOrStream, - binaryData, - ); - }); - }); -}); diff --git a/packages/core/src/node-execution-context/helpers/__tests__/scheduling-helpers.test.ts b/packages/core/src/node-execution-context/helpers/__tests__/scheduling-helpers.test.ts deleted file mode 100644 index 06abae8204..0000000000 --- a/packages/core/src/node-execution-context/helpers/__tests__/scheduling-helpers.test.ts +++ /dev/null @@ -1,33 +0,0 @@ -import { mock } from 'jest-mock-extended'; -import type { Workflow } from 'n8n-workflow'; -import { Container } from 'typedi'; - -import { ScheduledTaskManager } from 
'@/ScheduledTaskManager'; - -import { SchedulingHelpers } from '../scheduling-helpers'; - -describe('SchedulingHelpers', () => { - const scheduledTaskManager = mock(); - Container.set(ScheduledTaskManager, scheduledTaskManager); - const workflow = mock(); - const schedulingHelpers = new SchedulingHelpers(workflow); - - beforeEach(() => { - jest.clearAllMocks(); - }); - - describe('registerCron', () => { - it('should call registerCron method of ScheduledTaskManager', () => { - const cronExpression = '* * * * * *'; - const onTick = jest.fn(); - - schedulingHelpers.registerCron(cronExpression, onTick); - - expect(scheduledTaskManager.registerCron).toHaveBeenCalledWith( - workflow, - cronExpression, - onTick, - ); - }); - }); -}); diff --git a/packages/core/src/node-execution-context/helpers/__tests__/ssh-tunnel-helpers.test.ts b/packages/core/src/node-execution-context/helpers/__tests__/ssh-tunnel-helpers.test.ts deleted file mode 100644 index cbe6916eea..0000000000 --- a/packages/core/src/node-execution-context/helpers/__tests__/ssh-tunnel-helpers.test.ts +++ /dev/null @@ -1,32 +0,0 @@ -import { mock } from 'jest-mock-extended'; -import type { SSHCredentials } from 'n8n-workflow'; -import type { Client } from 'ssh2'; -import { Container } from 'typedi'; - -import { SSHClientsManager } from '@/SSHClientsManager'; - -import { SSHTunnelHelpers } from '../ssh-tunnel-helpers'; - -describe('SSHTunnelHelpers', () => { - const sshClientsManager = mock(); - Container.set(SSHClientsManager, sshClientsManager); - const sshTunnelHelpers = new SSHTunnelHelpers(); - - beforeEach(() => { - jest.clearAllMocks(); - }); - - describe('getSSHClient', () => { - const credentials = mock(); - - it('should call SSHClientsManager.getClient with the given credentials', async () => { - const mockClient = mock(); - sshClientsManager.getClient.mockResolvedValue(mockClient); - - const client = await sshTunnelHelpers.getSSHClient(credentials); - - 
expect(sshClientsManager.getClient).toHaveBeenCalledWith(credentials); - expect(client).toBe(mockClient); - }); - }); -}); diff --git a/packages/core/src/node-execution-context/helpers/binary-helpers.ts b/packages/core/src/node-execution-context/helpers/binary-helpers.ts deleted file mode 100644 index a15c59139b..0000000000 --- a/packages/core/src/node-execution-context/helpers/binary-helpers.ts +++ /dev/null @@ -1,148 +0,0 @@ -import FileType from 'file-type'; -import { IncomingMessage } from 'http'; -import MimeTypes from 'mime-types'; -import { ApplicationError, fileTypeFromMimeType } from 'n8n-workflow'; -import type { - BinaryHelperFunctions, - IWorkflowExecuteAdditionalData, - Workflow, - IBinaryData, -} from 'n8n-workflow'; -import path from 'path'; -import type { Readable } from 'stream'; -import Container from 'typedi'; - -import { BinaryDataService } from '@/BinaryData/BinaryData.service'; -import { binaryToBuffer } from '@/BinaryData/utils'; -// eslint-disable-next-line import/no-cycle -import { binaryToString } from '@/NodeExecuteFunctions'; - -export class BinaryHelpers { - private readonly binaryDataService = Container.get(BinaryDataService); - - constructor( - private readonly workflow: Workflow, - private readonly additionalData: IWorkflowExecuteAdditionalData, - ) {} - - get exported(): BinaryHelperFunctions { - return { - getBinaryPath: this.getBinaryPath.bind(this), - getBinaryMetadata: this.getBinaryMetadata.bind(this), - getBinaryStream: this.getBinaryStream.bind(this), - binaryToBuffer, - binaryToString, - prepareBinaryData: this.prepareBinaryData.bind(this), - setBinaryDataBuffer: this.setBinaryDataBuffer.bind(this), - copyBinaryFile: this.copyBinaryFile.bind(this), - }; - } - - getBinaryPath(binaryDataId: string) { - return this.binaryDataService.getPath(binaryDataId); - } - - async getBinaryMetadata(binaryDataId: string) { - return await this.binaryDataService.getMetadata(binaryDataId); - } - - async getBinaryStream(binaryDataId: string, 
chunkSize?: number) { - return await this.binaryDataService.getAsStream(binaryDataId, chunkSize); - } - - // eslint-disable-next-line complexity - async prepareBinaryData(binaryData: Buffer | Readable, filePath?: string, mimeType?: string) { - let fileExtension: string | undefined; - if (binaryData instanceof IncomingMessage) { - if (!filePath) { - try { - const { responseUrl } = binaryData; - filePath = - binaryData.contentDisposition?.filename ?? - ((responseUrl && new URL(responseUrl).pathname) ?? binaryData.req?.path)?.slice(1); - } catch {} - } - if (!mimeType) { - mimeType = binaryData.contentType; - } - } - - if (!mimeType) { - // If no mime type is given figure it out - - if (filePath) { - // Use file path to guess mime type - const mimeTypeLookup = MimeTypes.lookup(filePath); - if (mimeTypeLookup) { - mimeType = mimeTypeLookup; - } - } - - if (!mimeType) { - if (Buffer.isBuffer(binaryData)) { - // Use buffer to guess mime type - const fileTypeData = await FileType.fromBuffer(binaryData); - if (fileTypeData) { - mimeType = fileTypeData.mime; - fileExtension = fileTypeData.ext; - } - } else if (binaryData instanceof IncomingMessage) { - mimeType = binaryData.headers['content-type']; - } else { - // TODO: detect filetype from other kind of streams - } - } - } - - if (!fileExtension && mimeType) { - fileExtension = MimeTypes.extension(mimeType) || undefined; - } - - if (!mimeType) { - // Fall back to text - mimeType = 'text/plain'; - } - - const returnData: IBinaryData = { - mimeType, - fileType: fileTypeFromMimeType(mimeType), - fileExtension, - data: '', - }; - - if (filePath) { - if (filePath.includes('?')) { - // Remove maybe present query parameters - filePath = filePath.split('?').shift(); - } - - const filePathParts = path.parse(filePath as string); - - if (filePathParts.dir !== '') { - returnData.directory = filePathParts.dir; - } - returnData.fileName = filePathParts.base; - - // Remove the dot - const extractedFileExtension = 
filePathParts.ext.slice(1); - if (extractedFileExtension) { - returnData.fileExtension = extractedFileExtension; - } - } - - return await this.setBinaryDataBuffer(returnData, binaryData); - } - - async setBinaryDataBuffer(binaryData: IBinaryData, bufferOrStream: Buffer | Readable) { - return await this.binaryDataService.store( - this.workflow.id, - this.additionalData.executionId!, - bufferOrStream, - binaryData, - ); - } - - async copyBinaryFile(): Promise { - throw new ApplicationError('`copyBinaryFile` has been removed. Please upgrade this node.'); - } -} diff --git a/packages/core/src/node-execution-context/helpers/request-helpers.ts b/packages/core/src/node-execution-context/helpers/request-helpers.ts deleted file mode 100644 index 2c5eb19290..0000000000 --- a/packages/core/src/node-execution-context/helpers/request-helpers.ts +++ /dev/null @@ -1,381 +0,0 @@ -import { createHash } from 'crypto'; -import { pick } from 'lodash'; -import { jsonParse, NodeOperationError, sleep } from 'n8n-workflow'; -import type { - RequestHelperFunctions, - IAdditionalCredentialOptions, - IAllExecuteFunctions, - IExecuteData, - IHttpRequestOptions, - IN8nHttpFullResponse, - IN8nHttpResponse, - INode, - INodeExecutionData, - IOAuth2Options, - IRequestOptions, - IRunExecutionData, - IWorkflowDataProxyAdditionalKeys, - IWorkflowExecuteAdditionalData, - NodeParameterValueType, - PaginationOptions, - Workflow, - WorkflowExecuteMode, -} from 'n8n-workflow'; -import { Readable } from 'stream'; - -// eslint-disable-next-line import/no-cycle -import { - applyPaginationRequestData, - binaryToString, - httpRequest, - httpRequestWithAuthentication, - proxyRequestToAxios, - requestOAuth1, - requestOAuth2, - requestWithAuthentication, - validateUrl, -} from '@/NodeExecuteFunctions'; - -export class RequestHelpers { - constructor( - private readonly context: IAllExecuteFunctions, - private readonly workflow: Workflow, - private readonly node: INode, - private readonly additionalData: 
IWorkflowExecuteAdditionalData, - private readonly runExecutionData: IRunExecutionData | null = null, - private readonly connectionInputData: INodeExecutionData[] = [], - ) {} - - get exported(): RequestHelperFunctions { - return { - httpRequest, - httpRequestWithAuthentication: this.httpRequestWithAuthentication.bind(this), - requestWithAuthenticationPaginated: this.requestWithAuthenticationPaginated.bind(this), - request: this.request.bind(this), - requestWithAuthentication: this.requestWithAuthentication.bind(this), - requestOAuth1: this.requestOAuth1.bind(this), - requestOAuth2: this.requestOAuth2.bind(this), - }; - } - - get httpRequest() { - return httpRequest; - } - - async httpRequestWithAuthentication( - credentialsType: string, - requestOptions: IHttpRequestOptions, - additionalCredentialOptions?: IAdditionalCredentialOptions, - ) { - // eslint-disable-next-line @typescript-eslint/no-unsafe-return - return await httpRequestWithAuthentication.call( - this.context, - credentialsType, - requestOptions, - this.workflow, - this.node, - this.additionalData, - additionalCredentialOptions, - ); - } - - // eslint-disable-next-line complexity - async requestWithAuthenticationPaginated( - requestOptions: IRequestOptions, - itemIndex: number, - paginationOptions: PaginationOptions, - credentialsType?: string, - additionalCredentialOptions?: IAdditionalCredentialOptions, - ): Promise { - const responseData = []; - if (!requestOptions.qs) { - requestOptions.qs = {}; - } - requestOptions.resolveWithFullResponse = true; - requestOptions.simple = false; - - let tempResponseData: IN8nHttpFullResponse; - let makeAdditionalRequest: boolean; - let paginateRequestData: PaginationOptions['request']; - - const runIndex = 0; - - const additionalKeys = { - $request: requestOptions, - $response: {} as IN8nHttpFullResponse, - $version: this.node.typeVersion, - $pageCount: 0, - }; - - const executeData: IExecuteData = { - data: {}, - node: this.node, - source: null, - }; - - const 
hashData = { - identicalCount: 0, - previousLength: 0, - previousHash: '', - }; - - do { - paginateRequestData = this.getResolvedValue( - paginationOptions.request as unknown as NodeParameterValueType, - itemIndex, - runIndex, - executeData, - additionalKeys, - false, - ) as object as PaginationOptions['request']; - - const tempRequestOptions = applyPaginationRequestData(requestOptions, paginateRequestData); - - if (!validateUrl(tempRequestOptions.uri as string)) { - throw new NodeOperationError( - this.node, - `'${paginateRequestData.url}' is not a valid URL.`, - { - itemIndex, - runIndex, - type: 'invalid_url', - }, - ); - } - - if (credentialsType) { - // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment - tempResponseData = await this.requestWithAuthentication( - credentialsType, - tempRequestOptions, - additionalCredentialOptions, - ); - } else { - // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment - tempResponseData = await this.request(tempRequestOptions); - } - - const newResponse: IN8nHttpFullResponse = Object.assign( - { - body: {}, - headers: {}, - statusCode: 0, - }, - pick(tempResponseData, ['body', 'headers', 'statusCode']), - ); - - let contentBody: Exclude; - - if (newResponse.body instanceof Readable && paginationOptions.binaryResult !== true) { - // Keep the original string version that we can use it to hash if needed - contentBody = await binaryToString(newResponse.body as Buffer | Readable); - - const responseContentType = newResponse.headers['content-type']?.toString() ?? 
''; - if (responseContentType.includes('application/json')) { - newResponse.body = jsonParse(contentBody, { fallbackValue: {} }); - } else { - newResponse.body = contentBody; - } - tempResponseData.__bodyResolved = true; - tempResponseData.body = newResponse.body; - } else { - contentBody = newResponse.body; - } - - if (paginationOptions.binaryResult !== true || tempResponseData.headers.etag) { - // If the data is not binary (and so not a stream), or an etag is present, - // we check via etag or hash if identical data is received - - let contentLength = 0; - if ('content-length' in tempResponseData.headers) { - contentLength = parseInt(tempResponseData.headers['content-length'] as string) || 0; - } - - if (hashData.previousLength === contentLength) { - let hash: string; - if (tempResponseData.headers.etag) { - // If an etag is provided, we use it as "hash" - hash = tempResponseData.headers.etag as string; - } else { - // If there is no etag, we calculate a hash from the data in the body - if (typeof contentBody !== 'string') { - contentBody = JSON.stringify(contentBody); - } - hash = createHash('md5').update(contentBody).digest('base64'); - } - - if (hashData.previousHash === hash) { - hashData.identicalCount += 1; - if (hashData.identicalCount > 2) { - // Length was identical 5x and hash 3x - throw new NodeOperationError( - this.node, - 'The returned response was identical 5x, so requests got stopped', - { - itemIndex, - description: - 'Check if "Pagination Completed When" has been configured correctly.', - }, - ); - } - } else { - hashData.identicalCount = 0; - } - hashData.previousHash = hash; - } else { - hashData.identicalCount = 0; - } - hashData.previousLength = contentLength; - } - - responseData.push(tempResponseData); - - additionalKeys.$response = newResponse; - additionalKeys.$pageCount = additionalKeys.$pageCount + 1; - - const maxRequests = this.getResolvedValue( - paginationOptions.maxRequests, - itemIndex, - runIndex, - executeData, - 
additionalKeys, - false, - ) as number; - - if (maxRequests && additionalKeys.$pageCount >= maxRequests) { - break; - } - - makeAdditionalRequest = this.getResolvedValue( - paginationOptions.continue, - itemIndex, - runIndex, - executeData, - additionalKeys, - false, - ) as boolean; - - if (makeAdditionalRequest) { - if (paginationOptions.requestInterval) { - const requestInterval = this.getResolvedValue( - paginationOptions.requestInterval, - itemIndex, - runIndex, - executeData, - additionalKeys, - false, - ) as number; - - await sleep(requestInterval); - } - if (tempResponseData.statusCode < 200 || tempResponseData.statusCode >= 300) { - // We have it configured to let all requests pass no matter the response code - // via "requestOptions.simple = false" to not by default fail if it is for example - // configured to stop on 404 response codes. For that reason we have to throw here - // now an error manually if the response code is not a success one. - let data = tempResponseData.body; - if (data instanceof Readable && paginationOptions.binaryResult !== true) { - data = await binaryToString(data as Buffer | Readable); - } else if (typeof data === 'object') { - data = JSON.stringify(data); - } - - throw Object.assign(new Error(`${tempResponseData.statusCode} - "${data?.toString()}"`), { - statusCode: tempResponseData.statusCode, - error: data, - isAxiosError: true, - response: { - headers: tempResponseData.headers, - status: tempResponseData.statusCode, - statusText: tempResponseData.statusMessage, - }, - }); - } - } - } while (makeAdditionalRequest); - - return responseData; - } - - async request(uriOrObject: string | IRequestOptions, options?: IRequestOptions) { - // eslint-disable-next-line @typescript-eslint/no-unsafe-return - return await proxyRequestToAxios( - this.workflow, - this.additionalData, - this.node, - uriOrObject, - options, - ); - } - - async requestWithAuthentication( - credentialsType: string, - requestOptions: IRequestOptions, - 
additionalCredentialOptions?: IAdditionalCredentialOptions, - itemIndex?: number, - ) { - // eslint-disable-next-line @typescript-eslint/no-unsafe-return - return await requestWithAuthentication.call( - this.context, - credentialsType, - requestOptions, - this.workflow, - this.node, - this.additionalData, - additionalCredentialOptions, - itemIndex, - ); - } - - async requestOAuth1(credentialsType: string, requestOptions: IRequestOptions) { - // eslint-disable-next-line @typescript-eslint/no-unsafe-return - return await requestOAuth1.call(this.context, credentialsType, requestOptions); - } - - async requestOAuth2( - credentialsType: string, - requestOptions: IRequestOptions, - oAuth2Options?: IOAuth2Options, - ) { - // eslint-disable-next-line @typescript-eslint/no-unsafe-return - return await requestOAuth2.call( - this.context, - credentialsType, - requestOptions, - this.node, - this.additionalData, - oAuth2Options, - ); - } - - private getResolvedValue( - parameterValue: NodeParameterValueType, - itemIndex: number, - runIndex: number, - executeData: IExecuteData, - additionalKeys?: IWorkflowDataProxyAdditionalKeys, - returnObjectAsString = false, - ): NodeParameterValueType { - const mode: WorkflowExecuteMode = 'internal'; - - if ( - typeof parameterValue === 'object' || - (typeof parameterValue === 'string' && parameterValue.charAt(0) === '=') - ) { - return this.workflow.expression.getParameterValue( - parameterValue, - this.runExecutionData, - runIndex, - itemIndex, - this.node.name, - this.connectionInputData, - mode, - additionalKeys ?? 
{}, - executeData, - returnObjectAsString, - ); - } - - return parameterValue; - } -} diff --git a/packages/core/src/node-execution-context/helpers/scheduling-helpers.ts b/packages/core/src/node-execution-context/helpers/scheduling-helpers.ts deleted file mode 100644 index e193f2beaf..0000000000 --- a/packages/core/src/node-execution-context/helpers/scheduling-helpers.ts +++ /dev/null @@ -1,20 +0,0 @@ -import type { CronExpression, Workflow, SchedulingFunctions } from 'n8n-workflow'; -import { Container } from 'typedi'; - -import { ScheduledTaskManager } from '@/ScheduledTaskManager'; - -export class SchedulingHelpers { - private readonly scheduledTaskManager = Container.get(ScheduledTaskManager); - - constructor(private readonly workflow: Workflow) {} - - get exported(): SchedulingFunctions { - return { - registerCron: this.registerCron.bind(this), - }; - } - - registerCron(cronExpression: CronExpression, onTick: () => void) { - this.scheduledTaskManager.registerCron(this.workflow, cronExpression, onTick); - } -} diff --git a/packages/core/src/node-execution-context/helpers/ssh-tunnel-helpers.ts b/packages/core/src/node-execution-context/helpers/ssh-tunnel-helpers.ts deleted file mode 100644 index f44df0e166..0000000000 --- a/packages/core/src/node-execution-context/helpers/ssh-tunnel-helpers.ts +++ /dev/null @@ -1,18 +0,0 @@ -import type { SSHCredentials, SSHTunnelFunctions } from 'n8n-workflow'; -import { Container } from 'typedi'; - -import { SSHClientsManager } from '@/SSHClientsManager'; - -export class SSHTunnelHelpers { - private readonly sshClientsManager = Container.get(SSHClientsManager); - - get exported(): SSHTunnelFunctions { - return { - getSSHClient: this.getSSHClient.bind(this), - }; - } - - async getSSHClient(credentials: SSHCredentials) { - return await this.sshClientsManager.getClient(credentials); - } -} diff --git a/packages/core/src/node-execution-context/hook-context.ts b/packages/core/src/node-execution-context/hook-context.ts index 
7cc6567779..5585d6b8f3 100644 --- a/packages/core/src/node-execution-context/hook-context.ts +++ b/packages/core/src/node-execution-context/hook-context.ts @@ -21,10 +21,10 @@ import { getCredentials, getNodeParameter, getNodeWebhookUrl, + getRequestHelperFunctions, getWebhookDescription, } from '@/NodeExecuteFunctions'; -import { RequestHelpers } from './helpers/request-helpers'; import { NodeExecutionContext } from './node-execution-context'; export class HookContext extends NodeExecutionContext implements IHookFunctions { @@ -40,7 +40,7 @@ export class HookContext extends NodeExecutionContext implements IHookFunctions ) { super(workflow, node, additionalData, mode); - this.helpers = new RequestHelpers(this, workflow, node, additionalData); + this.helpers = getRequestHelperFunctions(workflow, node, additionalData); } getActivationMode() { diff --git a/packages/core/src/node-execution-context/index.ts b/packages/core/src/node-execution-context/index.ts index a6397c60ce..d5c663b2ab 100644 --- a/packages/core/src/node-execution-context/index.ts +++ b/packages/core/src/node-execution-context/index.ts @@ -1,4 +1,5 @@ // eslint-disable-next-line import/no-cycle +export { ExecuteSingleContext } from './execute-single-context'; export { HookContext } from './hook-context'; export { LoadOptionsContext } from './load-options-context'; export { PollContext } from './poll-context'; diff --git a/packages/core/src/node-execution-context/load-options-context.ts b/packages/core/src/node-execution-context/load-options-context.ts index 98dd58210b..bb43d9c2e2 100644 --- a/packages/core/src/node-execution-context/load-options-context.ts +++ b/packages/core/src/node-execution-context/load-options-context.ts @@ -13,10 +13,14 @@ import type { import { extractValue } from '@/ExtractValue'; // eslint-disable-next-line import/no-cycle -import { getAdditionalKeys, getCredentials, getNodeParameter } from '@/NodeExecuteFunctions'; +import { + getAdditionalKeys, + getCredentials, + 
getNodeParameter, + getRequestHelperFunctions, + getSSHTunnelFunctions, +} from '@/NodeExecuteFunctions'; -import { RequestHelpers } from './helpers/request-helpers'; -import { SSHTunnelHelpers } from './helpers/ssh-tunnel-helpers'; import { NodeExecutionContext } from './node-execution-context'; export class LoadOptionsContext extends NodeExecutionContext implements ILoadOptionsFunctions { @@ -31,8 +35,8 @@ export class LoadOptionsContext extends NodeExecutionContext implements ILoadOpt super(workflow, node, additionalData, 'internal'); this.helpers = { - ...new RequestHelpers(this, workflow, node, additionalData).exported, - ...new SSHTunnelHelpers().exported, + ...getSSHTunnelFunctions(), + ...getRequestHelperFunctions(workflow, node, additionalData), }; } diff --git a/packages/core/src/node-execution-context/poll-context.ts b/packages/core/src/node-execution-context/poll-context.ts index 88e8caafc8..e3c0dd0cc8 100644 --- a/packages/core/src/node-execution-context/poll-context.ts +++ b/packages/core/src/node-execution-context/poll-context.ts @@ -16,14 +16,14 @@ import { ApplicationError, createDeferredPromise } from 'n8n-workflow'; // eslint-disable-next-line import/no-cycle import { getAdditionalKeys, + getBinaryHelperFunctions, getCredentials, getNodeParameter, + getRequestHelperFunctions, + getSchedulingFunctions, returnJsonArray, } from '@/NodeExecuteFunctions'; -import { BinaryHelpers } from './helpers/binary-helpers'; -import { RequestHelpers } from './helpers/request-helpers'; -import { SchedulingHelpers } from './helpers/scheduling-helpers'; import { NodeExecutionContext } from './node-execution-context'; const throwOnEmit = () => { @@ -51,9 +51,9 @@ export class PollContext extends NodeExecutionContext implements IPollFunctions this.helpers = { createDeferredPromise, returnJsonArray, - ...new BinaryHelpers(workflow, additionalData).exported, - ...new RequestHelpers(this, workflow, node, additionalData).exported, - ...new 
SchedulingHelpers(workflow).exported, + ...getRequestHelperFunctions(workflow, node, additionalData), + ...getBinaryHelperFunctions(additionalData, workflow.id), + ...getSchedulingFunctions(workflow), }; } diff --git a/packages/core/src/node-execution-context/trigger-context.ts b/packages/core/src/node-execution-context/trigger-context.ts index 8535ccfe6c..5ae6ce47df 100644 --- a/packages/core/src/node-execution-context/trigger-context.ts +++ b/packages/core/src/node-execution-context/trigger-context.ts @@ -16,15 +16,15 @@ import { ApplicationError, createDeferredPromise } from 'n8n-workflow'; // eslint-disable-next-line import/no-cycle import { getAdditionalKeys, + getBinaryHelperFunctions, getCredentials, getNodeParameter, + getRequestHelperFunctions, + getSchedulingFunctions, + getSSHTunnelFunctions, returnJsonArray, } from '@/NodeExecuteFunctions'; -import { BinaryHelpers } from './helpers/binary-helpers'; -import { RequestHelpers } from './helpers/request-helpers'; -import { SchedulingHelpers } from './helpers/scheduling-helpers'; -import { SSHTunnelHelpers } from './helpers/ssh-tunnel-helpers'; import { NodeExecutionContext } from './node-execution-context'; const throwOnEmit = () => { @@ -52,10 +52,10 @@ export class TriggerContext extends NodeExecutionContext implements ITriggerFunc this.helpers = { createDeferredPromise, returnJsonArray, - ...new BinaryHelpers(workflow, additionalData).exported, - ...new RequestHelpers(this, workflow, node, additionalData).exported, - ...new SchedulingHelpers(workflow).exported, - ...new SSHTunnelHelpers().exported, + ...getSSHTunnelFunctions(), + ...getRequestHelperFunctions(workflow, node, additionalData), + ...getBinaryHelperFunctions(additionalData, workflow.id), + ...getSchedulingFunctions(workflow), }; } diff --git a/packages/core/src/node-execution-context/webhook-context.ts b/packages/core/src/node-execution-context/webhook-context.ts index a7fa7203c8..4d3eef53e2 100644 --- 
a/packages/core/src/node-execution-context/webhook-context.ts +++ b/packages/core/src/node-execution-context/webhook-context.ts @@ -24,15 +24,15 @@ import { ApplicationError, createDeferredPromise } from 'n8n-workflow'; import { copyBinaryFile, getAdditionalKeys, + getBinaryHelperFunctions, getCredentials, getInputConnectionData, getNodeParameter, getNodeWebhookUrl, + getRequestHelperFunctions, returnJsonArray, } from '@/NodeExecuteFunctions'; -import { BinaryHelpers } from './helpers/binary-helpers'; -import { RequestHelpers } from './helpers/request-helpers'; import { NodeExecutionContext } from './node-execution-context'; export class WebhookContext extends NodeExecutionContext implements IWebhookFunctions { @@ -54,8 +54,8 @@ export class WebhookContext extends NodeExecutionContext implements IWebhookFunc this.helpers = { createDeferredPromise, returnJsonArray, - ...new BinaryHelpers(workflow, additionalData).exported, - ...new RequestHelpers(this, workflow, node, additionalData).exported, + ...getRequestHelperFunctions(workflow, node, additionalData), + ...getBinaryHelperFunctions(additionalData, workflow.id), }; this.nodeHelpers = { diff --git a/packages/design-system/package.json b/packages/design-system/package.json index d2f39c3e0e..94f1cba0aa 100644 --- a/packages/design-system/package.json +++ b/packages/design-system/package.json @@ -1,6 +1,6 @@ { "name": "n8n-design-system", - "version": "1.56.0", + "version": "1.57.0", "main": "src/main.ts", "import": "src/main.ts", "scripts": { diff --git a/packages/design-system/src/components/AskAssistantChat/AskAssistantChat.vue b/packages/design-system/src/components/AskAssistantChat/AskAssistantChat.vue index 78b4ccbf51..3cd62205f6 100644 --- a/packages/design-system/src/components/AskAssistantChat/AskAssistantChat.vue +++ b/packages/design-system/src/components/AskAssistantChat/AskAssistantChat.vue @@ -317,6 +317,7 @@ async function onCopyButtonClick(content: string, e: MouseEvent) {