diff --git a/CHANGELOG.md b/CHANGELOG.md index 7c2f569bf6..ab14dc462e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,50 @@ +# [1.63.0](https://github.com/n8n-io/n8n/compare/n8n@1.62.1...n8n@1.63.0) (2024-10-09) + + +### Bug Fixes + +* **Convert to File Node:** Convert to ICS start date defaults to now ([#11114](https://github.com/n8n-io/n8n/issues/11114)) ([1146c4e](https://github.com/n8n-io/n8n/commit/1146c4e98d8c85c15ac67fa1c3bfb731234531e3)) +* **core:** Allow loading nodes from multiple custom directories ([#11130](https://github.com/n8n-io/n8n/issues/11130)) ([1b84b0e](https://github.com/n8n-io/n8n/commit/1b84b0e5e7485d9f99d61a8ae3df49efadca0745)) +* **core:** Always set `startedAt` when executions start running ([#11098](https://github.com/n8n-io/n8n/issues/11098)) ([722f4a8](https://github.com/n8n-io/n8n/commit/722f4a8b771058800b992a482ad5f644b650960d)) +* **core:** Fix AI nodes not working with new partial execution flow ([#11055](https://github.com/n8n-io/n8n/issues/11055)) ([0eee5df](https://github.com/n8n-io/n8n/commit/0eee5dfd597817819dbe0463a63f671fde53432f)) +* **core:** Print errors that happen before the execution starts on the worker instead of just on the main instance ([#11099](https://github.com/n8n-io/n8n/issues/11099)) ([1d14557](https://github.com/n8n-io/n8n/commit/1d145574611661ecd9ab1a39d815c0ea915b9a1c)) +* **core:** Separate error handlers for main and worker ([#11091](https://github.com/n8n-io/n8n/issues/11091)) ([bb59cc7](https://github.com/n8n-io/n8n/commit/bb59cc71acc9e494e54abc8402d58db39e5a664e)) +* **editor:** Shorten overflowing Node Label in InputLabels on hover and focus ([#11110](https://github.com/n8n-io/n8n/issues/11110)) ([87a0b68](https://github.com/n8n-io/n8n/commit/87a0b68f9009c1c776d937c6ca62096e88c95ed6)) +* **editor:** Add safety to prevent undefined errors ([#11104](https://github.com/n8n-io/n8n/issues/11104)) ([565b117](https://github.com/n8n-io/n8n/commit/565b117a52f8eac9202a1a62c43daf78b293dcf8)) +* 
**editor:** Fix design system form element sizing ([#11040](https://github.com/n8n-io/n8n/issues/11040)) ([67c3453](https://github.com/n8n-io/n8n/commit/67c3453885bc619fedc8338a6dd0d8d66dead931)) +* **editor:** Fix getInitials when Intl.Segmenter is not supported ([#11103](https://github.com/n8n-io/n8n/issues/11103)) ([7e8955b](https://github.com/n8n-io/n8n/commit/7e8955b322b1d2c84c0f479a5977484d8d5e3135)) +* **editor:** Fix schema view in AI tools ([#11089](https://github.com/n8n-io/n8n/issues/11089)) ([09cfdbd](https://github.com/n8n-io/n8n/commit/09cfdbd1817eba46c935308880fe9f95ded252b0)) +* **editor:** Respect tag querystring filter when listing workflows ([#11029](https://github.com/n8n-io/n8n/issues/11029)) ([59c5ff6](https://github.com/n8n-io/n8n/commit/59c5ff61354302562ba5a2340c66811afdd1523b)) +* **editor:** Show previous nodes autocomplete in AI tool nodes ([#11111](https://github.com/n8n-io/n8n/issues/11111)) ([8566b3a](https://github.com/n8n-io/n8n/commit/8566b3a99939f45ac263830eee30d0d4ade9305c)) +* **editor:** Update Usage page for Community+ edition ([#11074](https://github.com/n8n-io/n8n/issues/11074)) ([3974981](https://github.com/n8n-io/n8n/commit/3974981ea5c67f6f2bbb90a96b405d9d0cfa21af)) +* Fix transaction handling for 'revert' command ([#11145](https://github.com/n8n-io/n8n/issues/11145)) ([a782336](https://github.com/n8n-io/n8n/commit/a7823367f13c3dba0c339eaafaad0199bd524b13)) +* Forbid access to files outside source control work directory ([#11152](https://github.com/n8n-io/n8n/issues/11152)) ([606eedb](https://github.com/n8n-io/n8n/commit/606eedbf1b302e153bd13b7cef80847711e3a9ee)) +* **Gitlab Node:** Author name and email not being set ([#11077](https://github.com/n8n-io/n8n/issues/11077)) ([fce1233](https://github.com/n8n-io/n8n/commit/fce1233b58624d502c9c68f4b32a4bb7d76f1814)) +* Incorrect error message on calling wrong webhook method ([#11093](https://github.com/n8n-io/n8n/issues/11093)) 
([d974b01](https://github.com/n8n-io/n8n/commit/d974b015d030c608158ff0c3fa3b7f4cbb8eadd3)) +* **n8n Form Trigger Node:** When clicking on a multiple choice label, the wrong one is selected ([#11059](https://github.com/n8n-io/n8n/issues/11059)) ([948edd1](https://github.com/n8n-io/n8n/commit/948edd1a047cf3dbddb3b0e9ec5de4bac3e97b9f)) +* **NASA Node:** Astronomy-Picture-Of-The-Day fails when it's YouTube video ([#11046](https://github.com/n8n-io/n8n/issues/11046)) ([c70969d](https://github.com/n8n-io/n8n/commit/c70969da2bcabeb33394073a69ccef208311461b)) +* **Postgres PGVector Store Node:** Fix filtering in retriever mode ([#11075](https://github.com/n8n-io/n8n/issues/11075)) ([dbd2ae1](https://github.com/n8n-io/n8n/commit/dbd2ae199506a24c2df4c983111a56f2adf63eee)) +* Show result of waiting execution on canvas after execution complete ([#10815](https://github.com/n8n-io/n8n/issues/10815)) ([90b4bfc](https://github.com/n8n-io/n8n/commit/90b4bfc472ef132d2280b175ae7410dfb8e549b2)) +* **Slack Node:** User id not sent correctly to API when updating user profile ([#11153](https://github.com/n8n-io/n8n/issues/11153)) ([ed9e61c](https://github.com/n8n-io/n8n/commit/ed9e61c46055d8e636a70c9c175d7d4ba596dd48)) + + +### Features + +* **core:** Introduce scoped logging ([#11127](https://github.com/n8n-io/n8n/issues/11127)) ([c68782c](https://github.com/n8n-io/n8n/commit/c68782c633b7ef6253ea705c5a222d4536491fd5)) +* **editor:** Add navigation dropdown component ([#11047](https://github.com/n8n-io/n8n/issues/11047)) ([e081fd1](https://github.com/n8n-io/n8n/commit/e081fd1f0b5a0700017a8dc92f013f0abdbad319)) +* **editor:** Add route for create / edit / share credentials ([#11134](https://github.com/n8n-io/n8n/issues/11134)) ([5697de4](https://github.com/n8n-io/n8n/commit/5697de4429c5d94f25ce1bd14c84fb4266ea47a7)) +* **editor:** Community+ enrollment ([#10776](https://github.com/n8n-io/n8n/issues/10776)) 
([92cf860](https://github.com/n8n-io/n8n/commit/92cf860f9f2994442facfddc758bc60f5cbec520)) +* Human in the loop ([#10675](https://github.com/n8n-io/n8n/issues/10675)) ([41228b4](https://github.com/n8n-io/n8n/commit/41228b472de11affc8cd0821284427c2c9e8b421)) +* **OpenAI Node:** Allow to specify thread ID for Assistant -> Message operation ([#11080](https://github.com/n8n-io/n8n/issues/11080)) ([6a2f9e7](https://github.com/n8n-io/n8n/commit/6a2f9e72959fb0e89006b69c31fbcee1ead1cde9)) +* Opt in to additional features on community for existing users ([#11166](https://github.com/n8n-io/n8n/issues/11166)) ([c2adfc8](https://github.com/n8n-io/n8n/commit/c2adfc85451c5103eaad068f882066fd36c4aebe)) + + +### Performance Improvements + +* **core:** Optimize worker healthchecks ([#11092](https://github.com/n8n-io/n8n/issues/11092)) ([19fb728](https://github.com/n8n-io/n8n/commit/19fb728da0839c57603e55da4e407715e6c5b081)) + + + ## [1.62.1](https://github.com/n8n-io/n8n/compare/n8n@1.61.0...n8n@1.62.1) (2024-10-02) diff --git a/cypress/composables/ndv.ts b/cypress/composables/ndv.ts index c3fab73f8c..5b3690e6a6 100644 --- a/cypress/composables/ndv.ts +++ b/cypress/composables/ndv.ts @@ -59,7 +59,7 @@ export function setCredentialByName(name: string) { export function clickCreateNewCredential() { openCredentialSelect(); - getCreateNewCredentialOption().click(); + getCreateNewCredentialOption().click({ force: true }); } export function clickGetBackToCanvas() { diff --git a/cypress/e2e/13-pinning.cy.ts b/cypress/e2e/13-pinning.cy.ts index 4558c44bca..4f48fa4529 100644 --- a/cypress/e2e/13-pinning.cy.ts +++ b/cypress/e2e/13-pinning.cy.ts @@ -1,3 +1,6 @@ +import { nanoid } from 'nanoid'; + +import { simpleWebhookCall, waitForWebhook } from './16-webhook-node.cy'; import { HTTP_REQUEST_NODE_NAME, MANUAL_TRIGGER_NODE_NAME, @@ -7,6 +10,7 @@ import { } from '../constants'; import { WorkflowPage, NDV } from '../pages'; import { errorToast } from '../pages/notifications'; +import { 
getVisiblePopper } from '../utils'; const workflowPage = new WorkflowPage(); const ndv = new NDV(); @@ -212,6 +216,42 @@ describe('Data pinning', () => { }, ); }); + + it('should show pinned data tooltip', () => { + const { callEndpoint } = simpleWebhookCall({ + method: 'GET', + webhookPath: nanoid(), + executeNow: false, + }); + + ndv.actions.close(); + workflowPage.actions.executeWorkflow(); + cy.wait(waitForWebhook); + + // hide other visible popper on workflow execute button + workflowPage.getters.canvasNodes().eq(0).click(); + + callEndpoint((response) => { + expect(response.status).to.eq(200); + getVisiblePopper().should('have.length', 1); + getVisiblePopper() + .eq(0) + .should( + 'have.text', + 'You can pin this output instead of waiting for a test event. Open node to do so.', + ); + }); + }); + + it('should not show pinned data tooltip', () => { + cy.createFixtureWorkflow('Pinned_webhook_node.json', 'Test'); + workflowPage.actions.executeWorkflow(); + + // hide other visible popper on workflow execute button + workflowPage.getters.canvasNodes().eq(0).click(); + + getVisiblePopper().should('have.length', 0); + }); }); function setExpressionOnStringValueInSet(expression: string) { diff --git a/cypress/e2e/16-webhook-node.cy.ts b/cypress/e2e/16-webhook-node.cy.ts index 9346004388..3d6c1049a2 100644 --- a/cypress/e2e/16-webhook-node.cy.ts +++ b/cypress/e2e/16-webhook-node.cy.ts @@ -9,7 +9,7 @@ const workflowPage = new WorkflowPage(); const ndv = new NDV(); const credentialsModal = new CredentialsModal(); -const waitForWebhook = 500; +export const waitForWebhook = 500; interface SimpleWebhookCallOptions { method: string; @@ -21,7 +21,7 @@ interface SimpleWebhookCallOptions { authentication?: string; } -const simpleWebhookCall = (options: SimpleWebhookCallOptions) => { +export const simpleWebhookCall = (options: SimpleWebhookCallOptions) => { const { authentication, method, @@ -65,15 +65,23 @@ const simpleWebhookCall = (options: SimpleWebhookCallOptions) => { 
getVisibleSelect().find('.option-headline').contains(responseData).click(); } + const callEndpoint = (cb: (response: Cypress.Response) => void) => { + cy.request(method, `${BACKEND_BASE_URL}/webhook-test/${webhookPath}`).then(cb); + }; + if (executeNow) { ndv.actions.execute(); cy.wait(waitForWebhook); - cy.request(method, `${BACKEND_BASE_URL}/webhook-test/${webhookPath}`).then((response) => { + callEndpoint((response) => { expect(response.status).to.eq(200); ndv.getters.outputPanel().contains('headers'); }); } + + return { + callEndpoint, + }; }; describe('Webhook Trigger node', () => { diff --git a/cypress/e2e/45-ai-assistant.cy.ts b/cypress/e2e/45-ai-assistant.cy.ts index 6c69a97708..3b4f61f660 100644 --- a/cypress/e2e/45-ai-assistant.cy.ts +++ b/cypress/e2e/45-ai-assistant.cy.ts @@ -78,11 +78,11 @@ describe('AI Assistant::enabled', () => { }); it('should start chat session from node error view', () => { - cy.intercept('POST', '/rest/ai-assistant/chat', { + cy.intercept('POST', '/rest/ai/chat', { statusCode: 200, - fixture: 'aiAssistant/simple_message_response.json', + fixture: 'aiAssistant/responses/simple_message_response.json', }).as('chatRequest'); - cy.createFixtureWorkflow('aiAssistant/test_workflow.json'); + cy.createFixtureWorkflow('aiAssistant/workflows/test_workflow.json'); wf.actions.openNode('Stop and Error'); ndv.getters.nodeExecuteButton().click(); aiAssistant.getters.nodeErrorViewAssistantButton().click(); @@ -96,11 +96,11 @@ describe('AI Assistant::enabled', () => { }); it('should render chat input correctly', () => { - cy.intercept('POST', '/rest/ai-assistant/chat', { + cy.intercept('POST', '/rest/ai/chat', { statusCode: 200, - fixture: 'aiAssistant/simple_message_response.json', + fixture: 'aiAssistant/responses/simple_message_response.json', }).as('chatRequest'); - cy.createFixtureWorkflow('aiAssistant/test_workflow.json'); + cy.createFixtureWorkflow('aiAssistant/workflows/test_workflow.json'); wf.actions.openNode('Stop and Error'); 
ndv.getters.nodeExecuteButton().click(); aiAssistant.getters.nodeErrorViewAssistantButton().click(); @@ -129,11 +129,11 @@ describe('AI Assistant::enabled', () => { }); it('should render and handle quick replies', () => { - cy.intercept('POST', '/rest/ai-assistant/chat', { + cy.intercept('POST', '/rest/ai/chat', { statusCode: 200, - fixture: 'aiAssistant/quick_reply_message_response.json', + fixture: 'aiAssistant/responses/quick_reply_message_response.json', }).as('chatRequest'); - cy.createFixtureWorkflow('aiAssistant/test_workflow.json'); + cy.createFixtureWorkflow('aiAssistant/workflows/test_workflow.json'); wf.actions.openNode('Stop and Error'); ndv.getters.nodeExecuteButton().click(); aiAssistant.getters.nodeErrorViewAssistantButton().click(); @@ -145,43 +145,12 @@ describe('AI Assistant::enabled', () => { aiAssistant.getters.chatMessagesUser().eq(0).should('contain.text', "Sure, let's do it"); }); - it('should show quick replies when node is executed after new suggestion', () => { - cy.intercept('POST', '/rest/ai-assistant/chat', (req) => { - req.reply((res) => { - if (['init-error-helper', 'message'].includes(req.body.payload.type)) { - res.send({ statusCode: 200, fixture: 'aiAssistant/simple_message_response.json' }); - } else if (req.body.payload.type === 'event') { - res.send({ statusCode: 200, fixture: 'aiAssistant/node_execution_error_response.json' }); - } else { - res.send({ statusCode: 500 }); - } - }); - }).as('chatRequest'); - cy.createFixtureWorkflow('aiAssistant/test_workflow.json'); - wf.actions.openNode('Edit Fields'); - ndv.getters.nodeExecuteButton().click(); - aiAssistant.getters.nodeErrorViewAssistantButton().click(); - cy.wait('@chatRequest'); - aiAssistant.getters.chatMessagesAssistant().should('have.length', 1); - ndv.getters.nodeExecuteButton().click(); - cy.wait('@chatRequest'); - // Respond 'Yes' to the quick reply (request new suggestion) - aiAssistant.getters.quickReplies().contains('Yes').click(); - cy.wait('@chatRequest'); - // No 
quick replies at this point - aiAssistant.getters.quickReplies().should('not.exist'); - ndv.getters.nodeExecuteButton().click(); - // But after executing the node again, quick replies should be shown - aiAssistant.getters.chatMessagesAssistant().should('have.length', 4); - aiAssistant.getters.quickReplies().should('have.length', 2); - }); - it('should warn before starting a new session', () => { - cy.intercept('POST', '/rest/ai-assistant/chat', { + cy.intercept('POST', '/rest/ai/chat', { statusCode: 200, - fixture: 'aiAssistant/simple_message_response.json', + fixture: 'aiAssistant/responses/simple_message_response.json', }).as('chatRequest'); - cy.createFixtureWorkflow('aiAssistant/test_workflow.json'); + cy.createFixtureWorkflow('aiAssistant/workflows/test_workflow.json'); wf.actions.openNode('Edit Fields'); ndv.getters.nodeExecuteButton().click(); aiAssistant.getters.nodeErrorViewAssistantButton().click({ force: true }); @@ -204,15 +173,15 @@ describe('AI Assistant::enabled', () => { }); it('should apply code diff to code node', () => { - cy.intercept('POST', '/rest/ai-assistant/chat', { + cy.intercept('POST', '/rest/ai/chat', { statusCode: 200, - fixture: 'aiAssistant/code_diff_suggestion_response.json', + fixture: 'aiAssistant/responses/code_diff_suggestion_response.json', }).as('chatRequest'); - cy.intercept('POST', '/rest/ai-assistant/chat/apply-suggestion', { + cy.intercept('POST', '/rest/ai/chat/apply-suggestion', { statusCode: 200, - fixture: 'aiAssistant/apply_code_diff_response.json', + fixture: 'aiAssistant/responses/apply_code_diff_response.json', }).as('applySuggestion'); - cy.createFixtureWorkflow('aiAssistant/test_workflow.json'); + cy.createFixtureWorkflow('aiAssistant/workflows/test_workflow.json'); wf.actions.openNode('Code'); ndv.getters.nodeExecuteButton().click(); aiAssistant.getters.nodeErrorViewAssistantButton().click({ force: true }); @@ -254,11 +223,11 @@ describe('AI Assistant::enabled', () => { }); it('should end chat session when 
`end_session` event is received', () => { - cy.intercept('POST', '/rest/ai-assistant/chat', { + cy.intercept('POST', '/rest/ai/chat', { statusCode: 200, - fixture: 'aiAssistant/end_session_response.json', + fixture: 'aiAssistant/responses/end_session_response.json', }).as('chatRequest'); - cy.createFixtureWorkflow('aiAssistant/test_workflow.json'); + cy.createFixtureWorkflow('aiAssistant/workflows/test_workflow.json'); wf.actions.openNode('Stop and Error'); ndv.getters.nodeExecuteButton().click(); aiAssistant.getters.nodeErrorViewAssistantButton().click(); @@ -268,12 +237,15 @@ describe('AI Assistant::enabled', () => { }); it('should reset session after it ended and sidebar is closed', () => { - cy.intercept('POST', '/rest/ai-assistant/chat', (req) => { + cy.intercept('POST', '/rest/ai/chat', (req) => { req.reply((res) => { if (['init-support-chat'].includes(req.body.payload.type)) { - res.send({ statusCode: 200, fixture: 'aiAssistant/simple_message_response.json' }); + res.send({ + statusCode: 200, + fixture: 'aiAssistant/responses/simple_message_response.json', + }); } else { - res.send({ statusCode: 200, fixture: 'aiAssistant/end_session_response.json' }); + res.send({ statusCode: 200, fixture: 'aiAssistant/responses/end_session_response.json' }); } }); }).as('chatRequest'); @@ -296,9 +268,9 @@ describe('AI Assistant::enabled', () => { }); it('Should not reset assistant session when workflow is saved', () => { - cy.intercept('POST', '/rest/ai-assistant/chat', { + cy.intercept('POST', '/rest/ai/chat', { statusCode: 200, - fixture: 'aiAssistant/simple_message_response.json', + fixture: 'aiAssistant/responses/simple_message_response.json', }).as('chatRequest'); wf.actions.addInitialNodeToCanvas(SCHEDULE_TRIGGER_NODE_NAME); aiAssistant.actions.openChat(); @@ -321,9 +293,9 @@ describe('AI Assistant Credential Help', () => { }); it('should start credential help from node credential', () => { - cy.intercept('POST', '/rest/ai-assistant/chat', { + cy.intercept('POST', 
'/rest/ai/chat', { statusCode: 200, - fixture: 'aiAssistant/simple_message_response.json', + fixture: 'aiAssistant/responses/simple_message_response.json', }).as('chatRequest'); wf.actions.addNodeToCanvas(SCHEDULE_TRIGGER_NODE_NAME); wf.actions.addNodeToCanvas(GMAIL_NODE_NAME); @@ -347,9 +319,9 @@ describe('AI Assistant Credential Help', () => { }); it('should start credential help from credential list', () => { - cy.intercept('POST', '/rest/ai-assistant/chat', { + cy.intercept('POST', '/rest/ai/chat', { statusCode: 200, - fixture: 'aiAssistant/simple_message_response.json', + fixture: 'aiAssistant/responses/simple_message_response.json', }).as('chatRequest'); cy.visit(credentialsPage.url); @@ -446,9 +418,9 @@ describe('General help', () => { }); it('assistant returns code snippet', () => { - cy.intercept('POST', '/rest/ai-assistant/chat', { + cy.intercept('POST', '/rest/ai/chat', { statusCode: 200, - fixture: 'aiAssistant/code_snippet_response.json', + fixture: 'aiAssistant/responses/code_snippet_response.json', }).as('chatRequest'); aiAssistant.getters.askAssistantFloatingButton().should('be.visible'); @@ -492,4 +464,65 @@ describe('General help', () => { ); aiAssistant.getters.codeSnippet().should('have.text', '{{$json.body.city}}'); }); + + it('should send current context to support chat', () => { + cy.createFixtureWorkflow('aiAssistant/workflows/simple_http_request_workflow.json'); + cy.intercept('POST', '/rest/ai/chat', { + statusCode: 200, + fixture: 'aiAssistant/responses/simple_message_response.json', + }).as('chatRequest'); + + aiAssistant.getters.askAssistantFloatingButton().click(); + aiAssistant.actions.sendMessage('What is wrong with this workflow?'); + + cy.wait('@chatRequest').then((interception) => { + const { body } = interception.request; + // Body should contain the current workflow context + expect(body.payload).to.have.property('context'); + expect(body.payload.context).to.have.property('currentView'); + 
expect(body.payload.context.currentView.name).to.equal('NodeViewExisting'); + expect(body.payload.context).to.have.property('currentWorkflow'); + }); + }); + + it('should not send workflow context if nothing changed', () => { + cy.createFixtureWorkflow('aiAssistant/workflows/simple_http_request_workflow.json'); + cy.intercept('POST', '/rest/ai/chat', { + statusCode: 200, + fixture: 'aiAssistant/responses/simple_message_response.json', + }).as('chatRequest'); + + aiAssistant.getters.askAssistantFloatingButton().click(); + aiAssistant.actions.sendMessage('What is wrong with this workflow?'); + cy.wait('@chatRequest'); + + // Send another message without changing workflow or executing any node + aiAssistant.actions.sendMessage('And now?'); + + cy.wait('@chatRequest').then((interception) => { + const { body } = interception.request; + // Workflow context should be empty + expect(body.payload).to.have.property('context'); + expect(body.payload.context).not.to.have.property('currentWorkflow'); + }); + + // Update http request node url + wf.actions.openNode('HTTP Request'); + ndv.actions.typeIntoParameterInput('url', 'https://example.com'); + ndv.actions.close(); + // Also execute the workflow + wf.actions.executeWorkflow(); + + // Send another message + aiAssistant.actions.sendMessage('What about now?'); + cy.wait('@chatRequest').then((interception) => { + const { body } = interception.request; + // Both workflow and execution context should be sent + expect(body.payload).to.have.property('context'); + expect(body.payload.context).to.have.property('currentWorkflow'); + expect(body.payload.context.currentWorkflow).not.to.be.empty; + expect(body.payload.context).to.have.property('executionData'); + expect(body.payload.context.executionData).not.to.be.empty; + }); + }); }); diff --git a/cypress/e2e/6-code-node.cy.ts b/cypress/e2e/6-code-node.cy.ts index 5b422b4589..5a6182c25a 100644 --- a/cypress/e2e/6-code-node.cy.ts +++ b/cypress/e2e/6-code-node.cy.ts @@ -91,28 +91,12 @@ 
return [] }); describe('Ask AI', () => { - it('tab should display based on experiment', () => { - WorkflowPage.actions.visit(); - cy.window().then((win) => { - win.featureFlags.override('011_ask_AI', 'control'); - WorkflowPage.actions.addInitialNodeToCanvas('Manual'); - WorkflowPage.actions.addNodeToCanvas('Code'); - WorkflowPage.actions.openNode('Code'); - - cy.getByTestId('code-node-tab-ai').should('not.exist'); - - ndv.actions.close(); - win.featureFlags.override('011_ask_AI', undefined); - WorkflowPage.actions.openNode('Code'); - cy.getByTestId('code-node-tab-ai').should('not.exist'); - }); - }); - describe('Enabled', () => { beforeEach(() => { + cy.enableFeature('askAi'); WorkflowPage.actions.visit(); - cy.window().then((win) => { - win.featureFlags.override('011_ask_AI', 'gpt3'); + + cy.window().then(() => { WorkflowPage.actions.addInitialNodeToCanvas('Manual'); WorkflowPage.actions.addNodeToCanvas('Code', true, true); }); @@ -157,7 +141,7 @@ return [] cy.getByTestId('ask-ai-prompt-input').type(prompt); - cy.intercept('POST', '/rest/ask-ai', { + cy.intercept('POST', '/rest/ai/ask-ai', { statusCode: 200, body: { data: { @@ -169,9 +153,7 @@ return [] cy.getByTestId('ask-ai-cta').click(); const askAiReq = cy.wait('@ask-ai'); - askAiReq - .its('request.body') - .should('have.keys', ['question', 'model', 'context', 'n8nVersion']); + askAiReq.its('request.body').should('have.keys', ['question', 'context', 'forNode']); askAiReq.its('context').should('have.keys', ['schema', 'ndvPushRef', 'pushRef']); @@ -195,7 +177,7 @@ return [] ]; handledCodes.forEach(({ code, message }) => { - cy.intercept('POST', '/rest/ask-ai', { + cy.intercept('POST', '/rest/ai/ask-ai', { statusCode: code, status: code, }).as('ask-ai'); diff --git a/cypress/fixtures/Pinned_webhook_node.json b/cypress/fixtures/Pinned_webhook_node.json new file mode 100644 index 0000000000..eb98b17351 --- /dev/null +++ b/cypress/fixtures/Pinned_webhook_node.json @@ -0,0 +1,39 @@ +{ + "nodes": [ + { + 
"parameters": { + "path": "FwrbSiaua2Xmvn6-Z-7CQ", + "options": {} + }, + "id": "8fcc7e5f-2cef-4938-9564-eea504c20aa0", + "name": "Webhook", + "type": "n8n-nodes-base.webhook", + "typeVersion": 2, + "position": [ + 360, + 220 + ], + "webhookId": "9c778f2a-e882-46ed-a0e4-c8e2f76ccd65" + } + ], + "connections": {}, + "pinData": { + "Webhook": [ + { + "headers": { + "connection": "keep-alive", + "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/129.0.0.0 Safari/537.36", + "accept": "*/*", + "cookie": "n8n-auth=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6IjNiM2FhOTE5LWRhZDgtNDE5MS1hZWZiLTlhZDIwZTZkMjJjNiIsImhhc2giOiJ1ZVAxR1F3U2paIiwiaWF0IjoxNzI4OTE1NTQyLCJleHAiOjE3Mjk1MjAzNDJ9.fV02gpUnSiUoMxHwfB0npBjcjct7Mv9vGfj-jRTT3-I", + "host": "localhost:5678", + "accept-encoding": "gzip, deflate" + }, + "params": {}, + "query": {}, + "body": {}, + "webhookUrl": "http://localhost:5678/webhook-test/FwrbSiaua2Xmvn6-Z-7CQ", + "executionMode": "test" + } + ] + } +} diff --git a/cypress/fixtures/aiAssistant/apply_code_diff_response.json b/cypress/fixtures/aiAssistant/responses/apply_code_diff_response.json similarity index 100% rename from cypress/fixtures/aiAssistant/apply_code_diff_response.json rename to cypress/fixtures/aiAssistant/responses/apply_code_diff_response.json diff --git a/cypress/fixtures/aiAssistant/code_diff_suggestion_response.json b/cypress/fixtures/aiAssistant/responses/code_diff_suggestion_response.json similarity index 100% rename from cypress/fixtures/aiAssistant/code_diff_suggestion_response.json rename to cypress/fixtures/aiAssistant/responses/code_diff_suggestion_response.json diff --git a/cypress/fixtures/aiAssistant/code_snippet_response.json b/cypress/fixtures/aiAssistant/responses/code_snippet_response.json similarity index 100% rename from cypress/fixtures/aiAssistant/code_snippet_response.json rename to cypress/fixtures/aiAssistant/responses/code_snippet_response.json diff --git 
a/cypress/fixtures/aiAssistant/end_session_response.json b/cypress/fixtures/aiAssistant/responses/end_session_response.json similarity index 100% rename from cypress/fixtures/aiAssistant/end_session_response.json rename to cypress/fixtures/aiAssistant/responses/end_session_response.json diff --git a/cypress/fixtures/aiAssistant/node_execution_error_response.json b/cypress/fixtures/aiAssistant/responses/node_execution_error_response.json similarity index 100% rename from cypress/fixtures/aiAssistant/node_execution_error_response.json rename to cypress/fixtures/aiAssistant/responses/node_execution_error_response.json diff --git a/cypress/fixtures/aiAssistant/quick_reply_message_response.json b/cypress/fixtures/aiAssistant/responses/quick_reply_message_response.json similarity index 100% rename from cypress/fixtures/aiAssistant/quick_reply_message_response.json rename to cypress/fixtures/aiAssistant/responses/quick_reply_message_response.json diff --git a/cypress/fixtures/aiAssistant/simple_message_response.json b/cypress/fixtures/aiAssistant/responses/simple_message_response.json similarity index 100% rename from cypress/fixtures/aiAssistant/simple_message_response.json rename to cypress/fixtures/aiAssistant/responses/simple_message_response.json diff --git a/cypress/fixtures/aiAssistant/workflows/simple_http_request_workflow.json b/cypress/fixtures/aiAssistant/workflows/simple_http_request_workflow.json new file mode 100644 index 0000000000..28a0ee5359 --- /dev/null +++ b/cypress/fixtures/aiAssistant/workflows/simple_http_request_workflow.json @@ -0,0 +1,35 @@ +{ + "nodes": [ + { + "parameters": {}, + "id": "298d3dc9-5e99-4b3f-919e-05fdcdfbe2d0", + "name": "When clicking ‘Test workflow’", + "type": "n8n-nodes-base.manualTrigger", + "typeVersion": 1, + "position": [360, 220] + }, + { + "parameters": { + "options": {} + }, + "id": "65c32346-e939-4ec7-88a9-1f9184e2258d", + "name": "HTTP Request", + "type": "n8n-nodes-base.httpRequest", + "typeVersion": 4.2, + 
"position": [580, 220] + } + ], + "connections": { + "When clicking ‘Test workflow’": { + "main": [ + [ + { + "node": "HTTP Request", + "type": "main", + "index": 0 + } + ] + ] + } + } +} diff --git a/cypress/fixtures/aiAssistant/test_workflow.json b/cypress/fixtures/aiAssistant/workflows/test_workflow.json similarity index 100% rename from cypress/fixtures/aiAssistant/test_workflow.json rename to cypress/fixtures/aiAssistant/workflows/test_workflow.json diff --git a/cypress/pages/ndv.ts b/cypress/pages/ndv.ts index b775deec6d..cae1fb47b0 100644 --- a/cypress/pages/ndv.ts +++ b/cypress/pages/ndv.ts @@ -156,7 +156,7 @@ export class NDV extends BasePage { this.getters.nodeExecuteButton().first().click(); }, close: () => { - this.getters.backToCanvas().click(); + this.getters.backToCanvas().click({ force: true }); }, openInlineExpressionEditor: () => { cy.contains('Expression').invoke('show').click(); diff --git a/docker/images/n8n-custom/Dockerfile b/docker/images/n8n-custom/Dockerfile index ba271017d1..a533cdbdab 100644 --- a/docker/images/n8n-custom/Dockerfile +++ b/docker/images/n8n-custom/Dockerfile @@ -31,6 +31,30 @@ WORKDIR /home/node COPY --from=builder /compiled /usr/local/lib/node_modules/n8n COPY docker/images/n8n/docker-entrypoint.sh / +# Setup the Task Runner Launcher +ARG TARGETPLATFORM +ARG LAUNCHER_VERSION=0.1.1 +ENV N8N_RUNNERS_USE_LAUNCHER=true \ + N8N_RUNNERS_LAUNCHER_PATH=/usr/local/bin/task-runner-launcher +COPY docker/images/n8n/n8n-task-runners.json /etc/n8n-task-runners.json +# First, download, verify, then extract the launcher binary +# Second, chmod with 4555 to allow the use of setuid +# Third, create a new user and group to execute the Task Runners under +RUN \ + if [[ "$TARGETPLATFORM" = "linux/amd64" ]]; then export ARCH_NAME="x86_64"; \ + elif [[ "$TARGETPLATFORM" = "linux/arm64" ]]; then export ARCH_NAME="aarch64"; fi; \ + mkdir /launcher-temp && \ + cd /launcher-temp && \ + wget 
https://github.com/n8n-io/task-runner-launcher/releases/download/${LAUNCHER_VERSION}/task-runner-launcher-$ARCH_NAME-unknown-linux-musl.zip && \ + wget https://github.com/n8n-io/task-runner-launcher/releases/download/${LAUNCHER_VERSION}/task-runner-launcher-$ARCH_NAME-unknown-linux-musl.sha256 && \ + sha256sum -c task-runner-launcher-$ARCH_NAME-unknown-linux-musl.sha256 && \ + unzip -d $(dirname ${N8N_RUNNERS_LAUNCHER_PATH}) task-runner-launcher-$ARCH_NAME-unknown-linux-musl.zip task-runner-launcher && \ + cd - && \ + rm -r /launcher-temp && \ + chmod 4555 ${N8N_RUNNERS_LAUNCHER_PATH} && \ + addgroup -g 2000 task-runner && \ + adduser -D -u 2000 -g "Task Runner User" -G task-runner task-runner + RUN \ cd /usr/local/lib/node_modules/n8n && \ npm rebuild sqlite3 && \ diff --git a/docker/images/n8n/Dockerfile b/docker/images/n8n/Dockerfile index 2da1bc1f47..08e031cf5f 100644 --- a/docker/images/n8n/Dockerfile +++ b/docker/images/n8n/Dockerfile @@ -22,6 +22,30 @@ RUN set -eux; \ find /usr/local/lib/node_modules/n8n -type f -name "*.ts" -o -name "*.js.map" -o -name "*.vue" | xargs rm -f && \ rm -rf /root/.npm +# Setup the Task Runner Launcher +ARG TARGETPLATFORM +ARG LAUNCHER_VERSION=0.1.1 +ENV N8N_RUNNERS_USE_LAUNCHER=true \ + N8N_RUNNERS_LAUNCHER_PATH=/usr/local/bin/task-runner-launcher +COPY n8n-task-runners.json /etc/n8n-task-runners.json +# First, download, verify, then extract the launcher binary +# Second, chmod with 4555 to allow the use of setuid +# Third, create a new user and group to execute the Task Runners under +RUN \ + if [[ "$TARGETPLATFORM" = "linux/amd64" ]]; then export ARCH_NAME="x86_64"; \ + elif [[ "$TARGETPLATFORM" = "linux/arm64" ]]; then export ARCH_NAME="aarch64"; fi; \ + mkdir /launcher-temp && \ + cd /launcher-temp && \ + wget https://github.com/n8n-io/task-runner-launcher/releases/download/${LAUNCHER_VERSION}/task-runner-launcher-$ARCH_NAME-unknown-linux-musl.zip && \ + wget 
https://github.com/n8n-io/task-runner-launcher/releases/download/${LAUNCHER_VERSION}/task-runner-launcher-$ARCH_NAME-unknown-linux-musl.sha256 && \ + sha256sum -c task-runner-launcher-$ARCH_NAME-unknown-linux-musl.sha256 && \ + unzip -d $(dirname ${N8N_RUNNERS_LAUNCHER_PATH}) task-runner-launcher-$ARCH_NAME-unknown-linux-musl.zip task-runner-launcher && \ + cd - && \ + rm -r /launcher-temp && \ + chmod 4555 ${N8N_RUNNERS_LAUNCHER_PATH} && \ + addgroup -g 2000 task-runner && \ + adduser -D -u 2000 -g "Task Runner User" -G task-runner task-runner + COPY docker-entrypoint.sh / RUN \ diff --git a/docker/images/n8n/n8n-task-runners.json b/docker/images/n8n/n8n-task-runners.json new file mode 100644 index 0000000000..2dd65b67c8 --- /dev/null +++ b/docker/images/n8n/n8n-task-runners.json @@ -0,0 +1,19 @@ +{ + "task-runners": [ + { + "runner-type": "javascript", + "workdir": "/home/task-runner", + "command": "/usr/local/bin/node", + "args": ["/usr/local/lib/node_modules/n8n/node_modules/@n8n/task-runner/dist/start.js"], + "allowed-env": [ + "PATH", + "N8N_RUNNERS_GRANT_TOKEN", + "N8N_RUNNERS_N8N_URI", + "NODE_FUNCTION_ALLOW_BUILTIN", + "NODE_FUNCTION_ALLOW_EXTERNAL" + ], + "uid": 2000, + "gid": 2000 + } + ] +} diff --git a/n8n.code-workspace b/n8n.code-workspace deleted file mode 100644 index 8f4183e8f0..0000000000 --- a/n8n.code-workspace +++ /dev/null @@ -1,7 +0,0 @@ -{ - "folders": [ - { - "path": "." 
- } - ] -} diff --git a/package.json b/package.json index feda1c4701..ee888f53dd 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "n8n-monorepo", - "version": "1.62.1", + "version": "1.63.0", "private": true, "engines": { "node": ">=20.15", @@ -69,14 +69,15 @@ ], "overrides": { "@types/node": "^18.16.16", - "chokidar": "3.5.2", - "esbuild": "^0.20.2", + "chokidar": "^4.0.1", + "esbuild": "^0.24.0", "formidable": "3.5.1", "pug": "^3.0.3", "semver": "^7.5.4", "tslib": "^2.6.2", "tsconfig-paths": "^4.2.0", "typescript": "^5.6.2", + "vue-tsc": "^2.1.6", "ws": ">=8.17.1" }, "patchedDependencies": { diff --git a/packages/@n8n/api-types/package.json b/packages/@n8n/api-types/package.json index ec2bf1bd32..e2614bcf68 100644 --- a/packages/@n8n/api-types/package.json +++ b/packages/@n8n/api-types/package.json @@ -1,6 +1,6 @@ { "name": "@n8n/api-types", - "version": "0.3.0", + "version": "0.4.0", "scripts": { "clean": "rimraf dist .turbo", "dev": "pnpm watch", diff --git a/packages/@n8n/api-types/src/frontend-settings.ts b/packages/@n8n/api-types/src/frontend-settings.ts index c70826d4d1..5084344aeb 100644 --- a/packages/@n8n/api-types/src/frontend-settings.ts +++ b/packages/@n8n/api-types/src/frontend-settings.ts @@ -107,6 +107,9 @@ export interface FrontendSettings { aiAssistant: { enabled: boolean; }; + askAi: { + enabled: boolean; + }; deployment: { type: string; }; @@ -154,9 +157,6 @@ export interface FrontendSettings { banners: { dismissed: string[]; }; - ai: { - enabled: boolean; - }; workflowHistory: { pruneTime: number; licensePruneTime: number; diff --git a/packages/@n8n/api-types/src/scaling.ts b/packages/@n8n/api-types/src/scaling.ts index 8e15f06804..f0c3627e84 100644 --- a/packages/@n8n/api-types/src/scaling.ts +++ b/packages/@n8n/api-types/src/scaling.ts @@ -11,7 +11,7 @@ export type RunningJobSummary = { }; export type WorkerStatus = { - workerId: string; + senderId: string; runningJobsSummary: RunningJobSummary[]; freeMem: number; 
totalMem: number; diff --git a/packages/@n8n/benchmark/package.json b/packages/@n8n/benchmark/package.json index 2b6979fa45..98edd1dabd 100644 --- a/packages/@n8n/benchmark/package.json +++ b/packages/@n8n/benchmark/package.json @@ -1,6 +1,6 @@ { "name": "@n8n/n8n-benchmark", - "version": "1.6.1", + "version": "1.7.0", "description": "Cli for running benchmark tests for n8n", "main": "dist/index", "scripts": { diff --git a/packages/@n8n/chat/package.json b/packages/@n8n/chat/package.json index 6f3e74fc71..24d6cf6f1c 100644 --- a/packages/@n8n/chat/package.json +++ b/packages/@n8n/chat/package.json @@ -1,6 +1,6 @@ { "name": "@n8n/chat", - "version": "0.27.1", + "version": "0.28.0", "scripts": { "dev": "pnpm run storybook", "build": "pnpm build:vite && pnpm build:bundle", @@ -50,7 +50,7 @@ "unplugin-icons": "^0.19.0", "vite": "catalog:frontend", "vitest": "catalog:frontend", - "vite-plugin-dts": "^3.9.1", + "vite-plugin-dts": "^4.2.3", "vue-tsc": "catalog:frontend" }, "files": [ diff --git a/packages/@n8n/config/package.json b/packages/@n8n/config/package.json index 6d989f8208..10c8cbcf5b 100644 --- a/packages/@n8n/config/package.json +++ b/packages/@n8n/config/package.json @@ -1,6 +1,6 @@ { "name": "@n8n/config", - "version": "1.12.1", + "version": "1.13.0", "scripts": { "clean": "rimraf dist .turbo", "dev": "pnpm watch", diff --git a/packages/@n8n/config/src/configs/logging.config.ts b/packages/@n8n/config/src/configs/logging.config.ts index 2f68416df4..c29bb6d5a8 100644 --- a/packages/@n8n/config/src/configs/logging.config.ts +++ b/packages/@n8n/config/src/configs/logging.config.ts @@ -1,6 +1,17 @@ import { Config, Env, Nested } from '../decorators'; import { StringArray } from '../utils'; +/** + * Scopes (areas of functionality) to filter logs by. 
+ * + * `executions` -> execution lifecycle + * `license` -> license SDK + * `scaling` -> scaling mode + */ +export const LOG_SCOPES = ['executions', 'license', 'scaling'] as const; + +export type LogScope = (typeof LOG_SCOPES)[number]; + @Config class FileLoggingConfig { /** @@ -44,4 +55,19 @@ export class LoggingConfig { @Nested file: FileLoggingConfig; + + /** + * Scopes to filter logs by. Nothing is filtered by default. + * + * Currently supported log scopes: + * - `executions` + * - `license` + * - `scaling` + * + * @example + * `N8N_LOG_SCOPES=license` + * `N8N_LOG_SCOPES=license,executions` + */ + @Env('N8N_LOG_SCOPES') + scopes: StringArray = []; } diff --git a/packages/@n8n/config/src/configs/runners.config.ts b/packages/@n8n/config/src/configs/runners.config.ts index e7335e8827..14d1b01d1a 100644 --- a/packages/@n8n/config/src/configs/runners.config.ts +++ b/packages/@n8n/config/src/configs/runners.config.ts @@ -19,4 +19,14 @@ export class TaskRunnersConfig { /** IP address task runners server should listen on */ @Env('N8N_RUNNERS_SERVER_LISTEN_ADDRESS') listen_address: string = '127.0.0.1'; + + @Env('N8N_RUNNERS_USE_LAUNCHER') + useLauncher: boolean = false; + + @Env('N8N_RUNNERS_LAUNCHER_PATH') + launcherPath: string = ''; + + /** Which task runner to launch from the config */ + @Env('N8N_RUNNERS_LAUNCHER_RUNNER') + launcherRunner: string = 'javascript'; } diff --git a/packages/@n8n/config/src/index.ts b/packages/@n8n/config/src/index.ts index 3290cac5bb..9044ffa0fa 100644 --- a/packages/@n8n/config/src/index.ts +++ b/packages/@n8n/config/src/index.ts @@ -18,6 +18,9 @@ import { VersionNotificationsConfig } from './configs/version-notifications.conf import { WorkflowsConfig } from './configs/workflows.config'; import { Config, Env, Nested } from './decorators'; +export { LOG_SCOPES } from './configs/logging.config'; +export type { LogScope } from './configs/logging.config'; + @Config export class GlobalConfig { @Nested diff --git 
a/packages/@n8n/config/test/config.test.ts b/packages/@n8n/config/test/config.test.ts index a0952d0dd0..56f3bc6de7 100644 --- a/packages/@n8n/config/test/config.test.ts +++ b/packages/@n8n/config/test/config.test.ts @@ -228,6 +228,9 @@ describe('GlobalConfig', () => { authToken: '', listen_address: '127.0.0.1', port: 5679, + useLauncher: false, + launcherPath: '', + launcherRunner: 'javascript', }, sentry: { backendDsn: '', @@ -241,13 +244,13 @@ describe('GlobalConfig', () => { fileSizeMax: 16, location: 'logs/n8n.log', }, + scopes: [], }, }; it('should use all default values when no env variables are defined', () => { process.env = {}; const config = Container.get(GlobalConfig); - expect(deepCopy(config)).toEqual(defaultConfig); expect(mockFs.readFileSync).not.toHaveBeenCalled(); }); diff --git a/packages/@n8n/nodes-langchain/nodes/tools/ToolHttpRequest/ToolHttpRequest.node.ts b/packages/@n8n/nodes-langchain/nodes/tools/ToolHttpRequest/ToolHttpRequest.node.ts index 32f6be42e7..aa294edadd 100644 --- a/packages/@n8n/nodes-langchain/nodes/tools/ToolHttpRequest/ToolHttpRequest.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/tools/ToolHttpRequest/ToolHttpRequest.node.ts @@ -281,6 +281,7 @@ export class ToolHttpRequest implements INodeType { 'User-Agent': undefined, }, body: {}, + returnFullResponse: true, }; const authentication = this.getNodeParameter('authentication', itemIndex, 'none') as diff --git a/packages/@n8n/nodes-langchain/nodes/tools/ToolHttpRequest/test/ToolHttpRequest.node.test.ts b/packages/@n8n/nodes-langchain/nodes/tools/ToolHttpRequest/test/ToolHttpRequest.node.test.ts new file mode 100644 index 0000000000..161aa140f5 --- /dev/null +++ b/packages/@n8n/nodes-langchain/nodes/tools/ToolHttpRequest/test/ToolHttpRequest.node.test.ts @@ -0,0 +1,165 @@ +import get from 'lodash/get'; +import type { IDataObject, IExecuteFunctions } from 'n8n-workflow'; +import { jsonParse } from 'n8n-workflow'; + +import type { N8nTool } from '../../../../utils/N8nTool'; 
+import { ToolHttpRequest } from '../ToolHttpRequest.node'; + +const createExecuteFunctionsMock = (parameters: IDataObject, requestMock: any) => { + const nodeParameters = parameters; + + return { + getNodeParameter(parameter: string) { + return get(nodeParameters, parameter); + }, + getNode() { + return { + name: 'HTTP Request', + }; + }, + getInputData() { + return [{ json: {} }]; + }, + getWorkflow() { + return { + name: 'Test Workflow', + }; + }, + continueOnFail() { + return false; + }, + addInputData() { + return { index: 0 }; + }, + addOutputData() { + return; + }, + helpers: { + httpRequest: requestMock, + }, + } as unknown as IExecuteFunctions; +}; + +describe('ToolHttpRequest', () => { + let httpTool: ToolHttpRequest; + let mockRequest: jest.Mock; + + describe('Binary response', () => { + beforeEach(() => { + httpTool = new ToolHttpRequest(); + mockRequest = jest.fn(); + }); + + it('should return the error when receiving a binary response', async () => { + mockRequest.mockResolvedValue({ + body: Buffer.from(''), + headers: { + 'content-type': 'image/jpeg', + }, + }); + + const { response } = await httpTool.supplyData.call( + createExecuteFunctionsMock( + { + method: 'GET', + url: 'https://httpbin.org/image/jpeg', + options: {}, + placeholderDefinitions: { + values: [], + }, + }, + mockRequest, + ), + 0, + ); + + const res = await (response as N8nTool).invoke(''); + + expect(res).toContain('error'); + expect(res).toContain('Binary data is not supported'); + }); + + it('should return the response text when receiving a text response', async () => { + mockRequest.mockResolvedValue({ + body: 'Hello World', + headers: { + 'content-type': 'text/plain', + }, + }); + + const { response } = await httpTool.supplyData.call( + createExecuteFunctionsMock( + { + method: 'GET', + url: 'https://httpbin.org/text/plain', + options: {}, + placeholderDefinitions: { + values: [], + }, + }, + mockRequest, + ), + 0, + ); + + const res = await (response as N8nTool).invoke(''); + 
expect(res).toEqual('Hello World'); + }); + + it('should return the response text when receiving a text response with a charset', async () => { + mockRequest.mockResolvedValue({ + body: 'こんにちは世界', + headers: { + 'content-type': 'text/plain; charset=iso-2022-jp', + }, + }); + + const { response } = await httpTool.supplyData.call( + createExecuteFunctionsMock( + { + method: 'GET', + url: 'https://httpbin.org/text/plain', + options: {}, + placeholderDefinitions: { + values: [], + }, + }, + mockRequest, + ), + 0, + ); + + const res = await (response as N8nTool).invoke(''); + expect(res).toEqual('こんにちは世界'); + }); + + it('should return the response object when receiving a JSON response', async () => { + const mockJson = { hello: 'world' }; + + mockRequest.mockResolvedValue({ + body: mockJson, + headers: { + 'content-type': 'application/json', + }, + }); + + const { response } = await httpTool.supplyData.call( + createExecuteFunctionsMock( + { + method: 'GET', + url: 'https://httpbin.org/json', + options: {}, + placeholderDefinitions: { + values: [], + }, + }, + mockRequest, + ), + 0, + ); + + const res = await (response as N8nTool).invoke(''); + expect(jsonParse(res)).toEqual(mockJson); + }); + }); +}); diff --git a/packages/@n8n/nodes-langchain/nodes/tools/ToolHttpRequest/utils.ts b/packages/@n8n/nodes-langchain/nodes/tools/ToolHttpRequest/utils.ts index c06a869a8d..e637251a74 100644 --- a/packages/@n8n/nodes-langchain/nodes/tools/ToolHttpRequest/utils.ts +++ b/packages/@n8n/nodes-langchain/nodes/tools/ToolHttpRequest/utils.ts @@ -1,3 +1,12 @@ +import { Readability } from '@mozilla/readability'; +import cheerio from 'cheerio'; +import { convert } from 'html-to-text'; +import { JSDOM } from 'jsdom'; +import get from 'lodash/get'; +import set from 'lodash/set'; +import unset from 'lodash/unset'; +import * as mime from 'mime-types'; +import { getOAuth2AdditionalParameters } from 'n8n-nodes-base/dist/nodes/HttpRequest/GenericFunctions'; import type { IExecuteFunctions, 
IDataObject, @@ -7,20 +16,8 @@ import type { NodeApiError, } from 'n8n-workflow'; import { NodeConnectionType, NodeOperationError, jsonParse } from 'n8n-workflow'; - -import { getOAuth2AdditionalParameters } from 'n8n-nodes-base/dist/nodes/HttpRequest/GenericFunctions'; - -import set from 'lodash/set'; -import get from 'lodash/get'; -import unset from 'lodash/unset'; - -import cheerio from 'cheerio'; -import { convert } from 'html-to-text'; - -import { Readability } from '@mozilla/readability'; -import { JSDOM } from 'jsdom'; import { z } from 'zod'; -import type { DynamicZodObject } from '../../../types/zod.types'; + import type { ParameterInputType, ParametersValues, @@ -29,6 +26,7 @@ import type { SendIn, ToolParameter, } from './interfaces'; +import type { DynamicZodObject } from '../../../types/zod.types'; const genericCredentialRequest = async (ctx: IExecuteFunctions, itemIndex: number) => { const genericType = ctx.getNodeParameter('genericAuthType', itemIndex) as string; @@ -176,6 +174,7 @@ const htmlOptimizer = (ctx: IExecuteFunctions, itemIndex: number, maxLength: num ); } const returnData: string[] = []; + const html = cheerio.load(response); const htmlElements = html(cssSelector); @@ -574,6 +573,7 @@ export const configureToolFunction = ( // Clone options and rawRequestOptions to avoid mutating the original objects const options: IHttpRequestOptions | null = structuredClone(requestOptions); const clonedRawRequestOptions: { [key: string]: string } = structuredClone(rawRequestOptions); + let fullResponse: any; let response: string = ''; let executionError: Error | undefined = undefined; @@ -732,8 +732,6 @@ export const configureToolFunction = ( } } } catch (error) { - console.error(error); - const errorMessage = 'Input provided by model is not valid'; if (error instanceof NodeOperationError) { @@ -749,11 +747,29 @@ export const configureToolFunction = ( if (options) { try { - response = optimizeResponse(await httpRequest(options)); + fullResponse = await 
httpRequest(options); } catch (error) { const httpCode = (error as NodeApiError).httpCode; response = `${httpCode ? `HTTP ${httpCode} ` : ''}There was an error: "${error.message}"`; } + + if (!response) { + try { + // Check if the response is binary data + if (fullResponse?.headers?.['content-type']) { + const contentType = fullResponse.headers['content-type'] as string; + const mimeType = contentType.split(';')[0].trim(); + + if (mime.charset(mimeType) !== 'UTF-8') { + throw new NodeOperationError(ctx.getNode(), 'Binary data is not supported'); + } + } + + response = optimizeResponse(fullResponse.body); + } catch (error) { + response = `There was an error: "${error.message}"`; + } + } } if (typeof response !== 'string') { diff --git a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/text/message.operation.ts b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/text/message.operation.ts index 47be9a9abc..231fdb4b77 100644 --- a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/text/message.operation.ts +++ b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/text/message.operation.ts @@ -278,7 +278,7 @@ export async function execute(this: IExecuteFunctions, i: number): Promise { - const jsTaskRunner = new JsTaskRunner('taskType', 'ws://localhost', 'grantToken', 1); + const createRunnerWithOpts = (opts: Partial = {}) => + new JsTaskRunner({ + wsUrl: 'ws://localhost', + grantToken: 'grantToken', + maxConcurrency: 1, + ...opts, + }); + + const defaultTaskRunner = createRunnerWithOpts(); const execTaskWithParams = async ({ task, taskData, + runner = defaultTaskRunner, }: { task: Task; taskData: AllCodeTaskData; + runner?: JsTaskRunner; }) => { - jest.spyOn(jsTaskRunner, 'requestData').mockResolvedValue(taskData); - return await jsTaskRunner.executeTask(task); + jest.spyOn(runner, 'requestData').mockResolvedValue(taskData); + return await runner.executeTask(task); }; afterEach(() => { jest.restoreAllMocks(); }); + const 
executeForAllItems = async ({ + code, + inputItems, + settings, + runner, + }: { + code: string; + inputItems: IDataObject[]; + settings?: Partial; + runner?: JsTaskRunner; + }) => { + return await execTaskWithParams({ + task: newTaskWithSettings({ + code, + nodeMode: 'runOnceForAllItems', + ...settings, + }), + taskData: newAllCodeTaskData(inputItems.map(wrapIntoJson)), + runner, + }); + }; + + const executeForEachItem = async ({ + code, + inputItems, + settings, + runner, + }: { + code: string; + inputItems: IDataObject[]; + settings?: Partial; + + runner?: JsTaskRunner; + }) => { + return await execTaskWithParams({ + task: newTaskWithSettings({ + code, + nodeMode: 'runOnceForEachItem', + ...settings, + }), + taskData: newAllCodeTaskData(inputItems.map(wrapIntoJson)), + runner, + }); + }; + describe('console', () => { test.each<[CodeExecutionMode]>([['runOnceForAllItems'], ['runOnceForEachItem']])( 'should make an rpc call for console log in %s mode', async (nodeMode) => { - jest.spyOn(jsTaskRunner, 'makeRpcCall').mockResolvedValue(undefined); + jest.spyOn(defaultTaskRunner, 'makeRpcCall').mockResolvedValue(undefined); const task = newTaskWithSettings({ code: "console.log('Hello', 'world!'); return {}", nodeMode, @@ -45,29 +105,185 @@ describe('JsTaskRunner', () => { taskData: newAllCodeTaskData([wrapIntoJson({})]), }); - expect(jsTaskRunner.makeRpcCall).toHaveBeenCalledWith(task.taskId, 'logNodeOutput', [ + expect(defaultTaskRunner.makeRpcCall).toHaveBeenCalledWith(task.taskId, 'logNodeOutput', [ 'Hello world!', ]); }, ); }); - describe('runOnceForAllItems', () => { - const executeForAllItems = async ({ - code, - inputItems, - settings, - }: { code: string; inputItems: IDataObject[]; settings?: Partial }) => { - return await execTaskWithParams({ - task: newTaskWithSettings({ - code, - nodeMode: 'runOnceForAllItems', - ...settings, - }), - taskData: newAllCodeTaskData(inputItems.map(wrapIntoJson)), + describe('built-in methods and variables available in the 
context', () => { + const inputItems = [{ a: 1 }]; + + const testExpressionForAllItems = async ( + expression: string, + expected: IDataObject | string | number | boolean, + ) => { + const needsWrapping = typeof expected !== 'object'; + const outcome = await executeForAllItems({ + code: needsWrapping ? `return { val: ${expression} }` : `return ${expression}`, + inputItems, }); + + expect(outcome.result).toEqual([wrapIntoJson(needsWrapping ? { val: expected } : expected)]); }; + const testExpressionForEachItem = async ( + expression: string, + expected: IDataObject | string | number | boolean, + ) => { + const needsWrapping = typeof expected !== 'object'; + const outcome = await executeForEachItem({ + code: needsWrapping ? `return { val: ${expression} }` : `return ${expression}`, + inputItems, + }); + + expect(outcome.result).toEqual([ + withPairedItem(0, wrapIntoJson(needsWrapping ? { val: expected } : expected)), + ]); + }; + + const testGroups = { + // https://docs.n8n.io/code/builtin/current-node-input/ + 'current node input': [ + ['$input.first()', inputItems[0]], + ['$input.last()', inputItems[inputItems.length - 1]], + ['$input.params', { manualTriggerParam: 'empty' }], + ], + // https://docs.n8n.io/code/builtin/output-other-nodes/ + 'output of other nodes': [ + ['$("Trigger").first()', inputItems[0]], + ['$("Trigger").last()', inputItems[inputItems.length - 1]], + ['$("Trigger").params', { manualTriggerParam: 'empty' }], + ], + // https://docs.n8n.io/code/builtin/date-time/ + 'date and time': [ + ['$now', expect.any(DateTime)], + ['$today', expect.any(DateTime)], + ['{dt: DateTime}', { dt: expect.any(Function) }], + ], + // https://docs.n8n.io/code/builtin/jmespath/ + JMESPath: [['{ val: $jmespath([{ f: 1 },{ f: 2 }], "[*].f") }', { val: [1, 2] }]], + // https://docs.n8n.io/code/builtin/n8n-metadata/ + 'n8n metadata': [ + [ + '$execution', + { + id: 'exec-id', + mode: 'test', + resumeFormUrl: 'http://formWaitingBaseUrl/exec-id', + resumeUrl: 
'http://webhookWaitingBaseUrl/exec-id', + customData: { + get: expect.any(Function), + getAll: expect.any(Function), + set: expect.any(Function), + setAll: expect.any(Function), + }, + }, + ], + ['$("Trigger").isExecuted', true], + ['$nodeVersion', 2], + ['$prevNode.name', 'Trigger'], + ['$prevNode.outputIndex', 0], + ['$runIndex', 0], + ['{ wf: $workflow }', { wf: { active: true, id: '1', name: 'Test Workflow' } }], + ['$vars', { var: 'value' }], + ], + }; + + for (const [groupName, tests] of Object.entries(testGroups)) { + describe(`${groupName} runOnceForAllItems`, () => { + test.each(tests)( + 'should have the %s available in the context', + async (expression, expected) => { + await testExpressionForAllItems(expression, expected); + }, + ); + }); + + describe(`${groupName} runOnceForEachItem`, () => { + test.each(tests)( + 'should have the %s available in the context', + async (expression, expected) => { + await testExpressionForEachItem(expression, expected); + }, + ); + }); + } + + describe('$env', () => { + it('should have the env available in context when access has not been blocked', async () => { + const outcome = await execTaskWithParams({ + task: newTaskWithSettings({ + code: 'return { val: $env.VAR1 }', + nodeMode: 'runOnceForAllItems', + }), + taskData: newAllCodeTaskData(inputItems.map(wrapIntoJson), { + envProviderState: { + isEnvAccessBlocked: false, + isProcessAvailable: true, + env: { VAR1: 'value' }, + }, + }), + }); + + expect(outcome.result).toEqual([wrapIntoJson({ val: 'value' })]); + }); + + it('should be possible to access env if it has been blocked', async () => { + await expect( + execTaskWithParams({ + task: newTaskWithSettings({ + code: 'return { val: $env.VAR1 }', + nodeMode: 'runOnceForAllItems', + }), + taskData: newAllCodeTaskData(inputItems.map(wrapIntoJson), { + envProviderState: { + isEnvAccessBlocked: true, + isProcessAvailable: true, + env: { VAR1: 'value' }, + }, + }), + }), + ).rejects.toThrow('access to env vars denied'); + 
}); + + it('should not be possible to iterate $env', async () => { + const outcome = await execTaskWithParams({ + task: newTaskWithSettings({ + code: 'return Object.values($env).concat(Object.keys($env))', + nodeMode: 'runOnceForAllItems', + }), + taskData: newAllCodeTaskData(inputItems.map(wrapIntoJson), { + envProviderState: { + isEnvAccessBlocked: false, + isProcessAvailable: true, + env: { VAR1: '1', VAR2: '2', VAR3: '3' }, + }, + }), + }); + + expect(outcome.result).toEqual([]); + }); + + it("should not expose task runner's env variables even if no env state is received", async () => { + process.env.N8N_RUNNERS_N8N_URI = 'http://127.0.0.1:5679'; + const outcome = await execTaskWithParams({ + task: newTaskWithSettings({ + code: 'return { val: $env.N8N_RUNNERS_N8N_URI }', + nodeMode: 'runOnceForAllItems', + }), + taskData: newAllCodeTaskData(inputItems.map(wrapIntoJson), { + envProviderState: undefined, + }), + }); + + expect(outcome.result).toEqual([wrapIntoJson({ val: undefined })]); + }); + }); + }); + + describe('runOnceForAllItems', () => { describe('continue on fail', () => { it('should return an item with the error if continueOnFail is true', async () => { const outcome = await executeForAllItems({ @@ -77,7 +293,7 @@ describe('JsTaskRunner', () => { }); expect(outcome).toEqual({ - result: [wrapIntoJson({ error: 'Error message' })], + result: [wrapIntoJson({ error: 'Error message [line 1]' })], customData: undefined, }); }); @@ -181,21 +397,6 @@ describe('JsTaskRunner', () => { }); describe('runForEachItem', () => { - const executeForEachItem = async ({ - code, - inputItems, - settings, - }: { code: string; inputItems: IDataObject[]; settings?: Partial }) => { - return await execTaskWithParams({ - task: newTaskWithSettings({ - code, - nodeMode: 'runOnceForEachItem', - ...settings, - }), - taskData: newAllCodeTaskData(inputItems.map(wrapIntoJson)), - }); - }; - describe('continue on fail', () => { it('should return an item with the error if continueOnFail 
is true', async () => { const outcome = await executeForEachItem({ @@ -206,8 +407,8 @@ describe('JsTaskRunner', () => { expect(outcome).toEqual({ result: [ - withPairedItem(0, wrapIntoJson({ error: 'Error message' })), - withPairedItem(1, wrapIntoJson({ error: 'Error message' })), + withPairedItem(0, wrapIntoJson({ error: 'Error message [line 1]' })), + withPairedItem(1, wrapIntoJson({ error: 'Error message [line 1]' })), ], customData: undefined, }); @@ -280,4 +481,282 @@ describe('JsTaskRunner', () => { }, ); }); + + describe('require', () => { + const inputItems = [{ a: 1 }]; + const packageJson = JSON.parse(fs.readFileSync('package.json', 'utf8')); + + describe('blocked by default', () => { + const testCases = [...builtinModules, ...Object.keys(packageJson.dependencies)]; + + test.each(testCases)( + 'should throw an error when requiring %s in runOnceForAllItems mode', + async (module) => { + await expect( + executeForAllItems({ + code: `return require('${module}')`, + inputItems, + }), + ).rejects.toThrow(`Cannot find module '${module}'`); + }, + ); + + test.each(testCases)( + 'should throw an error when requiring %s in runOnceForEachItem mode', + async (module) => { + await expect( + executeForEachItem({ + code: `return require('${module}')`, + inputItems, + }), + ).rejects.toThrow(`Cannot find module '${module}'`); + }, + ); + }); + + describe('all built-ins allowed with *', () => { + const testCases = builtinModules; + const runner = createRunnerWithOpts({ + allowedBuiltInModules: '*', + }); + + test.each(testCases)( + 'should be able to require %s in runOnceForAllItems mode', + async (module) => { + await expect( + executeForAllItems({ + code: `return { val: require('${module}') }`, + inputItems, + runner, + }), + ).resolves.toBeDefined(); + }, + ); + + test.each(testCases)( + 'should be able to require %s in runOnceForEachItem mode', + async (module) => { + await expect( + executeForEachItem({ + code: `return { val: require('${module}') }`, + inputItems, + 
runner, + }), + ).resolves.toBeDefined(); + }, + ); + }); + + describe('all external modules allowed with *', () => { + const testCases = Object.keys(packageJson.dependencies); + const runner = createRunnerWithOpts({ + allowedExternalModules: '*', + }); + + test.each(testCases)( + 'should be able to require %s in runOnceForAllItems mode', + async (module) => { + await expect( + executeForAllItems({ + code: `return { val: require('${module}') }`, + inputItems, + runner, + }), + ).resolves.toBeDefined(); + }, + ); + + test.each(testCases)( + 'should be able to require %s in runOnceForEachItem mode', + async (module) => { + await expect( + executeForEachItem({ + code: `return { val: require('${module}') }`, + inputItems, + runner, + }), + ).resolves.toBeDefined(); + }, + ); + }); + + describe('specifically allowed built-in modules', () => { + const runner = createRunnerWithOpts({ + allowedBuiltInModules: 'crypto,path', + }); + + const allowedCases = [ + ['crypto', 'require("crypto").randomBytes(16).toString("hex")', expect.any(String)], + ['path', 'require("path").normalize("/root/./dir")', '/root/dir'], + ]; + + const blockedCases = [['http'], ['process']]; + + test.each(allowedCases)( + 'should allow requiring %s in runOnceForAllItems mode', + async (_moduleName, expression, expected) => { + const outcome = await executeForAllItems({ + code: `return { val: ${expression} }`, + inputItems, + runner, + }); + + expect(outcome.result).toEqual([wrapIntoJson({ val: expected })]); + }, + ); + + test.each(allowedCases)( + 'should allow requiring %s in runOnceForEachItem mode', + async (_moduleName, expression, expected) => { + const outcome = await executeForEachItem({ + code: `return { val: ${expression} }`, + inputItems, + runner, + }); + + expect(outcome.result).toEqual([withPairedItem(0, wrapIntoJson({ val: expected }))]); + }, + ); + + test.each(blockedCases)( + 'should throw when trying to require %s in runOnceForAllItems mode', + async (moduleName) => { + await 
expect( + executeForAllItems({ + code: `require("${moduleName}")`, + inputItems, + runner, + }), + ).rejects.toThrow(`Cannot find module '${moduleName}'`); + }, + ); + + test.each(blockedCases)( + 'should throw when trying to require %s in runOnceForEachItem mode', + async (moduleName) => { + await expect( + executeForEachItem({ + code: `require("${moduleName}")`, + inputItems, + runner, + }), + ).rejects.toThrow(`Cannot find module '${moduleName}'`); + }, + ); + }); + + describe('specifically allowed external modules', () => { + const runner = createRunnerWithOpts({ + allowedExternalModules: 'nanoid', + }); + + const allowedCases = [['nanoid', 'require("nanoid").nanoid()', expect.any(String)]]; + + const blockedCases = [['n8n-core']]; + + test.each(allowedCases)( + 'should allow requiring %s in runOnceForAllItems mode', + async (_moduleName, expression, expected) => { + const outcome = await executeForAllItems({ + code: `return { val: ${expression} }`, + inputItems, + runner, + }); + + expect(outcome.result).toEqual([wrapIntoJson({ val: expected })]); + }, + ); + + test.each(allowedCases)( + 'should allow requiring %s in runOnceForEachItem mode', + async (_moduleName, expression, expected) => { + const outcome = await executeForEachItem({ + code: `return { val: ${expression} }`, + inputItems, + runner, + }); + + expect(outcome.result).toEqual([withPairedItem(0, wrapIntoJson({ val: expected }))]); + }, + ); + + test.each(blockedCases)( + 'should throw when trying to require %s in runOnceForAllItems mode', + async (moduleName) => { + await expect( + executeForAllItems({ + code: `require("${moduleName}")`, + inputItems, + runner, + }), + ).rejects.toThrow(`Cannot find module '${moduleName}'`); + }, + ); + + test.each(blockedCases)( + 'should throw when trying to require %s in runOnceForEachItem mode', + async (moduleName) => { + await expect( + executeForEachItem({ + code: `require("${moduleName}")`, + inputItems, + runner, + }), + ).rejects.toThrow(`Cannot find 
module '${moduleName}'`); + }, + ); + }); + }); + + describe('errors', () => { + test.each<[CodeExecutionMode]>([['runOnceForAllItems'], ['runOnceForEachItem']])( + 'should throw an ExecutionError if the code is invalid in %s mode', + async (nodeMode) => { + await expect( + execTaskWithParams({ + task: newTaskWithSettings({ + code: 'unknown', + nodeMode, + }), + taskData: newAllCodeTaskData([wrapIntoJson({ a: 1 })]), + }), + ).rejects.toThrow(ExecutionError); + }, + ); + + it('sends serializes an error correctly', async () => { + const runner = createRunnerWithOpts({}); + const taskId = '1'; + const task = newTaskWithSettings({ + code: 'unknown; return []', + nodeMode: 'runOnceForAllItems', + continueOnFail: false, + mode: 'manual', + workflowMode: 'manual', + }); + runner.runningTasks.set(taskId, task); + + const sendSpy = jest.spyOn(runner.ws, 'send').mockImplementation(() => {}); + jest.spyOn(runner, 'sendOffers').mockImplementation(() => {}); + jest + .spyOn(runner, 'requestData') + .mockResolvedValue(newAllCodeTaskData([wrapIntoJson({ a: 1 })])); + + await runner.receivedSettings(taskId, task.settings); + + expect(sendSpy).toHaveBeenCalledWith( + JSON.stringify({ + type: 'runner:taskerror', + taskId, + error: { + message: 'unknown is not defined [line 1]', + description: 'ReferenceError', + lineNumber: 1, + }, + }), + ); + + console.log('DONE'); + }, 1000); + }); }); diff --git a/packages/@n8n/task-runner/src/js-task-runner/__tests__/test-data.ts b/packages/@n8n/task-runner/src/js-task-runner/__tests__/test-data.ts index 9f9d64df02..b157094619 100644 --- a/packages/@n8n/task-runner/src/js-task-runner/__tests__/test-data.ts +++ b/packages/@n8n/task-runner/src/js-task-runner/__tests__/test-data.ts @@ -65,6 +65,9 @@ export const newAllCodeTaskData = ( const manualTriggerNode = newNode({ name: 'Trigger', type: 'n8n-nodes-base.manualTrigger', + parameters: { + manualTriggerParam: 'empty', + }, }); return { @@ -116,15 +119,32 @@ export const newAllCodeTaskData = ( 
siblingParameters: {}, mode: 'manual', selfData: {}, + envProviderState: { + env: {}, + isEnvAccessBlocked: true, + isProcessAvailable: true, + }, additionalData: { - formWaitingBaseUrl: '', + executionId: 'exec-id', instanceBaseUrl: '', restartExecutionId: '', restApiUrl: '', - webhookBaseUrl: '', - webhookTestBaseUrl: '', - webhookWaitingBaseUrl: '', - variables: {}, + formWaitingBaseUrl: 'http://formWaitingBaseUrl', + webhookBaseUrl: 'http://webhookBaseUrl', + webhookTestBaseUrl: 'http://webhookTestBaseUrl', + webhookWaitingBaseUrl: 'http://webhookWaitingBaseUrl', + variables: { + var: 'value', + }, + }, + executeData: { + node: codeNode, + data: { + main: [codeNodeInputData], + }, + source: { + main: [{ previousNode: manualTriggerNode.name }], + }, }, ...opts, }; diff --git a/packages/@n8n/task-runner/src/js-task-runner/errors/__tests__/execution-error.test.ts b/packages/@n8n/task-runner/src/js-task-runner/errors/__tests__/execution-error.test.ts new file mode 100644 index 0000000000..3777940021 --- /dev/null +++ b/packages/@n8n/task-runner/src/js-task-runner/errors/__tests__/execution-error.test.ts @@ -0,0 +1,53 @@ +import { ExecutionError } from '../execution-error'; + +describe('ExecutionError', () => { + const defaultStack = `TypeError: a.unknown is not a function + at VmCodeWrapper (evalmachine.:2:3) + at evalmachine.:7:2 + at Script.runInContext (node:vm:148:12) + at Script.runInNewContext (node:vm:153:17) + at runInNewContext (node:vm:309:38) + at JsTaskRunner.runForAllItems (/n8n/packages/@n8n/task-runner/dist/js-task-runner/js-task-runner.js:90:65) + at JsTaskRunner.executeTask (/n8n/packages/@n8n/task-runner/dist/js-task-runner/js-task-runner.js:71:26) + at process.processTicksAndRejections (node:internal/process/task_queues:95:5) + at async JsTaskRunner.receivedSettings (/n8n/packages/@n8n/task-runner/dist/task-runner.js:190:26)`; + + it('should parse error details from stack trace without itemIndex', () => { + const error = new Error('a.unknown is 
not a function'); + error.stack = defaultStack; + + const executionError = new ExecutionError(error); + expect(executionError.message).toBe('a.unknown is not a function [line 2]'); + expect(executionError.lineNumber).toBe(2); + expect(executionError.description).toBe('TypeError'); + expect(executionError.context).toBeUndefined(); + }); + + it('should parse error details from stack trace with itemIndex', () => { + const error = new Error('a.unknown is not a function'); + error.stack = defaultStack; + + const executionError = new ExecutionError(error, 1); + expect(executionError.message).toBe('a.unknown is not a function [line 2, for item 1]'); + expect(executionError.lineNumber).toBe(2); + expect(executionError.description).toBe('TypeError'); + expect(executionError.context).toEqual({ itemIndex: 1 }); + }); + + it('should serialize correctly', () => { + const error = new Error('a.unknown is not a function'); + error.stack = defaultStack; + + const executionError = new ExecutionError(error, 1); + + expect(JSON.stringify(executionError)).toBe( + JSON.stringify({ + message: 'a.unknown is not a function [line 2, for item 1]', + description: 'TypeError', + itemIndex: 1, + context: { itemIndex: 1 }, + lineNumber: 2, + }), + ); + }); +}); diff --git a/packages/@n8n/task-runner/src/js-task-runner/errors/error-like.ts b/packages/@n8n/task-runner/src/js-task-runner/errors/error-like.ts new file mode 100644 index 0000000000..1eaf744e89 --- /dev/null +++ b/packages/@n8n/task-runner/src/js-task-runner/errors/error-like.ts @@ -0,0 +1,12 @@ +export interface ErrorLike { + message: string; + stack?: string; +} + +export function isErrorLike(value: unknown): value is ErrorLike { + if (typeof value !== 'object' || value === null) return false; + + const errorLike = value as ErrorLike; + + return typeof errorLike.message === 'string'; +} diff --git a/packages/@n8n/task-runner/src/js-task-runner/errors/execution-error.ts 
b/packages/@n8n/task-runner/src/js-task-runner/errors/execution-error.ts index e1fdffc0b6..63a2dd5e0b 100644 --- a/packages/@n8n/task-runner/src/js-task-runner/errors/execution-error.ts +++ b/packages/@n8n/task-runner/src/js-task-runner/errors/execution-error.ts @@ -1,6 +1,9 @@ -import { ApplicationError } from 'n8n-workflow'; +import type { ErrorLike } from './error-like'; +import { SerializableError } from './serializable-error'; -export class ExecutionError extends ApplicationError { +const VM_WRAPPER_FN_NAME = 'VmCodeWrapper'; + +export class ExecutionError extends SerializableError { description: string | null = null; itemIndex: number | undefined = undefined; @@ -11,7 +14,7 @@ export class ExecutionError extends ApplicationError { lineNumber: number | undefined = undefined; - constructor(error: Error & { stack?: string }, itemIndex?: number) { + constructor(error: ErrorLike, itemIndex?: number) { super(error.message); this.itemIndex = itemIndex; @@ -32,10 +35,11 @@ export class ExecutionError extends ApplicationError { if (stackRows.length === 0) { this.message = 'Unknown error'; + return; } const messageRow = stackRows.find((line) => line.includes('Error:')); - const lineNumberRow = stackRows.find((line) => line.includes('Code:')); + const lineNumberRow = stackRows.find((line) => line.includes(`at ${VM_WRAPPER_FN_NAME} `)); const lineNumberDisplay = this.toLineNumberDisplay(lineNumberRow); if (!messageRow) { @@ -56,16 +60,22 @@ export class ExecutionError extends ApplicationError { } private toLineNumberDisplay(lineNumberRow?: string) { - const errorLineNumberMatch = lineNumberRow?.match(/Code:(?\d+)/); + if (!lineNumberRow) return ''; + // TODO: This doesn't work if there is a function definition in the code + // and the error is thrown from that function. 
+ + const regex = new RegExp( + `at ${VM_WRAPPER_FN_NAME} \\(evalmachine\\.:(?\\d+):`, + ); + const errorLineNumberMatch = lineNumberRow.match(regex); if (!errorLineNumberMatch?.groups?.lineNumber) return null; const lineNumber = errorLineNumberMatch.groups.lineNumber; + if (!lineNumber) return ''; this.lineNumber = Number(lineNumber); - if (!lineNumber) return ''; - return this.itemIndex === undefined ? `[line ${lineNumber}]` : `[line ${lineNumber}, for item ${this.itemIndex}]`; diff --git a/packages/@n8n/task-runner/src/js-task-runner/errors/serializable-error.ts b/packages/@n8n/task-runner/src/js-task-runner/errors/serializable-error.ts new file mode 100644 index 0000000000..cd0e568de0 --- /dev/null +++ b/packages/@n8n/task-runner/src/js-task-runner/errors/serializable-error.ts @@ -0,0 +1,21 @@ +/** + * Error that has its message property serialized as well. Used to transport + * errors over the wire. + */ +export abstract class SerializableError extends Error { + constructor(message: string) { + super(message); + + // So it is serialized as well + this.makeMessageEnumerable(); + } + + private makeMessageEnumerable() { + Object.defineProperty(this, 'message', { + value: this.message, + enumerable: true, // This makes the message property enumerable + writable: true, + configurable: true, + }); + } +} diff --git a/packages/@n8n/task-runner/src/js-task-runner/errors/validation-error.ts b/packages/@n8n/task-runner/src/js-task-runner/errors/validation-error.ts index f2ba712c2c..bf66136ccf 100644 --- a/packages/@n8n/task-runner/src/js-task-runner/errors/validation-error.ts +++ b/packages/@n8n/task-runner/src/js-task-runner/errors/validation-error.ts @@ -1,6 +1,6 @@ -import { ApplicationError } from 'n8n-workflow'; +import { SerializableError } from './serializable-error'; -export class ValidationError extends ApplicationError { +export class ValidationError extends SerializableError { description = ''; itemIndex: number | undefined = undefined; diff --git 
a/packages/@n8n/task-runner/src/js-task-runner/js-task-runner.ts b/packages/@n8n/task-runner/src/js-task-runner/js-task-runner.ts index 907569eb6e..5bf2e06f26 100644 --- a/packages/@n8n/task-runner/src/js-task-runner/js-task-runner.ts +++ b/packages/@n8n/task-runner/src/js-task-runner/js-task-runner.ts @@ -17,6 +17,7 @@ import type { INodeParameters, IRunExecutionData, WorkflowExecuteMode, + EnvProviderState, } from 'n8n-workflow'; import * as a from 'node:assert'; import { runInNewContext, type Context } from 'node:vm'; @@ -24,6 +25,10 @@ import { runInNewContext, type Context } from 'node:vm'; import type { TaskResultData } from '@/runner-types'; import { type Task, TaskRunner } from '@/task-runner'; +import { isErrorLike } from './errors/error-like'; +import { ExecutionError } from './errors/execution-error'; +import type { RequireResolver } from './require-resolver'; +import { createRequireResolver } from './require-resolver'; import { validateRunForAllItemsOutput, validateRunForEachItemOutput } from './result-validation'; export interface JSExecSettings { @@ -63,6 +68,7 @@ export interface AllCodeTaskData { connectionInputData: INodeExecutionData[]; siblingParameters: INodeParameters; mode: WorkflowExecuteMode; + envProviderState?: EnvProviderState; executeData?: IExecuteData; defaultReturnRunIndex: number; selfData: IDataObject; @@ -70,19 +76,47 @@ export interface AllCodeTaskData { additionalData: PartialAdditionalData; } +export interface JsTaskRunnerOpts { + wsUrl: string; + grantToken: string; + maxConcurrency: number; + name?: string; + /** + * List of built-in nodejs modules that are allowed to be required in the + * execution sandbox. Asterisk (*) can be used to allow all. + */ + allowedBuiltInModules?: string; + /** + * List of npm modules that are allowed to be required in the execution + * sandbox. Asterisk (*) can be used to allow all. 
+ */ + allowedExternalModules?: string; +} + type CustomConsole = { log: (...args: unknown[]) => void; }; export class JsTaskRunner extends TaskRunner { - constructor( - taskType: string, - wsUrl: string, - grantToken: string, - maxConcurrency: number, - name?: string, - ) { - super(taskType, wsUrl, grantToken, maxConcurrency, name ?? 'JS Task Runner'); + private readonly requireResolver: RequireResolver; + + constructor({ + grantToken, + maxConcurrency, + wsUrl, + name = 'JS Task Runner', + allowedBuiltInModules, + allowedExternalModules, + }: JsTaskRunnerOpts) { + super('javascript', wsUrl, grantToken, maxConcurrency, name); + + const parseModuleAllowList = (moduleList: string) => + moduleList === '*' ? null : new Set(moduleList.split(',').map((x) => x.trim())); + + this.requireResolver = createRequireResolver({ + allowedBuiltInModules: parseModuleAllowList(allowedBuiltInModules ?? ''), + allowedExternalModules: parseModuleAllowList(allowedExternalModules ?? ''), + }); } async executeTask(task: Task): Promise { @@ -143,7 +177,7 @@ export class JsTaskRunner extends TaskRunner { const inputItems = allData.connectionInputData; const context: Context = { - require, + require: this.requireResolver, module: {}, console: customConsole, @@ -154,7 +188,7 @@ export class JsTaskRunner extends TaskRunner { try { const result = (await runInNewContext( - `module.exports = async function() {${settings.code}\n}()`, + `module.exports = async function VmCodeWrapper() {${settings.code}\n}()`, context, )) as TaskResultData['result']; @@ -163,12 +197,14 @@ export class JsTaskRunner extends TaskRunner { } return validateRunForAllItemsOutput(result); - } catch (error) { + } catch (e) { + // Errors thrown by the VM are not instances of Error, so map them to an ExecutionError + const error = this.toExecutionErrorIfNeeded(e); + if (settings.continueOnFail) { - return [{ json: { error: this.getErrorMessageFromVmError(error) } }]; + return [{ json: { error: error.message } }]; } - (error as 
Record).node = allData.node; throw error; } } @@ -190,7 +226,7 @@ export class JsTaskRunner extends TaskRunner { const item = inputItems[index]; const dataProxy = this.createDataProxy(allData, workflow, index); const context: Context = { - require, + require: this.requireResolver, module: {}, console: customConsole, item, @@ -201,7 +237,7 @@ export class JsTaskRunner extends TaskRunner { try { let result = (await runInNewContext( - `module.exports = async function() {${settings.code}\n}()`, + `module.exports = async function VmCodeWrapper() {${settings.code}\n}()`, context, )) as INodeExecutionData | undefined; @@ -225,14 +261,16 @@ export class JsTaskRunner extends TaskRunner { }, ); } - } catch (error) { + } catch (e) { + // Errors thrown by the VM are not instances of Error, so map them to an ExecutionError + const error = this.toExecutionErrorIfNeeded(e); + if (!settings.continueOnFail) { - (error as Record).node = allData.node; throw error; } returnData.push({ - json: { error: this.getErrorMessageFromVmError(error) }, + json: { error: error.message }, pairedItem: { item: index, }, @@ -262,14 +300,25 @@ export class JsTaskRunner extends TaskRunner { allData.defaultReturnRunIndex, allData.selfData, allData.contextNodeName, + // Make sure that even if we don't receive the envProviderState for + // whatever reason, we don't expose the task runner's env to the code + allData.envProviderState ?? 
{ + env: {}, + isEnvAccessBlocked: false, + isProcessAvailable: true, + }, ).getDataProxy(); } - private getErrorMessageFromVmError(error: unknown): string { - if (typeof error === 'object' && !!error && 'message' in error) { - return error.message as string; + private toExecutionErrorIfNeeded(error: unknown): Error { + if (error instanceof Error) { + return error; } - return JSON.stringify(error); + if (isErrorLike(error)) { + return new ExecutionError(error); + } + + return new ExecutionError({ message: JSON.stringify(error) }); } } diff --git a/packages/@n8n/task-runner/src/js-task-runner/require-resolver.ts b/packages/@n8n/task-runner/src/js-task-runner/require-resolver.ts new file mode 100644 index 0000000000..ffa00c0441 --- /dev/null +++ b/packages/@n8n/task-runner/src/js-task-runner/require-resolver.ts @@ -0,0 +1,43 @@ +import { ApplicationError } from 'n8n-workflow'; +import { isBuiltin } from 'node:module'; + +import { ExecutionError } from './errors/execution-error'; + +export type RequireResolverOpts = { + /** + * List of built-in nodejs modules that are allowed to be required in the + * execution sandbox. `null` means all are allowed. + */ + allowedBuiltInModules: Set | null; + + /** + * List of external modules that are allowed to be required in the + * execution sandbox. `null` means all are allowed. + */ + allowedExternalModules: Set | null; +}; + +export type RequireResolver = (request: string) => unknown; + +export function createRequireResolver({ + allowedBuiltInModules, + allowedExternalModules, +}: RequireResolverOpts) { + return (request: string) => { + const checkIsAllowed = (allowList: Set | null, moduleName: string) => { + return allowList ? allowList.has(moduleName) : true; + }; + + const isAllowed = isBuiltin(request) + ? 
checkIsAllowed(allowedBuiltInModules, request) + : checkIsAllowed(allowedExternalModules, request); + + if (!isAllowed) { + const error = new ApplicationError(`Cannot find module '${request}'`); + throw new ExecutionError(error); + } + + // eslint-disable-next-line @typescript-eslint/no-var-requires + return require(request) as unknown; + }; +} diff --git a/packages/@n8n/task-runner/src/start.ts b/packages/@n8n/task-runner/src/start.ts index 7570a11780..5f856140d9 100644 --- a/packages/@n8n/task-runner/src/start.ts +++ b/packages/@n8n/task-runner/src/start.ts @@ -66,7 +66,13 @@ void (async function start() { } const wsUrl = `ws://${config.n8nUri}/runners/_ws`; - runner = new JsTaskRunner('javascript', wsUrl, grantToken, 5); + runner = new JsTaskRunner({ + wsUrl, + grantToken, + maxConcurrency: 5, + allowedBuiltInModules: process.env.NODE_FUNCTION_ALLOW_BUILTIN, + allowedExternalModules: process.env.NODE_FUNCTION_ALLOW_EXTERNAL, + }); process.on('SIGINT', createSignalHandler('SIGINT')); process.on('SIGTERM', createSignalHandler('SIGTERM')); diff --git a/packages/@n8n/task-runner/src/task-runner.ts b/packages/@n8n/task-runner/src/task-runner.ts index 89402d885c..ac8378636a 100644 --- a/packages/@n8n/task-runner/src/task-runner.ts +++ b/packages/@n8n/task-runner/src/task-runner.ts @@ -1,4 +1,4 @@ -import { ApplicationError, ensureError } from 'n8n-workflow'; +import { ApplicationError } from 'n8n-workflow'; import { nanoid } from 'nanoid'; import { URL } from 'node:url'; import { type MessageEvent, WebSocket } from 'ws'; @@ -256,8 +256,7 @@ export abstract class TaskRunner { try { const data = await this.executeTask(task); this.taskDone(taskId, data); - } catch (e) { - const error = ensureError(e); + } catch (error) { this.taskErrored(taskId, error); } } diff --git a/packages/cli/package.json b/packages/cli/package.json index 9c7664d8a8..d7d92d96bc 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -1,6 +1,6 @@ { "name": "n8n", - "version": 
"1.62.1", + "version": "1.63.0", "description": "n8n Workflow Automation Tool", "main": "dist/index", "types": "dist/index.d.ts", @@ -51,12 +51,12 @@ "!dist/**/e2e.*" ], "devDependencies": { - "@redocly/cli": "^1.6.0", + "@redocly/cli": "^1.25.5", "@types/aws4": "^1.5.1", "@types/bcryptjs": "^2.4.2", "@types/compression": "1.0.1", "@types/convict": "^6.1.1", - "@types/cookie-parser": "^1.4.2", + "@types/cookie-parser": "^1.4.7", "@types/express": "catalog:", "@types/flat": "^5.0.5", "@types/formidable": "^3.4.5", @@ -76,7 +76,6 @@ "@types/xml2js": "catalog:", "@types/yamljs": "^0.2.31", "@vvo/tzdb": "^6.141.0", - "chokidar": "^3.5.2", "concurrently": "^8.2.0", "ioredis-mock": "^8.8.1", "mjml": "^4.15.3", @@ -94,7 +93,7 @@ "@n8n/permissions": "workspace:*", "@n8n/task-runner": "workspace:*", "@n8n/typeorm": "0.3.20-12", - "@n8n_io/ai-assistant-sdk": "1.9.4", + "@n8n_io/ai-assistant-sdk": "1.10.3", "@n8n_io/license-sdk": "2.13.1", "@oclif/core": "4.0.7", "@rudderstack/rudder-sdk-node": "2.0.9", @@ -111,14 +110,14 @@ "class-validator": "0.14.0", "compression": "1.7.4", "convict": "6.2.4", - "cookie-parser": "1.4.6", + "cookie-parser": "1.4.7", "csrf": "3.1.0", "curlconverter": "3.21.0", "dotenv": "8.6.0", - "express": "4.21.0", + "express": "4.21.1", "express-async-errors": "3.1.1", "express-handlebars": "7.1.2", - "express-openapi-validator": "5.3.3", + "express-openapi-validator": "5.3.7", "express-prom-bundle": "6.6.0", "express-rate-limit": "7.2.0", "fast-glob": "catalog:", @@ -145,7 +144,7 @@ "nodemailer": "6.9.9", "oauth-1.0a": "2.2.6", "open": "7.4.2", - "openapi-types": "10.0.0", + "openapi-types": "12.1.3", "otpauth": "9.1.1", "p-cancelable": "2.1.1", "p-lazy": "3.1.0", @@ -164,9 +163,8 @@ "simple-git": "3.17.0", "source-map-support": "0.5.21", "sqlite3": "5.1.7", - "sse-channel": "4.0.0", "sshpk": "1.17.0", - "swagger-ui-express": "5.0.0", + "swagger-ui-express": "5.0.1", "syslog-client": "1.1.1", "typedi": "catalog:", "uuid": "catalog:", diff --git 
a/packages/cli/src/__tests__/license.test.ts b/packages/cli/src/__tests__/license.test.ts index 35da918abb..67a92b95cd 100644 --- a/packages/cli/src/__tests__/license.test.ts +++ b/packages/cli/src/__tests__/license.test.ts @@ -5,7 +5,7 @@ import type { InstanceSettings } from 'n8n-core'; import config from '@/config'; import { N8N_VERSION } from '@/constants'; import { License } from '@/license'; -import type { Logger } from '@/logging/logger.service'; +import { mockLogger } from '@test/mocking'; jest.mock('@n8n_io/license-sdk'); @@ -25,37 +25,39 @@ describe('License', () => { }); let license: License; - const logger = mock(); const instanceSettings = mock({ instanceId: MOCK_INSTANCE_ID, instanceType: 'main', }); beforeEach(async () => { - license = new License(logger, instanceSettings, mock(), mock(), mock()); + license = new License(mockLogger(), instanceSettings, mock(), mock(), mock()); await license.init(); }); test('initializes license manager', async () => { - expect(LicenseManager).toHaveBeenCalledWith({ - autoRenewEnabled: true, - autoRenewOffset: MOCK_RENEW_OFFSET, - offlineMode: false, - renewOnInit: true, - deviceFingerprint: expect.any(Function), - productIdentifier: `n8n-${N8N_VERSION}`, - logger, - loadCertStr: expect.any(Function), - saveCertStr: expect.any(Function), - onFeatureChange: expect.any(Function), - collectUsageMetrics: expect.any(Function), - collectPassthroughData: expect.any(Function), - server: MOCK_SERVER_URL, - tenantId: 1, - }); + expect(LicenseManager).toHaveBeenCalledWith( + expect.objectContaining({ + autoRenewEnabled: true, + autoRenewOffset: MOCK_RENEW_OFFSET, + offlineMode: false, + renewOnInit: true, + deviceFingerprint: expect.any(Function), + productIdentifier: `n8n-${N8N_VERSION}`, + loadCertStr: expect.any(Function), + saveCertStr: expect.any(Function), + onFeatureChange: expect.any(Function), + collectUsageMetrics: expect.any(Function), + collectPassthroughData: expect.any(Function), + server: MOCK_SERVER_URL, + 
tenantId: 1, + }), + ); }); test('initializes license manager for worker', async () => { + const logger = mockLogger(); + license = new License( logger, mock({ instanceType: 'worker' }), @@ -64,22 +66,23 @@ describe('License', () => { mock(), ); await license.init(); - expect(LicenseManager).toHaveBeenCalledWith({ - autoRenewEnabled: false, - autoRenewOffset: MOCK_RENEW_OFFSET, - offlineMode: true, - renewOnInit: false, - deviceFingerprint: expect.any(Function), - productIdentifier: `n8n-${N8N_VERSION}`, - logger, - loadCertStr: expect.any(Function), - saveCertStr: expect.any(Function), - onFeatureChange: expect.any(Function), - collectUsageMetrics: expect.any(Function), - collectPassthroughData: expect.any(Function), - server: MOCK_SERVER_URL, - tenantId: 1, - }); + expect(LicenseManager).toHaveBeenCalledWith( + expect.objectContaining({ + autoRenewEnabled: false, + autoRenewOffset: MOCK_RENEW_OFFSET, + offlineMode: true, + renewOnInit: false, + deviceFingerprint: expect.any(Function), + productIdentifier: `n8n-${N8N_VERSION}`, + loadCertStr: expect.any(Function), + saveCertStr: expect.any(Function), + onFeatureChange: expect.any(Function), + collectUsageMetrics: expect.any(Function), + collectPassthroughData: expect.any(Function), + server: MOCK_SERVER_URL, + tenantId: 1, + }), + ); }); test('attempts to activate license with provided key', async () => { @@ -196,7 +199,7 @@ describe('License', () => { it('should enable renewal', async () => { config.set('multiMainSetup.enabled', false); - await new License(mock(), mock(), mock(), mock(), mock()).init(); + await new License(mockLogger(), mock(), mock(), mock(), mock()).init(); expect(LicenseManager).toHaveBeenCalledWith( expect.objectContaining({ autoRenewEnabled: true, renewOnInit: true }), @@ -208,7 +211,7 @@ describe('License', () => { it('should disable renewal', async () => { config.set('license.autoRenewEnabled', false); - await new License(mock(), mock(), mock(), mock(), mock()).init(); + await new 
License(mockLogger(), mock(), mock(), mock(), mock()).init(); expect(LicenseManager).toHaveBeenCalledWith( expect.objectContaining({ autoRenewEnabled: false, renewOnInit: false }), @@ -226,7 +229,7 @@ describe('License', () => { config.set('multiMainSetup.instanceType', status); config.set('license.autoRenewEnabled', false); - await new License(mock(), mock(), mock(), mock(), mock()).init(); + await new License(mockLogger(), mock(), mock(), mock(), mock()).init(); expect(LicenseManager).toHaveBeenCalledWith( expect.objectContaining({ autoRenewEnabled: false, renewOnInit: false }), @@ -241,7 +244,7 @@ describe('License', () => { config.set('multiMainSetup.instanceType', status); config.set('license.autoRenewEnabled', false); - await new License(mock(), mock(), mock(), mock(), mock()).init(); + await new License(mockLogger(), mock(), mock(), mock(), mock()).init(); expect(LicenseManager).toHaveBeenCalledWith( expect.objectContaining({ autoRenewEnabled: false, renewOnInit: false }), @@ -252,7 +255,7 @@ describe('License', () => { config.set('multiMainSetup.enabled', true); config.set('multiMainSetup.instanceType', 'leader'); - await new License(mock(), mock(), mock(), mock(), mock()).init(); + await new License(mockLogger(), mock(), mock(), mock(), mock()).init(); expect(LicenseManager).toHaveBeenCalledWith( expect.objectContaining({ autoRenewEnabled: true, renewOnInit: true }), @@ -264,7 +267,7 @@ describe('License', () => { describe('reinit', () => { it('should reinitialize license manager', async () => { - const license = new License(mock(), mock(), mock(), mock(), mock()); + const license = new License(mockLogger(), mock(), mock(), mock(), mock()); await license.init(); const initSpy = jest.spyOn(license, 'init'); diff --git a/packages/cli/src/__tests__/node-types.test.ts b/packages/cli/src/__tests__/node-types.test.ts new file mode 100644 index 0000000000..11e2c5ba2b --- /dev/null +++ b/packages/cli/src/__tests__/node-types.test.ts @@ -0,0 +1,100 @@ +import { 
mock } from 'jest-mock-extended'; +import type { INodeType, IVersionedNodeType } from 'n8n-workflow'; + +import type { LoadNodesAndCredentials } from '@/load-nodes-and-credentials'; + +import { NodeTypes } from '../node-types'; + +describe('NodeTypes', () => { + let nodeTypes: NodeTypes; + const loadNodesAndCredentials = mock(); + + beforeEach(() => { + jest.clearAllMocks(); + nodeTypes = new NodeTypes(loadNodesAndCredentials); + }); + + describe('getByNameAndVersion', () => { + const nodeTypeName = 'n8n-nodes-base.testNode'; + + it('should throw an error if the node-type does not exist', () => { + const nodeTypeName = 'unknownNode'; + + // @ts-expect-error overwriting a readonly property + loadNodesAndCredentials.loadedNodes = {}; + // @ts-expect-error overwriting a readonly property + loadNodesAndCredentials.knownNodes = {}; + + expect(() => nodeTypes.getByNameAndVersion(nodeTypeName)).toThrow( + 'Unrecognized node type: unknownNode', + ); + }); + + it('should return a regular node-type without version', () => { + const nodeType = mock(); + + // @ts-expect-error overwriting a readonly property + loadNodesAndCredentials.loadedNodes = { + [nodeTypeName]: { type: nodeType }, + }; + + const result = nodeTypes.getByNameAndVersion(nodeTypeName); + + expect(result).toEqual(nodeType); + }); + + it('should return a regular node-type with version', () => { + const nodeTypeV1 = mock(); + const nodeType = mock({ + nodeVersions: { 1: nodeTypeV1 }, + getNodeType: () => nodeTypeV1, + }); + + // @ts-expect-error overwriting a readonly property + loadNodesAndCredentials.loadedNodes = { + [nodeTypeName]: { type: nodeType }, + }; + + const result = nodeTypes.getByNameAndVersion(nodeTypeName); + + expect(result).toEqual(nodeTypeV1); + }); + + it('should throw when a node-type is requested as tool, but does not support being used as one', () => { + const nodeType = mock(); + + // @ts-expect-error overwriting a readonly property + loadNodesAndCredentials.loadedNodes = { + 
[nodeTypeName]: { type: nodeType }, + }; + + expect(() => nodeTypes.getByNameAndVersion(`${nodeTypeName}Tool`)).toThrow( + 'Node cannot be used as a tool', + ); + }); + + it('should return the tool node-type when requested as tool', () => { + const nodeType = mock(); + // @ts-expect-error can't use a mock here + nodeType.description = { + name: nodeTypeName, + displayName: 'TestNode', + usableAsTool: true, + properties: [], + }; + + // @ts-expect-error overwriting a readonly property + loadNodesAndCredentials.loadedNodes = { + [nodeTypeName]: { type: nodeType }, + }; + + const result = nodeTypes.getByNameAndVersion(`${nodeTypeName}Tool`); + expect(result).not.toEqual(nodeType); + expect(result.description.name).toEqual('n8n-nodes-base.testNodeTool'); + expect(result.description.displayName).toEqual('TestNode Tool'); + expect(result.description.codex?.categories).toContain('AI'); + expect(result.description.inputs).toEqual([]); + expect(result.description.outputs).toEqual(['ai_tool']); + }); + }); +}); diff --git a/packages/cli/src/__tests__/wait-tracker.test.ts b/packages/cli/src/__tests__/wait-tracker.test.ts index 9ca3a66d33..e51cd88ccb 100644 --- a/packages/cli/src/__tests__/wait-tracker.test.ts +++ b/packages/cli/src/__tests__/wait-tracker.test.ts @@ -1,10 +1,12 @@ import { mock } from 'jest-mock-extended'; +import type { InstanceSettings } from 'n8n-core'; import type { ExecutionRepository } from '@/databases/repositories/execution.repository'; import type { IExecutionResponse } from '@/interfaces'; import type { MultiMainSetup } from '@/services/orchestration/main/multi-main-setup.ee'; import { OrchestrationService } from '@/services/orchestration.service'; import { WaitTracker } from '@/wait-tracker'; +import { mockLogger } from '@test/mocking'; jest.useFakeTimers(); @@ -12,6 +14,7 @@ describe('WaitTracker', () => { const executionRepository = mock(); const multiMainSetup = mock(); const orchestrationService = new OrchestrationService(mock(), mock(), 
multiMainSetup); + const instanceSettings = mock({ isLeader: true }); const execution = mock({ id: '123', @@ -21,11 +24,12 @@ describe('WaitTracker', () => { let waitTracker: WaitTracker; beforeEach(() => { waitTracker = new WaitTracker( - mock(), + mockLogger(), executionRepository, mock(), mock(), orchestrationService, + instanceSettings, ); multiMainSetup.on.mockReturnThis(); }); @@ -36,7 +40,6 @@ describe('WaitTracker', () => { describe('init()', () => { it('should query DB for waiting executions if leader', async () => { - jest.spyOn(orchestrationService, 'isLeader', 'get').mockReturnValue(true); executionRepository.getWaitingExecutions.mockResolvedValue([execution]); waitTracker.init(); @@ -119,7 +122,6 @@ describe('WaitTracker', () => { describe('multi-main setup', () => { it('should start tracking if leader', () => { - jest.spyOn(orchestrationService, 'isLeader', 'get').mockReturnValue(true); jest.spyOn(orchestrationService, 'isSingleMainSetup', 'get').mockReturnValue(false); executionRepository.getWaitingExecutions.mockResolvedValue([]); @@ -130,7 +132,14 @@ describe('WaitTracker', () => { }); it('should not start tracking if follower', () => { - jest.spyOn(orchestrationService, 'isLeader', 'get').mockReturnValue(false); + const waitTracker = new WaitTracker( + mockLogger(), + executionRepository, + mock(), + mock(), + orchestrationService, + mock({ isLeader: false }), + ); jest.spyOn(orchestrationService, 'isSingleMainSetup', 'get').mockReturnValue(false); executionRepository.getWaitingExecutions.mockResolvedValue([]); diff --git a/packages/cli/src/abstract-server.ts b/packages/cli/src/abstract-server.ts index 95ecaccdc5..4456470b86 100644 --- a/packages/cli/src/abstract-server.ts +++ b/packages/cli/src/abstract-server.ts @@ -5,7 +5,6 @@ import { engine as expressHandlebars } from 'express-handlebars'; import { readFile } from 'fs/promises'; import type { Server } from 'http'; import isbot from 'isbot'; -import type { InstanceType } from 'n8n-core'; 
import { Container, Service } from 'typedi'; import config from '@/config'; @@ -22,7 +21,6 @@ import { TestWebhooks } from '@/webhooks/test-webhooks'; import { WaitingWebhooks } from '@/webhooks/waiting-webhooks'; import { createWebhookHandlerFor } from '@/webhooks/webhook-request-handler'; -import { generateHostInstanceId } from './databases/utils/generators'; import { ServiceUnavailableError } from './errors/response-errors/service-unavailable.error'; @Service() @@ -61,7 +59,7 @@ export abstract class AbstractServer { readonly uniqueInstanceId: string; - constructor(instanceType: Exclude) { + constructor() { this.app = express(); this.app.disable('x-powered-by'); @@ -85,8 +83,6 @@ export abstract class AbstractServer { this.endpointWebhookTest = this.globalConfig.endpoints.webhookTest; this.endpointWebhookWaiting = this.globalConfig.endpoints.webhookWaiting; - this.uniqueInstanceId = generateHostInstanceId(instanceType); - this.logger = Container.get(Logger); } diff --git a/packages/cli/src/active-workflow-manager.ts b/packages/cli/src/active-workflow-manager.ts index 9c5ef15f38..4127909e49 100644 --- a/packages/cli/src/active-workflow-manager.ts +++ b/packages/cli/src/active-workflow-manager.ts @@ -1,6 +1,6 @@ /* eslint-disable @typescript-eslint/no-unsafe-member-access */ -import { ActiveWorkflows, NodeExecuteFunctions } from 'n8n-core'; +import { ActiveWorkflows, InstanceSettings, NodeExecuteFunctions } from 'n8n-core'; import type { ExecutionError, IDeferredPromise, @@ -74,6 +74,7 @@ export class ActiveWorkflowManager { private readonly workflowStaticDataService: WorkflowStaticDataService, private readonly activeWorkflowsService: ActiveWorkflowsService, private readonly workflowExecutionService: WorkflowExecutionService, + private readonly instanceSettings: InstanceSettings, ) {} async init() { @@ -423,7 +424,7 @@ export class ActiveWorkflowManager { if (dbWorkflows.length === 0) return; - if (this.orchestrationService.isLeader) { + if 
(this.instanceSettings.isLeader) { this.logger.info(' ================================'); this.logger.info(' Start Active Workflows:'); this.logger.info(' ================================'); diff --git a/packages/cli/src/commands/base-command.ts b/packages/cli/src/commands/base-command.ts index 7403dd337a..303b3cae3e 100644 --- a/packages/cli/src/commands/base-command.ts +++ b/packages/cli/src/commands/base-command.ts @@ -1,16 +1,26 @@ import 'reflect-metadata'; import { GlobalConfig } from '@n8n/config'; import { Command, Errors } from '@oclif/core'; -import { BinaryDataService, InstanceSettings, ObjectStoreService } from 'n8n-core'; -import { ApplicationError, ErrorReporterProxy as ErrorReporter, sleep } from 'n8n-workflow'; +import { + BinaryDataService, + InstanceSettings, + ObjectStoreService, + DataDeduplicationService, +} from 'n8n-core'; +import { + ApplicationError, + ensureError, + ErrorReporterProxy as ErrorReporter, + sleep, +} from 'n8n-workflow'; import { Container } from 'typedi'; import type { AbstractServer } from '@/abstract-server'; import config from '@/config'; import { LICENSE_FEATURES, inDevelopment, inTest } from '@/constants'; import * as CrashJournal from '@/crash-journal'; -import { generateHostInstanceId } from '@/databases/utils/generators'; import * as Db from '@/db'; +import { getDataDeduplicationService } from '@/deduplication'; import { initErrorHandling } from '@/error-reporting'; import { MessageEventBus } from '@/eventbus/message-event-bus/message-event-bus'; import { TelemetryEventRelay } from '@/events/relays/telemetry.event-relay'; @@ -34,8 +44,6 @@ export abstract class BaseCommand extends Command { protected instanceSettings: InstanceSettings = Container.get(InstanceSettings); - queueModeId: string; - protected server?: AbstractServer; protected shutdownService: ShutdownService = Container.get(ShutdownService); @@ -122,16 +130,6 @@ export abstract class BaseCommand extends Command { await 
Container.get(TelemetryEventRelay).init(); } - protected setInstanceQueueModeId() { - if (config.get('redis.queueModeId')) { - this.queueModeId = config.get('redis.queueModeId'); - return; - } - // eslint-disable-next-line @typescript-eslint/no-unnecessary-type-assertion - this.queueModeId = generateHostInstanceId(this.instanceSettings.instanceType!); - config.set('redis.queueModeId', this.queueModeId); - } - protected async stopProcess() { // This needs to be overridden } @@ -261,6 +259,11 @@ export abstract class BaseCommand extends Command { await Container.get(BinaryDataService).init(binaryDataConfig); } + protected async initDataDeduplicationService() { + const dataDeduplicationService = getDataDeduplicationService(); + await DataDeduplicationService.init(dataDeduplicationService); + } + async initExternalHooks() { this.externalHooks = Container.get(ExternalHooks); await this.externalHooks.init(); @@ -283,8 +286,9 @@ export abstract class BaseCommand extends Command { this.logger.debug('Attempting license activation'); await this.license.activate(activationKey); this.logger.debug('License init complete'); - } catch (e) { - this.logger.error('Could not activate license', e as Error); + } catch (e: unknown) { + const error = ensureError(e); + this.logger.error('Could not activate license', { error }); } } } diff --git a/packages/cli/src/commands/execute-batch.ts b/packages/cli/src/commands/execute-batch.ts index 71540952b5..fbbecd2cbb 100644 --- a/packages/cli/src/commands/execute-batch.ts +++ b/packages/cli/src/commands/execute-batch.ts @@ -167,6 +167,7 @@ export class ExecuteBatch extends BaseCommand { async init() { await super.init(); await this.initBinaryDataService(); + await this.initDataDeduplicationService(); await this.initExternalHooks(); } diff --git a/packages/cli/src/commands/execute.ts b/packages/cli/src/commands/execute.ts index 9a901fdfc5..fd49a2b619 100644 --- a/packages/cli/src/commands/execute.ts +++ b/packages/cli/src/commands/execute.ts @@ 
-31,6 +31,7 @@ export class Execute extends BaseCommand { async init() { await super.init(); await this.initBinaryDataService(); + await this.initDataDeduplicationService(); await this.initExternalHooks(); } diff --git a/packages/cli/src/commands/start.ts b/packages/cli/src/commands/start.ts index 36e690c37e..b46ef52ea4 100644 --- a/packages/cli/src/commands/start.ts +++ b/packages/cli/src/commands/start.ts @@ -1,6 +1,6 @@ /* eslint-disable @typescript-eslint/no-unsafe-call */ /* eslint-disable @typescript-eslint/no-unsafe-member-access */ -import { Flags, type Config } from '@oclif/core'; +import { Flags } from '@oclif/core'; import glob from 'fast-glob'; import { createReadStream, createWriteStream, existsSync } from 'fs'; import { mkdir } from 'fs/promises'; @@ -21,11 +21,11 @@ import { MessageEventBus } from '@/eventbus/message-event-bus/message-event-bus' import { EventService } from '@/events/event.service'; import { ExecutionService } from '@/executions/execution.service'; import { License } from '@/license'; -import { SingleMainTaskManager } from '@/runners/task-managers/single-main-task-manager'; +import { LocalTaskManager } from '@/runners/task-managers/local-task-manager'; import { TaskManager } from '@/runners/task-managers/task-manager'; -import { Publisher } from '@/scaling/pubsub/publisher.service'; +import { PubSubHandler } from '@/scaling/pubsub/pubsub-handler'; +import { Subscriber } from '@/scaling/pubsub/subscriber.service'; import { Server } from '@/server'; -import { OrchestrationHandlerMainService } from '@/services/orchestration/main/orchestration.handler.main.service'; import { OrchestrationService } from '@/services/orchestration.service'; import { OwnershipService } from '@/services/ownership.service'; import { PruningService } from '@/services/pruning.service'; @@ -70,11 +70,6 @@ export class Start extends BaseCommand { override needsCommunityPackages = true; - constructor(argv: string[], cmdConfig: Config) { - super(argv, cmdConfig); - 
this.setInstanceQueueModeId(); - } - /** * Opens the UI in browser */ @@ -174,8 +169,9 @@ export class Start extends BaseCommand { this.logger.info('Initializing n8n process'); if (config.getEnv('executions.mode') === 'queue') { - this.logger.debug('Main Instance running in queue mode'); - this.logger.debug(`Queue mode id: ${this.queueModeId}`); + const scopedLogger = this.logger.withScope('scaling'); + scopedLogger.debug('Starting main instance in scaling mode'); + scopedLogger.debug(`Host ID: ${this.instanceSettings.hostId}`); } const { flags } = await this.parse(Start); @@ -212,6 +208,8 @@ export class Start extends BaseCommand { this.logger.debug('Wait tracker init complete'); await this.initBinaryDataService(); this.logger.debug('Binary data service init complete'); + await this.initDataDeduplicationService(); + this.logger.debug('Data deduplication service init complete'); await this.initExternalHooks(); this.logger.debug('External hooks init complete'); await this.initExternalSecrets(); @@ -224,7 +222,7 @@ export class Start extends BaseCommand { } if (!this.globalConfig.taskRunners.disabled) { - Container.set(TaskManager, new SingleMainTaskManager()); + Container.set(TaskManager, new LocalTaskManager()); const { TaskRunnerServer } = await import('@/runners/task-runner-server'); const taskRunnerServer = Container.get(TaskRunnerServer); await taskRunnerServer.start(); @@ -252,10 +250,13 @@ export class Start extends BaseCommand { await orchestrationService.init(); - await Container.get(OrchestrationHandlerMainService).initWithOptions({ - queueModeId: this.queueModeId, - publisher: Container.get(Publisher), - }); + Container.get(PubSubHandler).init(); + + const subscriber = Container.get(Subscriber); + await subscriber.subscribe('n8n.commands'); + await subscriber.subscribe('n8n.worker-response'); + + this.logger.withScope('scaling').debug('Pubsub setup completed'); if (!orchestrationService.isMultiMainSetupEnabled) return; diff --git 
a/packages/cli/src/commands/webhook.ts b/packages/cli/src/commands/webhook.ts index 5a5d656c8c..d9d2f011fb 100644 --- a/packages/cli/src/commands/webhook.ts +++ b/packages/cli/src/commands/webhook.ts @@ -1,4 +1,4 @@ -import { Flags, type Config } from '@oclif/core'; +import { Flags } from '@oclif/core'; import { ApplicationError } from 'n8n-workflow'; import { Container } from 'typedi'; @@ -6,7 +6,7 @@ import { ActiveExecutions } from '@/active-executions'; import config from '@/config'; import { PubSubHandler } from '@/scaling/pubsub/pubsub-handler'; import { Subscriber } from '@/scaling/pubsub/subscriber.service'; -import { OrchestrationWebhookService } from '@/services/orchestration/webhook/orchestration.webhook.service'; +import { OrchestrationService } from '@/services/orchestration.service'; import { WebhookServer } from '@/webhooks/webhook-server'; import { BaseCommand } from './base-command'; @@ -24,14 +24,6 @@ export class Webhook extends BaseCommand { override needsCommunityPackages = true; - constructor(argv: string[], cmdConfig: Config) { - super(argv, cmdConfig); - if (this.queueModeId) { - this.logger.debug(`Webhook Instance queue mode id: ${this.queueModeId}`); - } - this.setInstanceQueueModeId(); - } - /** * Stops n8n in a graceful way. 
* Make for example sure that all the webhooks from third party services @@ -71,8 +63,8 @@ export class Webhook extends BaseCommand { await this.initCrashJournal(); this.logger.debug('Crash journal initialized'); - this.logger.info('Initializing n8n webhook process'); - this.logger.debug(`Queue mode id: ${this.queueModeId}`); + this.logger.info('Starting n8n webhook process...'); + this.logger.debug(`Host ID: ${this.instanceSettings.hostId}`); await super.init(); @@ -82,6 +74,8 @@ export class Webhook extends BaseCommand { this.logger.debug('Orchestration init complete'); await this.initBinaryDataService(); this.logger.debug('Binary data service init complete'); + await this.initDataDeduplicationService(); + this.logger.debug('Data deduplication service init complete'); await this.initExternalHooks(); this.logger.debug('External hooks init complete'); await this.initExternalSecrets(); @@ -98,7 +92,6 @@ export class Webhook extends BaseCommand { const { ScalingService } = await import('@/scaling/scaling.service'); await Container.get(ScalingService).setupQueue(); await this.server.start(); - this.logger.debug(`Webhook listener ID: ${this.server.uniqueInstanceId}`); this.logger.info('Webhook listener waiting for requests.'); // Make sure that the process does not close @@ -110,11 +103,9 @@ export class Webhook extends BaseCommand { } async initOrchestration() { - await Container.get(OrchestrationWebhookService).init(); + await Container.get(OrchestrationService).init(); Container.get(PubSubHandler).init(); - const subscriber = Container.get(Subscriber); - await subscriber.subscribe('n8n.commands'); - subscriber.setCommandMessageHandler(); + await Container.get(Subscriber).subscribe('n8n.commands'); } } diff --git a/packages/cli/src/commands/worker.ts b/packages/cli/src/commands/worker.ts index 6345db6763..96f151f547 100644 --- a/packages/cli/src/commands/worker.ts +++ b/packages/cli/src/commands/worker.ts @@ -1,18 +1,20 @@ import { Flags, type Config } from 
'@oclif/core'; -import { ApplicationError } from 'n8n-workflow'; import { Container } from 'typedi'; import config from '@/config'; import { N8N_VERSION, inTest } from '@/constants'; +import { WorkerMissingEncryptionKey } from '@/errors/worker-missing-encryption-key.error'; import { EventMessageGeneric } from '@/eventbus/event-message-classes/event-message-generic'; import { MessageEventBus } from '@/eventbus/message-event-bus/message-event-bus'; import { LogStreamingEventRelay } from '@/events/relays/log-streaming.event-relay'; -import { JobProcessor } from '@/scaling/job-processor'; +import { Logger } from '@/logging/logger.service'; +import { LocalTaskManager } from '@/runners/task-managers/local-task-manager'; +import { TaskManager } from '@/runners/task-managers/task-manager'; import { PubSubHandler } from '@/scaling/pubsub/pubsub-handler'; import { Subscriber } from '@/scaling/pubsub/subscriber.service'; import type { ScalingService } from '@/scaling/scaling.service'; import type { WorkerServerEndpointsConfig } from '@/scaling/worker-server'; -import { OrchestrationWorkerService } from '@/services/orchestration/worker/orchestration.worker.service'; +import { OrchestrationService } from '@/services/orchestration.service'; import { BaseCommand } from './base-command'; @@ -39,8 +41,6 @@ export class Worker extends BaseCommand { scalingService: ScalingService; - jobProcessor: JobProcessor; - override needsCommunityPackages = true; /** @@ -49,27 +49,27 @@ export class Worker extends BaseCommand { * get removed. 
*/ async stopProcess() { - this.logger.info('Stopping n8n...'); + this.logger.info('Stopping worker...'); try { await this.externalHooks?.run('n8n.stop', []); } catch (error) { - await this.exitWithCrash('There was an error shutting down n8n.', error); + await this.exitWithCrash('Error shutting down worker', error); } await this.exitSuccessFully(); } constructor(argv: string[], cmdConfig: Config) { - super(argv, cmdConfig); + if (!process.env.N8N_ENCRYPTION_KEY) throw new WorkerMissingEncryptionKey(); - if (!process.env.N8N_ENCRYPTION_KEY) { - throw new ApplicationError( - 'Missing encryption key. Worker started without the required N8N_ENCRYPTION_KEY env var. More information: https://docs.n8n.io/hosting/configuration/configuration-examples/encryption-key/', - ); + if (config.getEnv('executions.mode') !== 'queue') { + config.set('executions.mode', 'queue'); } - this.setInstanceQueueModeId(); + super(argv, cmdConfig); + + this.logger = Container.get(Logger).withScope('scaling'); } async init() { @@ -84,7 +84,7 @@ export class Worker extends BaseCommand { await this.initCrashJournal(); this.logger.debug('Starting n8n worker...'); - this.logger.debug(`Queue mode id: ${this.queueModeId}`); + this.logger.debug(`Host ID: ${this.instanceSettings.hostId}`); await this.setConcurrency(); await super.init(); @@ -93,6 +93,8 @@ export class Worker extends BaseCommand { this.logger.debug('License init complete'); await this.initBinaryDataService(); this.logger.debug('Binary data service init complete'); + await this.initDataDeduplicationService(); + this.logger.debug('Data deduplication service init complete'); await this.initExternalHooks(); this.logger.debug('External hooks init complete'); await this.initExternalSecrets(); @@ -107,15 +109,26 @@ export class Worker extends BaseCommand { new EventMessageGeneric({ eventName: 'n8n.worker.started', payload: { - workerId: this.queueModeId, + workerId: this.instanceSettings.hostId, }, }), ); + + if 
(!this.globalConfig.taskRunners.disabled) { + Container.set(TaskManager, new LocalTaskManager()); + const { TaskRunnerServer } = await import('@/runners/task-runner-server'); + const taskRunnerServer = Container.get(TaskRunnerServer); + await taskRunnerServer.start(); + + const { TaskRunnerProcess } = await import('@/runners/task-runner-process'); + const runnerProcess = Container.get(TaskRunnerProcess); + await runnerProcess.start(); + } } async initEventBus() { await Container.get(MessageEventBus).initialize({ - workerId: this.queueModeId, + workerId: this.instanceSettings.hostId, }); Container.get(LogStreamingEventRelay).init(); } @@ -127,12 +140,12 @@ export class Worker extends BaseCommand { * The subscription connection adds a handler to handle the command messages */ async initOrchestration() { - await Container.get(OrchestrationWorkerService).init(); + await Container.get(OrchestrationService).init(); Container.get(PubSubHandler).init(); - const subscriber = Container.get(Subscriber); - await subscriber.subscribe('n8n.commands'); - subscriber.setCommandMessageHandler(); + await Container.get(Subscriber).subscribe('n8n.commands'); + + this.logger.withScope('scaling').debug('Pubsub setup ready'); } async setConcurrency() { @@ -150,8 +163,6 @@ export class Worker extends BaseCommand { await this.scalingService.setupQueue(); this.scalingService.setupWorker(this.concurrency); - - this.jobProcessor = Container.get(JobProcessor); } async run() { diff --git a/packages/cli/src/concurrency/__tests__/concurrency-control.service.test.ts b/packages/cli/src/concurrency/__tests__/concurrency-control.service.test.ts index 6774708099..6511ae4d03 100644 --- a/packages/cli/src/concurrency/__tests__/concurrency-control.service.test.ts +++ b/packages/cli/src/concurrency/__tests__/concurrency-control.service.test.ts @@ -11,13 +11,13 @@ import type { ExecutionRepository } from '@/databases/repositories/execution.rep import { InvalidConcurrencyLimitError } from 
'@/errors/invalid-concurrency-limit.error'; import type { EventService } from '@/events/event.service'; import type { IExecutingWorkflowData } from '@/interfaces'; -import type { Logger } from '@/logging/logger.service'; import type { Telemetry } from '@/telemetry'; +import { mockLogger } from '@test/mocking'; import { ConcurrencyQueue } from '../concurrency-queue'; describe('ConcurrencyControlService', () => { - const logger = mock(); + const logger = mockLogger(); const executionRepository = mock(); const telemetry = mock(); const eventService = mock(); diff --git a/packages/cli/src/concurrency/concurrency-control.service.ts b/packages/cli/src/concurrency/concurrency-control.service.ts index 1665279352..cf537870f2 100644 --- a/packages/cli/src/concurrency/concurrency-control.service.ts +++ b/packages/cli/src/concurrency/concurrency-control.service.ts @@ -8,7 +8,6 @@ import { UnknownExecutionModeError } from '@/errors/unknown-execution-mode.error import { EventService } from '@/events/event.service'; import type { IExecutingWorkflowData } from '@/interfaces'; import { Logger } from '@/logging/logger.service'; -import type { LogMetadata } from '@/logging/types'; import { Telemetry } from '@/telemetry'; import { ConcurrencyQueue } from './concurrency-queue'; @@ -34,6 +33,8 @@ export class ConcurrencyControlService { private readonly telemetry: Telemetry, private readonly eventService: EventService, ) { + this.logger = this.logger.withScope('executions'); + this.productionLimit = config.getEnv('executions.concurrency.productionLimit'); if (this.productionLimit === 0) { @@ -46,7 +47,6 @@ export class ConcurrencyControlService { if (this.productionLimit === -1 || config.getEnv('executions.mode') === 'queue') { this.isEnabled = false; - this.log('Service disabled'); return; } @@ -65,12 +65,12 @@ export class ConcurrencyControlService { }); this.productionQueue.on('execution-throttled', ({ executionId }) => { - this.log('Execution throttled', { executionId }); + 
this.logger.debug('Execution throttled', { executionId }); this.eventService.emit('execution-throttled', { executionId }); }); this.productionQueue.on('execution-released', async (executionId) => { - this.log('Execution released', { executionId }); + this.logger.debug('Execution released', { executionId }); }); } @@ -144,9 +144,9 @@ export class ConcurrencyControlService { // ---------------------------------- private logInit() { - this.log('Enabled'); + this.logger.debug('Enabled'); - this.log( + this.logger.debug( [ 'Production execution concurrency is', this.productionLimit === -1 ? 'unlimited' : 'limited to ' + this.productionLimit.toString(), @@ -171,10 +171,6 @@ export class ConcurrencyControlService { throw new UnknownExecutionModeError(mode); } - private log(message: string, metadata?: LogMetadata) { - this.logger.debug(['[Concurrency Control]', message].join(' '), metadata); - } - private shouldReport(capacity: number) { return config.getEnv('deployment.type') === 'cloud' && this.limitsToReport.includes(capacity); } diff --git a/packages/cli/src/config/schema.ts b/packages/cli/src/config/schema.ts index 047df9341e..d2bb5297d4 100644 --- a/packages/cli/src/config/schema.ts +++ b/packages/cli/src/config/schema.ts @@ -491,11 +491,6 @@ export const schema = { default: 'n8n', env: 'N8N_REDIS_KEY_PREFIX', }, - queueModeId: { - doc: 'Unique ID for this n8n instance, is usually set automatically by n8n during startup', - format: String, - default: '', - }, }, /** diff --git a/packages/cli/src/config/types.ts b/packages/cli/src/config/types.ts index 0d3c5db2cb..78f2358f5d 100644 --- a/packages/cli/src/config/types.ts +++ b/packages/cli/src/config/types.ts @@ -1,5 +1,6 @@ import type { RedisOptions } from 'ioredis'; import type { BinaryData } from 'n8n-core'; +import type { IProcessedDataConfig } from 'n8n-workflow'; import type { schema } from './schema'; @@ -76,6 +77,7 @@ type ToReturnType = T extends NumericPath type ExceptionPaths = { 'queue.bull.redis': 
RedisOptions; binaryDataManager: BinaryData.Config; + processedDataManager: IProcessedDataConfig; 'userManagement.isInstanceOwnerSetUp': boolean; 'ui.banners.dismissed': string[] | undefined; }; diff --git a/packages/cli/src/constants.ts b/packages/cli/src/constants.ts index 5d458ca376..04512e8be9 100644 --- a/packages/cli/src/constants.ts +++ b/packages/cli/src/constants.ts @@ -91,6 +91,7 @@ export const LICENSE_FEATURES = { PROJECT_ROLE_EDITOR: 'feat:projectRole:editor', PROJECT_ROLE_VIEWER: 'feat:projectRole:viewer', AI_ASSISTANT: 'feat:aiAssistant', + ASK_AI: 'feat:askAi', COMMUNITY_NODES_CUSTOM_REGISTRY: 'feat:communityNodes:customRegistry', } as const; diff --git a/packages/cli/src/controllers/ai-assistant.controller.ts b/packages/cli/src/controllers/ai.controller.ts similarity index 65% rename from packages/cli/src/controllers/ai-assistant.controller.ts rename to packages/cli/src/controllers/ai.controller.ts index c910be0a24..1957db2971 100644 --- a/packages/cli/src/controllers/ai-assistant.controller.ts +++ b/packages/cli/src/controllers/ai.controller.ts @@ -7,18 +7,18 @@ import { WritableStream } from 'node:stream/web'; import { Post, RestController } from '@/decorators'; import { InternalServerError } from '@/errors/response-errors/internal-server.error'; import { AiAssistantRequest } from '@/requests'; -import { AiAssistantService } from '@/services/ai-assistant.service'; +import { AiService } from '@/services/ai.service'; type FlushableResponse = Response & { flush: () => void }; -@RestController('/ai-assistant') -export class AiAssistantController { - constructor(private readonly aiAssistantService: AiAssistantService) {} +@RestController('/ai') +export class AiController { + constructor(private readonly aiService: AiService) {} @Post('/chat', { rateLimit: { limit: 100 } }) async chat(req: AiAssistantRequest.Chat, res: FlushableResponse) { try { - const aiResponse = await this.aiAssistantService.chat(req.body, req.user); + const aiResponse = await 
this.aiService.chat(req.body, req.user); if (aiResponse.body) { res.header('Content-type', 'application/json-lines').flush(); await aiResponse.body.pipeTo( @@ -40,10 +40,21 @@ export class AiAssistantController { @Post('/chat/apply-suggestion') async applySuggestion( - req: AiAssistantRequest.ApplySuggestion, + req: AiAssistantRequest.ApplySuggestionPayload, ): Promise { try { - return await this.aiAssistantService.applySuggestion(req.body, req.user); + return await this.aiService.applySuggestion(req.body, req.user); + } catch (e) { + assert(e instanceof Error); + ErrorReporterProxy.error(e); + throw new InternalServerError(`Something went wrong: ${e.message}`); + } + } + + @Post('/ask-ai') + async askAi(req: AiAssistantRequest.AskAiPayload): Promise { + try { + return await this.aiService.askAi(req.body, req.user); } catch (e) { assert(e instanceof Error); ErrorReporterProxy.error(e); diff --git a/packages/cli/src/controllers/debug.controller.ts b/packages/cli/src/controllers/debug.controller.ts index 9fd2b067d3..1a2b08d550 100644 --- a/packages/cli/src/controllers/debug.controller.ts +++ b/packages/cli/src/controllers/debug.controller.ts @@ -1,3 +1,5 @@ +import { InstanceSettings } from 'n8n-core'; + import { ActiveWorkflowManager } from '@/active-workflow-manager'; import { WorkflowRepository } from '@/databases/repositories/workflow.repository'; import { Get, RestController } from '@/decorators'; @@ -9,6 +11,7 @@ export class DebugController { private readonly orchestrationService: OrchestrationService, private readonly activeWorkflowManager: ActiveWorkflowManager, private readonly workflowRepository: WorkflowRepository, + private readonly instanceSettings: InstanceSettings, ) {} @Get('/multi-main-setup', { skipAuth: true }) @@ -24,9 +27,9 @@ export class DebugController { const activationErrors = await this.activeWorkflowManager.getAllWorkflowActivationErrors(); return { - instanceId: this.orchestrationService.instanceId, + instanceId: 
this.instanceSettings.instanceId, leaderKey, - isLeader: this.orchestrationService.isLeader, + isLeader: this.instanceSettings.isLeader, activeWorkflows: { webhooks, // webhook-based active workflows triggersAndPollers, // poller- and trigger-based active workflows diff --git a/packages/cli/src/controllers/e2e.controller.ts b/packages/cli/src/controllers/e2e.controller.ts index 06c4f68c7e..9c5a1ff36d 100644 --- a/packages/cli/src/controllers/e2e.controller.ts +++ b/packages/cli/src/controllers/e2e.controller.ts @@ -92,6 +92,7 @@ export class E2EController { [LICENSE_FEATURES.PROJECT_ROLE_VIEWER]: false, [LICENSE_FEATURES.AI_ASSISTANT]: false, [LICENSE_FEATURES.COMMUNITY_NODES_CUSTOM_REGISTRY]: false, + [LICENSE_FEATURES.ASK_AI]: false, }; private numericFeatures: Record = { diff --git a/packages/cli/src/controllers/orchestration.controller.ts b/packages/cli/src/controllers/orchestration.controller.ts index a5235d1169..db1d690a3e 100644 --- a/packages/cli/src/controllers/orchestration.controller.ts +++ b/packages/cli/src/controllers/orchestration.controller.ts @@ -28,11 +28,4 @@ export class OrchestrationController { if (!this.licenseService.isWorkerViewLicensed()) return; return await this.orchestrationService.getWorkerStatus(); } - - @GlobalScope('orchestration:list') - @Post('/worker/ids') - async getWorkerIdsAll() { - if (!this.licenseService.isWorkerViewLicensed()) return; - return await this.orchestrationService.getWorkerIds(); - } } diff --git a/packages/cli/src/databases/entities/index.ts b/packages/cli/src/databases/entities/index.ts index 1993d20a75..39f67b3252 100644 --- a/packages/cli/src/databases/entities/index.ts +++ b/packages/cli/src/databases/entities/index.ts @@ -13,6 +13,7 @@ import { ExecutionMetadata } from './execution-metadata'; import { InstalledNodes } from './installed-nodes'; import { InstalledPackages } from './installed-packages'; import { InvalidAuthToken } from './invalid-auth-token'; +import { ProcessedData } from './processed-data'; 
import { Project } from './project'; import { ProjectRelation } from './project-relation'; import { Settings } from './settings'; @@ -56,4 +57,5 @@ export const entities = { Project, ProjectRelation, ApiKey, + ProcessedData, }; diff --git a/packages/cli/src/databases/entities/processed-data.ts b/packages/cli/src/databases/entities/processed-data.ts new file mode 100644 index 0000000000..bd638fca95 --- /dev/null +++ b/packages/cli/src/databases/entities/processed-data.ts @@ -0,0 +1,22 @@ +import { Column, Entity, PrimaryColumn } from '@n8n/typeorm'; + +import type { IProcessedDataEntries, IProcessedDataLatest } from '@/interfaces'; + +import { jsonColumnType, WithTimestamps } from './abstract-entity'; +import { objectRetriever } from '../utils/transformers'; + +@Entity() +export class ProcessedData extends WithTimestamps { + @PrimaryColumn('varchar') + context: string; + + @PrimaryColumn() + workflowId: string; + + @Column({ + type: jsonColumnType, + nullable: true, + transformer: objectRetriever, + }) + value: IProcessedDataEntries | IProcessedDataLatest; +} diff --git a/packages/cli/src/databases/migrations/common/1726606152711-CreateProcessedDataTable.ts b/packages/cli/src/databases/migrations/common/1726606152711-CreateProcessedDataTable.ts new file mode 100644 index 0000000000..86992a0580 --- /dev/null +++ b/packages/cli/src/databases/migrations/common/1726606152711-CreateProcessedDataTable.ts @@ -0,0 +1,23 @@ +import type { MigrationContext, ReversibleMigration } from '@/databases/types'; + +const processedDataTableName = 'processed_data'; + +export class CreateProcessedDataTable1726606152711 implements ReversibleMigration { + async up({ schemaBuilder: { createTable, column } }: MigrationContext) { + await createTable(processedDataTableName) + .withColumns( + column('workflowId').varchar(36).notNull.primary, + column('value').varchar(255).notNull, + column('context').varchar(255).notNull.primary, + ) + .withForeignKey('workflowId', { + tableName: 
'workflow_entity', + columnName: 'id', + onDelete: 'CASCADE', + }).withTimestamps; + } + + async down({ schemaBuilder: { dropTable } }: MigrationContext) { + await dropTable(processedDataTableName); + } +} diff --git a/packages/cli/src/databases/migrations/mysqldb/index.ts b/packages/cli/src/databases/migrations/mysqldb/index.ts index 07b910b949..1dcca1e592 100644 --- a/packages/cli/src/databases/migrations/mysqldb/index.ts +++ b/packages/cli/src/databases/migrations/mysqldb/index.ts @@ -64,6 +64,7 @@ import { CreateInvalidAuthTokenTable1723627610222 } from '../common/172362761022 import { RefactorExecutionIndices1723796243146 } from '../common/1723796243146-RefactorExecutionIndices'; import { CreateAnnotationTables1724753530828 } from '../common/1724753530828-CreateExecutionAnnotationTables'; import { AddApiKeysTable1724951148974 } from '../common/1724951148974-AddApiKeysTable'; +import { CreateProcessedDataTable1726606152711 } from '../common/1726606152711-CreateProcessedDataTable'; import { SeparateExecutionCreationFromStart1727427440136 } from '../common/1727427440136-SeparateExecutionCreationFromStart'; export const mysqlMigrations: Migration[] = [ @@ -132,4 +133,5 @@ export const mysqlMigrations: Migration[] = [ CreateAnnotationTables1724753530828, AddApiKeysTable1724951148974, SeparateExecutionCreationFromStart1727427440136, + CreateProcessedDataTable1726606152711, ]; diff --git a/packages/cli/src/databases/migrations/postgresdb/index.ts b/packages/cli/src/databases/migrations/postgresdb/index.ts index 21b90e201d..eb0e2bd946 100644 --- a/packages/cli/src/databases/migrations/postgresdb/index.ts +++ b/packages/cli/src/databases/migrations/postgresdb/index.ts @@ -64,6 +64,7 @@ import { CreateInvalidAuthTokenTable1723627610222 } from '../common/172362761022 import { RefactorExecutionIndices1723796243146 } from '../common/1723796243146-RefactorExecutionIndices'; import { CreateAnnotationTables1724753530828 } from 
'../common/1724753530828-CreateExecutionAnnotationTables'; import { AddApiKeysTable1724951148974 } from '../common/1724951148974-AddApiKeysTable'; +import { CreateProcessedDataTable1726606152711 } from '../common/1726606152711-CreateProcessedDataTable'; import { SeparateExecutionCreationFromStart1727427440136 } from '../common/1727427440136-SeparateExecutionCreationFromStart'; export const postgresMigrations: Migration[] = [ @@ -132,4 +133,5 @@ export const postgresMigrations: Migration[] = [ CreateAnnotationTables1724753530828, AddApiKeysTable1724951148974, SeparateExecutionCreationFromStart1727427440136, + CreateProcessedDataTable1726606152711, ]; diff --git a/packages/cli/src/databases/migrations/sqlite/index.ts b/packages/cli/src/databases/migrations/sqlite/index.ts index 2828bb3f59..797b26752c 100644 --- a/packages/cli/src/databases/migrations/sqlite/index.ts +++ b/packages/cli/src/databases/migrations/sqlite/index.ts @@ -61,6 +61,7 @@ import { AddConstraintToExecutionMetadata1720101653148 } from '../common/1720101 import { CreateInvalidAuthTokenTable1723627610222 } from '../common/1723627610222-CreateInvalidAuthTokenTable'; import { RefactorExecutionIndices1723796243146 } from '../common/1723796243146-RefactorExecutionIndices'; import { CreateAnnotationTables1724753530828 } from '../common/1724753530828-CreateExecutionAnnotationTables'; +import { CreateProcessedDataTable1726606152711 } from '../common/1726606152711-CreateProcessedDataTable'; import { SeparateExecutionCreationFromStart1727427440136 } from '../common/1727427440136-SeparateExecutionCreationFromStart'; const sqliteMigrations: Migration[] = [ @@ -126,6 +127,7 @@ const sqliteMigrations: Migration[] = [ CreateAnnotationTables1724753530828, AddApiKeysTable1724951148974, SeparateExecutionCreationFromStart1727427440136, + CreateProcessedDataTable1726606152711, ]; export { sqliteMigrations }; diff --git a/packages/cli/src/databases/repositories/__tests__/execution.repository.test.ts 
b/packages/cli/src/databases/repositories/__tests__/execution.repository.test.ts index 48f3119780..10d1371f37 100644 --- a/packages/cli/src/databases/repositories/__tests__/execution.repository.test.ts +++ b/packages/cli/src/databases/repositories/__tests__/execution.repository.test.ts @@ -12,7 +12,9 @@ import { mockInstance, mockEntityManager } from '@test/mocking'; describe('ExecutionRepository', () => { const entityManager = mockEntityManager(ExecutionEntity); - const globalConfig = mockInstance(GlobalConfig, { logging: { outputs: ['console'] } }); + const globalConfig = mockInstance(GlobalConfig, { + logging: { outputs: ['console'], scopes: [] }, + }); const binaryDataService = mockInstance(BinaryDataService); const executionRepository = Container.get(ExecutionRepository); const mockDate = new Date('2023-12-28 12:34:56.789Z'); diff --git a/packages/cli/src/databases/repositories/processed-data.repository.ts b/packages/cli/src/databases/repositories/processed-data.repository.ts new file mode 100644 index 0000000000..f02fbf270a --- /dev/null +++ b/packages/cli/src/databases/repositories/processed-data.repository.ts @@ -0,0 +1,11 @@ +import { DataSource, Repository } from '@n8n/typeorm'; +import { Service } from 'typedi'; + +import { ProcessedData } from '../entities/processed-data'; + +@Service() +export class ProcessedDataRepository extends Repository { + constructor(dataSource: DataSource) { + super(ProcessedData, dataSource.manager); + } +} diff --git a/packages/cli/src/deduplication/deduplication-helper.ts b/packages/cli/src/deduplication/deduplication-helper.ts new file mode 100644 index 0000000000..a913a21a8c --- /dev/null +++ b/packages/cli/src/deduplication/deduplication-helper.ts @@ -0,0 +1,356 @@ +import { createHash } from 'crypto'; +import { + type ICheckProcessedContextData, + type IDataDeduplicator, + type ICheckProcessedOptions, + type IDeduplicationOutput, + type DeduplicationScope, + type DeduplicationItemTypes, + type DeduplicationMode, + 
tryToParseDateTime, +} from 'n8n-workflow'; +import * as assert from 'node:assert/strict'; +import { Container } from 'typedi'; + +import type { ProcessedData } from '@/databases/entities/processed-data'; +import { ProcessedDataRepository } from '@/databases/repositories/processed-data.repository'; +import { DeduplicationError } from '@/errors/deduplication.error'; +import type { IProcessedDataEntries, IProcessedDataLatest } from '@/interfaces'; + +export class DeduplicationHelper implements IDataDeduplicator { + private static sortEntries( + items: DeduplicationItemTypes[], + mode: DeduplicationMode, + ): DeduplicationItemTypes[] { + return items.slice().sort((a, b) => DeduplicationHelper.compareValues(mode, a, b)); + } + /** + * Compares two values based on the provided mode ('latestIncrementalKey' or 'latestDate'). + * + * @param {DeduplicationMode} mode - The mode to determine the comparison logic. Can be either: + * - 'latestIncrementalKey': Compares numeric values and returns true if `value1` is greater than `value2`. + * - 'latestDate': Compares date strings and returns true if `value1` is a later date than `value2`. + * + * @param {DeduplicationItemTypes} value1 - The first value to compare. + * - If the mode is 'latestIncrementalKey', this should be a numeric value or a string that can be converted to a number. + * - If the mode is 'latestDate', this should be a valid date string. + * + * @param {DeduplicationItemTypes} value2 - The second value to compare. + * - If the mode is 'latestIncrementalKey', this should be a numeric value or a string that can be converted to a number. + * - If the mode is 'latestDate', this should be a valid date string. + * + * @returns {boolean} - Returns `true` if `value1` is greater than `value2` based on the comparison mode. + * - In 'latestIncrementalKey' mode, it returns `true` if `value1` is numerically greater than `value2`. + * - In 'latestDate' mode, it returns `true` if `value1` is a later date than `value2`. 
+ * + * @throws {DeduplicationError} - Throws an error if: + * - The mode is 'latestIncrementalKey' and the values are not valid numbers. + * - The mode is 'latestDate' and the values are not valid date strings. + * - An unsupported mode is provided. + */ + + private static compareValues( + mode: DeduplicationMode, + value1: DeduplicationItemTypes, + value2: DeduplicationItemTypes, + ): 1 | 0 | -1 { + if (mode === 'latestIncrementalKey') { + const num1 = Number(value1); + const num2 = Number(value2); + if (!isNaN(num1) && !isNaN(num2)) { + return num1 === num2 ? 0 : num1 > num2 ? 1 : -1; + } + throw new DeduplicationError( + 'Invalid value. Only numbers are supported in mode "latestIncrementalKey"', + ); + } else if (mode === 'latestDate') { + try { + const date1 = tryToParseDateTime(value1); + const date2 = tryToParseDateTime(value2); + + return date1 === date2 ? 0 : date1 > date2 ? 1 : -1; + } catch (error) { + throw new DeduplicationError( + 'Invalid value. Only valid dates are supported in mode "latestDate"', + ); + } + } else { + throw new DeduplicationError( + "Invalid mode. 
Only 'latestIncrementalKey' and 'latestDate' are supported.", + ); + } + } + + private static createContext( + scope: DeduplicationScope, + contextData: ICheckProcessedContextData, + ): string { + if (scope === 'node') { + if (!contextData.node) { + throw new DeduplicationError( + "No node information has been provided and so cannot use scope 'node'", + ); + } + // Use the node ID to make sure that the data can still be accessed and does not get deleted + // whenever the node gets renamed + return `n:${contextData.node.id}`; + } + return ''; + } + + private static createValueHash(value: DeduplicationItemTypes): string { + return createHash('md5').update(value.toString()).digest('base64'); + } + + private async findProcessedData( + scope: DeduplicationScope, + contextData: ICheckProcessedContextData, + ): Promise { + return await Container.get(ProcessedDataRepository).findOne({ + where: { + workflowId: contextData.workflow.id, + context: DeduplicationHelper.createContext(scope, contextData), + }, + }); + } + + private validateMode(processedData: ProcessedData | null, options: ICheckProcessedOptions) { + if (processedData && processedData.value.mode !== options.mode) { + throw new DeduplicationError( + 'Deduplication data was originally saved with an incompatible setting of the ‘Keep Items Where’ parameter. 
Try ’Clean Database’ operation to reset.', + ); + } + } + + private processedDataHasEntries( + data: IProcessedDataEntries | IProcessedDataLatest, + ): data is IProcessedDataEntries { + return Array.isArray(data.data); + } + + private processedDataIsLatest( + data: IProcessedDataEntries | IProcessedDataLatest, + ): data is IProcessedDataLatest { + return data && !Array.isArray(data.data); + } + + private async handleLatestModes( + items: DeduplicationItemTypes[], + contextData: ICheckProcessedContextData, + options: ICheckProcessedOptions, + processedData: ProcessedData | null, + dbContext: string, + ): Promise { + const incomingItems = DeduplicationHelper.sortEntries(items, options.mode); + + if (!processedData) { + // All items are new so add new entries + await Container.get(ProcessedDataRepository).insert({ + workflowId: contextData.workflow.id, + context: dbContext, + value: { + mode: options.mode, + data: incomingItems.pop(), + }, + }); + + return { + new: items, + processed: [], + }; + } + + const returnData: IDeduplicationOutput = { + new: [], + processed: [], + }; + + if (!this.processedDataIsLatest(processedData.value)) { + return returnData; + } + + let largestValue = processedData.value.data; + const processedDataValue = processedData.value; + + incomingItems.forEach((item) => { + if (DeduplicationHelper.compareValues(options.mode, item, processedDataValue.data) === 1) { + returnData.new.push(item); + if (DeduplicationHelper.compareValues(options.mode, item, largestValue) === 1) { + largestValue = item; + } + } else { + returnData.processed.push(item); + } + }); + + processedData.value.data = largestValue; + + await Container.get(ProcessedDataRepository).update( + { workflowId: processedData.workflowId, context: processedData.context }, + processedData, + ); + + return returnData; + } + + private async handleHashedItems( + items: DeduplicationItemTypes[], + contextData: ICheckProcessedContextData, + options: ICheckProcessedOptions, + processedData: 
ProcessedData | null, + dbContext: string, + ): Promise { + const hashedItems = items.map((item) => DeduplicationHelper.createValueHash(item)); + + if (!processedData) { + // All items are new so add new entries + if (options.maxEntries) { + hashedItems.splice(0, hashedItems.length - options.maxEntries); + } + await Container.get(ProcessedDataRepository).insert({ + workflowId: contextData.workflow.id, + context: dbContext, + value: { + mode: options.mode, + data: hashedItems, + }, + }); + + return { + new: items, + processed: [], + }; + } + + const returnData: IDeduplicationOutput = { + new: [], + processed: [], + }; + + if (!this.processedDataHasEntries(processedData.value)) { + return returnData; + } + + const processedDataValue = processedData.value; + const processedItemsSet = new Set(processedDataValue.data); + + hashedItems.forEach((item, index) => { + if (processedItemsSet.has(item)) { + returnData.processed.push(items[index]); + } else { + returnData.new.push(items[index]); + processedDataValue.data.push(item); + } + }); + + if (options.maxEntries) { + processedDataValue.data.splice(0, processedDataValue.data.length - options.maxEntries); + } + + await Container.get(ProcessedDataRepository).update( + { workflowId: processedData.workflowId, context: processedData.context }, + processedData, + ); + + return returnData; + } + + async checkProcessedAndRecord( + items: DeduplicationItemTypes[], + scope: DeduplicationScope, + contextData: ICheckProcessedContextData, + options: ICheckProcessedOptions, + ): Promise { + const dbContext = DeduplicationHelper.createContext(scope, contextData); + + assert.ok(contextData.workflow.id); + + const processedData = await this.findProcessedData(scope, contextData); + + this.validateMode(processedData, options); + + if (['latestIncrementalKey', 'latestDate'].includes(options.mode)) { + return await this.handleLatestModes(items, contextData, options, processedData, dbContext); + } + //mode entries + return await 
this.handleHashedItems(items, contextData, options, processedData, dbContext); + } + + async removeProcessed( + items: DeduplicationItemTypes[], + scope: DeduplicationScope, + contextData: ICheckProcessedContextData, + options: ICheckProcessedOptions, + ): Promise { + if (['latestIncrementalKey', 'latestDate'].includes(options.mode)) { + throw new DeduplicationError('Removing processed data is not possible in mode "latest"'); + } + assert.ok(contextData.workflow.id); + + const processedData = await Container.get(ProcessedDataRepository).findOne({ + where: { + workflowId: contextData.workflow.id, + context: DeduplicationHelper.createContext(scope, contextData), + }, + }); + + if (!processedData) { + return; + } + + const hashedItems = items.map((item) => DeduplicationHelper.createValueHash(item)); + + if (!this.processedDataHasEntries(processedData.value)) { + return; + } + + const processedDataValue = processedData.value; + + hashedItems.forEach((item) => { + const index = processedDataValue.data.findIndex((value) => value === item); + if (index !== -1) { + processedDataValue.data.splice(index, 1); + } + }); + + await Container.get(ProcessedDataRepository).update( + { workflowId: processedData.workflowId, context: processedData.context }, + processedData, + ); + } + + async clearAllProcessedItems( + scope: DeduplicationScope, + contextData: ICheckProcessedContextData, + ): Promise { + await Container.get(ProcessedDataRepository).delete({ + workflowId: contextData.workflow.id, + context: DeduplicationHelper.createContext(scope, contextData), + }); + } + + async getProcessedDataCount( + scope: DeduplicationScope, + contextData: ICheckProcessedContextData, + options: ICheckProcessedOptions, + ): Promise { + const processedDataRepository = Container.get(ProcessedDataRepository); + + const processedData = await processedDataRepository.findOne({ + where: { + workflowId: contextData.workflow.id, + context: DeduplicationHelper.createContext(scope, contextData), + }, + }); 
+ + if ( + options.mode === 'entries' && + processedData && + this.processedDataHasEntries(processedData.value) + ) { + return processedData.value.data.length; + } else { + return 0; + } + } +} diff --git a/packages/cli/src/deduplication/index.ts b/packages/cli/src/deduplication/index.ts new file mode 100644 index 0000000000..2cf2973d71 --- /dev/null +++ b/packages/cli/src/deduplication/index.ts @@ -0,0 +1,7 @@ +import { type IDataDeduplicator } from 'n8n-workflow'; + +import { DeduplicationHelper } from './deduplication-helper'; + +export function getDataDeduplicationService(): IDataDeduplicator { + return new DeduplicationHelper(); +} diff --git a/packages/cli/src/environments/source-control/source-control-import.service.ee.ts b/packages/cli/src/environments/source-control/source-control-import.service.ee.ts index b08ae27dd8..b5012d2762 100644 --- a/packages/cli/src/environments/source-control/source-control-import.service.ee.ts +++ b/packages/cli/src/environments/source-control/source-control-import.service.ee.ts @@ -2,7 +2,12 @@ import { In } from '@n8n/typeorm'; import glob from 'fast-glob'; import { Credentials, InstanceSettings } from 'n8n-core'; -import { ApplicationError, jsonParse, ErrorReporterProxy as ErrorReporter } from 'n8n-workflow'; +import { + ApplicationError, + jsonParse, + ErrorReporterProxy as ErrorReporter, + ensureError, +} from 'n8n-workflow'; import { readFile as fsReadFile } from 'node:fs/promises'; import path from 'path'; import { Container, Service } from 'typedi'; @@ -274,8 +279,9 @@ export class SourceControlImportService { this.logger.debug(`Reactivating workflow id ${existingWorkflow.id}`); await workflowManager.add(existingWorkflow.id, 'activate'); // update the versionId of the workflow to match the imported workflow - } catch (error) { - this.logger.error(`Failed to activate workflow ${existingWorkflow.id}`, error as Error); + } catch (e) { + const error = ensureError(e); + this.logger.error(`Failed to activate workflow 
${existingWorkflow.id}`, { error }); } finally { await Container.get(WorkflowRepository).update( { id: existingWorkflow.id }, @@ -377,8 +383,9 @@ export class SourceControlImportService { await fsReadFile(candidate.file, { encoding: 'utf8' }), { fallbackValue: { tags: [], mappings: [] } }, ); - } catch (error) { - this.logger.error(`Failed to import tags from file ${candidate.file}`, error as Error); + } catch (e) { + const error = ensureError(e); + this.logger.error(`Failed to import tags from file ${candidate.file}`, { error }); return; } @@ -444,8 +451,8 @@ export class SourceControlImportService { await fsReadFile(candidate.file, { encoding: 'utf8' }), { fallbackValue: [] }, ); - } catch (error) { - this.logger.error(`Failed to import tags from file ${candidate.file}`, error as Error); + } catch (e) { + this.logger.error(`Failed to import tags from file ${candidate.file}`, { error: e }); return; } const overriddenKeys = Object.keys(valueOverrides ?? {}); diff --git a/packages/cli/src/errors/deduplication.error.ts b/packages/cli/src/errors/deduplication.error.ts new file mode 100644 index 0000000000..8e9173abb9 --- /dev/null +++ b/packages/cli/src/errors/deduplication.error.ts @@ -0,0 +1,7 @@ +import { ApplicationError } from 'n8n-workflow'; + +export class DeduplicationError extends ApplicationError { + constructor(message: string) { + super(`Deduplication Failed: ${message}`); + } +} diff --git a/packages/cli/src/errors/worker-missing-encryption-key.error.ts b/packages/cli/src/errors/worker-missing-encryption-key.error.ts new file mode 100644 index 0000000000..29b8dad929 --- /dev/null +++ b/packages/cli/src/errors/worker-missing-encryption-key.error.ts @@ -0,0 +1,14 @@ +import { ApplicationError } from 'n8n-workflow'; + +export class WorkerMissingEncryptionKey extends ApplicationError { + constructor() { + super( + [ + 'Failed to start worker because of missing encryption key.', + 'Please set the `N8N_ENCRYPTION_KEY` env var when starting the worker.', + 'See: 
https://docs.n8n.io/hosting/configuration/configuration-examples/encryption-key/', + ].join(' '), + { level: 'warning' }, + ); + } +} diff --git a/packages/cli/src/eventbus/message-event-bus-destination/message-event-bus-destination-webhook.ee.ts b/packages/cli/src/eventbus/message-event-bus-destination/message-event-bus-destination-webhook.ee.ts index f6a35f4329..a5373d0cc5 100644 --- a/packages/cli/src/eventbus/message-event-bus-destination/message-event-bus-destination-webhook.ee.ts +++ b/packages/cli/src/eventbus/message-event-bus-destination/message-event-bus-destination-webhook.ee.ts @@ -180,7 +180,7 @@ export class MessageEventBusDestinationWebhook try { JSON.parse(this.jsonQuery); } catch { - this.logger.error('JSON parameter need to be an valid JSON'); + this.logger.error('JSON parameter needs to be valid JSON'); } this.axiosRequestOptions.params = jsonParse(this.jsonQuery); } @@ -198,7 +198,7 @@ export class MessageEventBusDestinationWebhook try { JSON.parse(this.jsonHeaders); } catch { - this.logger.error('JSON parameter need to be an valid JSON'); + this.logger.error('JSON parameter needs to be valid JSON'); } this.axiosRequestOptions.headers = jsonParse(this.jsonHeaders); } diff --git a/packages/cli/src/eventbus/message-event-bus-writer/message-event-bus-log-writer.ts b/packages/cli/src/eventbus/message-event-bus-writer/message-event-bus-log-writer.ts index b9177faa07..6c6a928a67 100644 --- a/packages/cli/src/eventbus/message-event-bus-writer/message-event-bus-log-writer.ts +++ b/packages/cli/src/eventbus/message-event-bus-writer/message-event-bus-log-writer.ts @@ -149,7 +149,7 @@ export class MessageEventBusLogWriter { this._worker = new Worker(workerFileName); if (this.worker) { this.worker.on('messageerror', async (error) => { - this.logger.error('Event Bus Log Writer thread error, attempting to restart...', error); + this.logger.error('Event Bus Log Writer thread error, attempting to restart...', { error }); await 
MessageEventBusLogWriter.instance.startThread(); }); return true; diff --git a/packages/cli/src/events/maps/pub-sub.event-map.ts b/packages/cli/src/events/maps/pub-sub.event-map.ts index 9237e79d13..ff27741b9b 100644 --- a/packages/cli/src/events/maps/pub-sub.event-map.ts +++ b/packages/cli/src/events/maps/pub-sub.event-map.ts @@ -1,4 +1,4 @@ -import type { WorkerStatus, PushType } from '@n8n/api-types'; +import type { PushType, WorkerStatus } from '@n8n/api-types'; import type { IWorkflowDb } from '@/interfaces'; @@ -80,25 +80,5 @@ export type PubSubCommandMap = { }; export type PubSubWorkerResponseMap = { - // #region Lifecycle - - 'restart-event-bus': { - result: 'success' | 'error'; - error?: string; - }; - - 'reload-external-secrets-providers': { - result: 'success' | 'error'; - error?: string; - }; - - // #endregion - - // #region Worker view - - 'get-worker-id': never; - - 'get-worker-status': WorkerStatus; - - // #endregion + 'response-to-get-worker-status': WorkerStatus; }; diff --git a/packages/cli/src/events/relays/telemetry.event-relay.ts b/packages/cli/src/events/relays/telemetry.event-relay.ts index 11d84751d0..4cf0690eec 100644 --- a/packages/cli/src/events/relays/telemetry.event-relay.ts +++ b/packages/cli/src/events/relays/telemetry.event-relay.ts @@ -651,7 +651,9 @@ export class TelemetryEventRelay extends EventRelay { } if (telemetryProperties.is_manual) { - nodeGraphResult = TelemetryHelpers.generateNodesGraph(workflow, this.nodeTypes); + nodeGraphResult = TelemetryHelpers.generateNodesGraph(workflow, this.nodeTypes, { + runData: runData.data.resultData?.runData, + }); telemetryProperties.node_graph = nodeGraphResult.nodeGraph; telemetryProperties.node_graph_string = JSON.stringify(nodeGraphResult.nodeGraph); @@ -663,7 +665,9 @@ export class TelemetryEventRelay extends EventRelay { if (telemetryProperties.is_manual) { if (!nodeGraphResult) { - nodeGraphResult = TelemetryHelpers.generateNodesGraph(workflow, this.nodeTypes); + nodeGraphResult = 
TelemetryHelpers.generateNodesGraph(workflow, this.nodeTypes, { + runData: runData.data.resultData?.runData, + }); } let userRole: 'owner' | 'sharee' | undefined = undefined; @@ -688,7 +692,9 @@ export class TelemetryEventRelay extends EventRelay { }; if (!manualExecEventProperties.node_graph_string) { - nodeGraphResult = TelemetryHelpers.generateNodesGraph(workflow, this.nodeTypes); + nodeGraphResult = TelemetryHelpers.generateNodesGraph(workflow, this.nodeTypes, { + runData: runData.data.resultData?.runData, + }); manualExecEventProperties.node_graph_string = JSON.stringify(nodeGraphResult.nodeGraph); } diff --git a/packages/cli/src/execution-lifecycle-hooks/shared/shared-hook-functions.ts b/packages/cli/src/execution-lifecycle-hooks/shared/shared-hook-functions.ts index 9596dd35df..68fd528f14 100644 --- a/packages/cli/src/execution-lifecycle-hooks/shared/shared-hook-functions.ts +++ b/packages/cli/src/execution-lifecycle-hooks/shared/shared-hook-functions.ts @@ -1,5 +1,5 @@ import pick from 'lodash/pick'; -import type { ExecutionStatus, IRun, IWorkflowBase } from 'n8n-workflow'; +import { ensureError, type ExecutionStatus, type IRun, type IWorkflowBase } from 'n8n-workflow'; import { Container } from 'typedi'; import { ExecutionRepository } from '@/databases/repositories/execution.repository'; @@ -95,7 +95,8 @@ export async function updateExistingExecution(parameters: { ); } } catch (e) { - logger.error(`Failed to save metadata for execution ID ${executionId}`, e as Error); + const error = ensureError(e); + logger.error(`Failed to save metadata for execution ID ${executionId}`, { error }); } if (executionData.finished === true && executionData.retryOf !== undefined) { diff --git a/packages/cli/src/interfaces.ts b/packages/cli/src/interfaces.ts index 5c29eea093..4d2cd9b2d9 100644 --- a/packages/cli/src/interfaces.ts +++ b/packages/cli/src/interfaces.ts @@ -22,6 +22,8 @@ import type { INodeProperties, IUserSettings, IWorkflowExecutionDataProcess, + 
DeduplicationMode, + DeduplicationItemTypes, } from 'n8n-workflow'; import type PCancelable from 'p-cancelable'; @@ -48,6 +50,20 @@ export interface ICredentialsOverwrite { [key: string]: ICredentialDataDecryptedObject; } +// ---------------------------------- +// ProcessedData +// ---------------------------------- + +export interface IProcessedDataLatest { + mode: DeduplicationMode; + data: DeduplicationItemTypes; +} + +export interface IProcessedDataEntries { + mode: DeduplicationMode; + data: DeduplicationItemTypes[]; +} + // ---------------------------------- // tags // ---------------------------------- diff --git a/packages/cli/src/license.ts b/packages/cli/src/license.ts index 0cde2bd922..da7ab80313 100644 --- a/packages/cli/src/license.ts +++ b/packages/cli/src/license.ts @@ -37,7 +37,9 @@ export class License { private readonly orchestrationService: OrchestrationService, private readonly settingsRepository: SettingsRepository, private readonly licenseMetricsService: LicenseMetricsService, - ) {} + ) { + this.logger = this.logger.withScope('license'); + } /** * Whether this instance should renew the license - on init and periodically. @@ -109,9 +111,9 @@ export class License { await this.manager.initialize(); this.logger.debug('License initialized'); - } catch (e: unknown) { - if (e instanceof Error) { - this.logger.error('Could not initialize license manager sdk', e); + } catch (error: unknown) { + if (error instanceof Error) { + this.logger.error('Could not initialize license manager sdk', { error }); } } } @@ -141,10 +143,7 @@ export class License { this.orchestrationService.setMultiMainSetupLicensed(isMultiMainLicensed ?? 
false); - if ( - this.orchestrationService.isMultiMainSetupEnabled && - this.orchestrationService.isFollower - ) { + if (this.orchestrationService.isMultiMainSetupEnabled && this.instanceSettings.isFollower) { this.logger.debug( '[Multi-main setup] Instance is follower, skipping sending of "reload-license" command...', ); @@ -253,6 +252,10 @@ export class License { return this.isFeatureEnabled(LICENSE_FEATURES.AI_ASSISTANT); } + isAskAiEnabled() { + return this.isFeatureEnabled(LICENSE_FEATURES.ASK_AI); + } + isAdvancedExecutionFiltersEnabled() { return this.isFeatureEnabled(LICENSE_FEATURES.ADVANCED_EXECUTION_FILTERS); } diff --git a/packages/cli/src/license/license.service.ts b/packages/cli/src/license/license.service.ts index 9e4ab2382c..43f9961334 100644 --- a/packages/cli/src/license/license.service.ts +++ b/packages/cli/src/license/license.service.ts @@ -14,8 +14,7 @@ type LicenseError = Error & { errorId?: keyof typeof LicenseErrors }; export const LicenseErrors = { SCHEMA_VALIDATION: 'Activation key is in the wrong format', - RESERVATION_EXHAUSTED: - 'Activation key has been used too many times. 
Please contact sales@n8n.io if you would like to extend it', + RESERVATION_EXHAUSTED: 'Activation key has been used too many times', RESERVATION_EXPIRED: 'Activation key has expired', NOT_FOUND: 'Activation key not found', RESERVATION_CONFLICT: 'Activation key not found', diff --git a/packages/cli/src/logging/__tests__/logger.service.test.ts b/packages/cli/src/logging/__tests__/logger.service.test.ts index f699443909..d01a709639 100644 --- a/packages/cli/src/logging/__tests__/logger.service.test.ts +++ b/packages/cli/src/logging/__tests__/logger.service.test.ts @@ -11,6 +11,7 @@ describe('Logger', () => { logging: { level: 'info', outputs: ['console'], + scopes: [], }, }); @@ -30,6 +31,7 @@ describe('Logger', () => { logging: { level: 'info', outputs: ['file'], + scopes: [], file: { fileSizeMax: 100, fileCountMax: 16, @@ -56,6 +58,7 @@ describe('Logger', () => { logging: { level: 'error', outputs: ['console'], + scopes: [], }, }); @@ -74,6 +77,7 @@ describe('Logger', () => { logging: { level: 'warn', outputs: ['console'], + scopes: [], }, }); @@ -92,6 +96,7 @@ describe('Logger', () => { logging: { level: 'info', outputs: ['console'], + scopes: [], }, }); @@ -110,6 +115,7 @@ describe('Logger', () => { logging: { level: 'debug', outputs: ['console'], + scopes: [], }, }); @@ -128,6 +134,7 @@ describe('Logger', () => { logging: { level: 'silent', outputs: ['console'], + scopes: [], }, }); diff --git a/packages/cli/src/logging/logger.service.ts b/packages/cli/src/logging/logger.service.ts index c294645d61..8bdb9177de 100644 --- a/packages/cli/src/logging/logger.service.ts +++ b/packages/cli/src/logging/logger.service.ts @@ -1,5 +1,7 @@ +import type { LogScope } from '@n8n/config'; import { GlobalConfig } from '@n8n/config'; import callsites from 'callsites'; +import type { TransformableInfo } from 'logform'; import { InstanceSettings } from 'n8n-core'; import { LoggerProxy, LOG_LEVELS } from 'n8n-workflow'; import path, { basename } from 'node:path'; @@ -15,10 +17,16 @@ 
import type { LogLocationMetadata, LogLevel, LogMetadata } from './types'; @Service() export class Logger { - private readonly internalLogger: winston.Logger; + private internalLogger: winston.Logger; private readonly level: LogLevel; + private readonly scopes: Set; + + private get isScopingEnabled() { + return this.scopes.size > 0; + } + constructor( private readonly globalConfig: GlobalConfig, private readonly instanceSettings: InstanceSettings, @@ -35,15 +43,30 @@ export class Logger { if (!isSilent) { this.setLevel(); - const { outputs } = this.globalConfig.logging; + const { outputs, scopes } = this.globalConfig.logging; if (outputs.includes('console')) this.setConsoleTransport(); if (outputs.includes('file')) this.setFileTransport(); + + this.scopes = new Set(scopes); } LoggerProxy.init(this); } + private setInternalLogger(internalLogger: winston.Logger) { + this.internalLogger = internalLogger; + } + + withScope(scope: LogScope) { + const scopedLogger = new Logger(this.globalConfig, this.instanceSettings); + const childLogger = this.internalLogger.child({ scope }); + + scopedLogger.setInternalLogger(childLogger); + + return scopedLogger; + } + private log(level: LogLevel, message: string, metadata: LogMetadata) { const location: LogLocationMetadata = {}; @@ -81,11 +104,22 @@ export class Logger { this.internalLogger.add(new winston.transports.Console({ format })); } + private scopeFilter() { + return winston.format((info: TransformableInfo & { metadata: LogMetadata }) => { + const shouldIncludeScope = info.metadata.scope && this.scopes.has(info.metadata.scope); + + if (this.isScopingEnabled && !shouldIncludeScope) return false; + + return info; + })(); + } + private debugDevConsoleFormat() { return winston.format.combine( winston.format.metadata(), winston.format.timestamp({ format: () => this.devTsFormat() }), winston.format.colorize({ all: true }), + this.scopeFilter(), winston.format.printf(({ level: _level, message, timestamp, metadata: _metadata }) => { 
const SEPARATOR = ' '.repeat(3); const LOG_LEVEL_COLUMN_WIDTH = 15; // 5 columns + ANSI color codes @@ -100,6 +134,7 @@ export class Logger { return winston.format.combine( winston.format.metadata(), winston.format.timestamp(), + this.scopeFilter(), winston.format.printf(({ level, message, timestamp, metadata }) => { const _metadata = this.toPrintable(metadata); return `${timestamp} | ${level.padEnd(5)} | ${message}${_metadata ? ' ' + _metadata : ''}`; diff --git a/packages/cli/src/logging/types.ts b/packages/cli/src/logging/types.ts index 94b02d8ad7..b6022c0bf6 100644 --- a/packages/cli/src/logging/types.ts +++ b/packages/cli/src/logging/types.ts @@ -1,7 +1,14 @@ +import type { LogScope } from '@n8n/config'; + import type { LOG_LEVELS } from './constants'; export type LogLevel = (typeof LOG_LEVELS)[number]; -export type LogLocationMetadata = Partial<{ file: string; function: string }>; +export type LogMetadata = { + [key: string]: unknown; + scope?: LogScope; + file?: string; + function?: string; +}; -export type LogMetadata = Record | Error; +export type LogLocationMetadata = Pick; diff --git a/packages/cli/src/node-types.ts b/packages/cli/src/node-types.ts index 84b406001e..26b1b61e36 100644 --- a/packages/cli/src/node-types.ts +++ b/packages/cli/src/node-types.ts @@ -44,15 +44,38 @@ export class NodeTypes implements INodeTypes { } getByNameAndVersion(nodeType: string, version?: number): INodeType { - const versionedNodeType = NodeHelpers.getVersionedNodeType( - this.getNode(nodeType).type, - version, - ); - if (versionedNodeType.description.usableAsTool) { - return NodeHelpers.convertNodeToAiTool(versionedNodeType); + const origType = nodeType; + const toolRequested = nodeType.startsWith('n8n-nodes-base') && nodeType.endsWith('Tool'); + // Make sure the nodeType to actually get from disk is the un-wrapped type + if (toolRequested) { + nodeType = nodeType.replace(/Tool$/, ''); } - return versionedNodeType; + const node = this.getNode(nodeType); + const 
versionedNodeType = NodeHelpers.getVersionedNodeType(node.type, version); + if (!toolRequested) return versionedNodeType; + + if (!versionedNodeType.description.usableAsTool) + throw new ApplicationError('Node cannot be used as a tool', { extra: { nodeType } }); + + const { loadedNodes } = this.loadNodesAndCredentials; + if (origType in loadedNodes) { + return loadedNodes[origType].type as INodeType; + } + + // Instead of modifying the existing type, we extend it into a new type object + const clonedProperties = Object.create( + versionedNodeType.description.properties, + ) as INodeTypeDescription['properties']; + const clonedDescription = Object.create(versionedNodeType.description, { + properties: { value: clonedProperties }, + }) as INodeTypeDescription; + const clonedNode = Object.create(versionedNodeType, { + description: { value: clonedDescription }, + }) as INodeType; + const tool = NodeHelpers.convertNodeToAiTool(clonedNode); + loadedNodes[nodeType + 'Tool'] = { sourcePath: '', type: tool }; + return tool; } /* Some nodeTypes need to get special parameters applied like the polling nodes the polling times */ diff --git a/packages/cli/src/permissions/global-roles.ts b/packages/cli/src/permissions/global-roles.ts index 664cd8384e..6315c3c617 100644 --- a/packages/cli/src/permissions/global-roles.ts +++ b/packages/cli/src/permissions/global-roles.ts @@ -38,7 +38,6 @@ export const GLOBAL_OWNER_SCOPES: Scope[] = [ 'license:manage', 'logStreaming:manage', 'orchestration:read', - 'orchestration:list', 'saml:manage', 'securityAudit:generate', 'sourceControl:pull', diff --git a/packages/cli/src/push/abstract.push.ts b/packages/cli/src/push/abstract.push.ts index c56fa4c042..24cafa8121 100644 --- a/packages/cli/src/push/abstract.push.ts +++ b/packages/cli/src/push/abstract.push.ts @@ -1,8 +1,9 @@ import type { PushPayload, PushType } from '@n8n/api-types'; import { assert, jsonStringify } from 'n8n-workflow'; +import { Service } from 'typedi'; import type { User } 
from '@/databases/entities/user'; -import type { Logger } from '@/logging/logger.service'; +import { Logger } from '@/logging/logger.service'; import type { OnPushMessage } from '@/push/types'; import { TypedEmitter } from '@/typed-emitter'; @@ -16,6 +17,7 @@ export interface AbstractPushEvents { * * @emits message when a message is received from a client */ +@Service() export abstract class AbstractPush extends TypedEmitter { protected connections: Record = {}; @@ -23,9 +25,12 @@ export abstract class AbstractPush extends TypedEmitter this.pingAll(), 60 * 1000); } protected add(pushRef: string, userId: User['id'], connection: Connection) { @@ -75,6 +80,12 @@ export abstract class AbstractPush extends TypedEmitter(type: Type, data: PushPayload) { this.sendTo(type, data, Object.keys(this.connections)); } diff --git a/packages/cli/src/push/sse.push.ts b/packages/cli/src/push/sse.push.ts index 85d16a3b42..04e39d6d79 100644 --- a/packages/cli/src/push/sse.push.ts +++ b/packages/cli/src/push/sse.push.ts @@ -1,8 +1,6 @@ import { Service } from 'typedi'; import type { User } from '@/databases/entities/user'; -import { Logger } from '@/logging/logger.service'; -import SSEChannel from 'sse-channel'; import { AbstractPush } from './abstract.push'; import type { PushRequest, PushResponse } from './types'; @@ -11,29 +9,41 @@ type Connection = { req: PushRequest; res: PushResponse }; @Service() export class SSEPush extends AbstractPush { - readonly channel = new SSEChannel(); - - readonly connections: Record = {}; - - constructor(logger: Logger) { - super(logger); - - this.channel.on('disconnect', (_, { req }) => { - this.remove(req?.query?.pushRef); - }); - } - add(pushRef: string, userId: User['id'], connection: Connection) { + const { req, res } = connection; + + // Initialize the connection + req.socket.setTimeout(0); + req.socket.setNoDelay(true); + req.socket.setKeepAlive(true); + res.setHeader('Content-Type', 'text/event-stream; charset=UTF-8'); + 
res.setHeader('Cache-Control', 'no-cache'); + res.setHeader('Connection', 'keep-alive'); + res.writeHead(200); + res.write(':ok\n\n'); + res.flush(); + super.add(pushRef, userId, connection); - this.channel.addClient(connection.req, connection.res); + + // When the client disconnects, remove the client + const removeClient = () => this.remove(pushRef); + req.once('end', removeClient); + req.once('close', removeClient); + res.once('finish', removeClient); } protected close({ res }: Connection) { res.end(); - this.channel.removeClient(res); } protected sendToOneConnection(connection: Connection, data: string) { - this.channel.send(data, [connection.res]); + const { res } = connection; + res.write('data: ' + data + '\n\n'); + res.flush(); + } + + protected ping({ res }: Connection) { + res.write(':ping\n\n'); + res.flush(); } } diff --git a/packages/cli/src/push/types.ts b/packages/cli/src/push/types.ts index db9121eecc..b0db44ba1a 100644 --- a/packages/cli/src/push/types.ts +++ b/packages/cli/src/push/types.ts @@ -11,7 +11,15 @@ export type PushRequest = AuthenticatedRequest<{}, {}, {}, { pushRef: string }>; export type SSEPushRequest = PushRequest & { ws: undefined }; export type WebSocketPushRequest = PushRequest & { ws: WebSocket }; -export type PushResponse = Response & { req: PushRequest }; +export type PushResponse = Response & { + req: PushRequest; + /** + * `flush()` is defined in the compression middleware. 
+ * This is necessary because the compression middleware sometimes waits + * for a certain amount of data before sending the data to the client + */ + flush: () => void; +}; export interface OnPushMessage { pushRef: string; diff --git a/packages/cli/src/push/websocket.push.ts b/packages/cli/src/push/websocket.push.ts index dc60d70901..a2ea39c500 100644 --- a/packages/cli/src/push/websocket.push.ts +++ b/packages/cli/src/push/websocket.push.ts @@ -3,7 +3,6 @@ import { Service } from 'typedi'; import type WebSocket from 'ws'; import type { User } from '@/databases/entities/user'; -import { Logger } from '@/logging/logger.service'; import { AbstractPush } from './abstract.push'; @@ -13,13 +12,6 @@ function heartbeat(this: WebSocket) { @Service() export class WebSocketPush extends AbstractPush { - constructor(logger: Logger) { - super(logger); - - // Ping all connected clients every 60 seconds - setInterval(() => this.pingAll(), 60 * 1000); - } - add(pushRef: string, userId: User['id'], connection: WebSocket) { connection.isAlive = true; connection.on('pong', heartbeat); @@ -67,17 +59,12 @@ export class WebSocketPush extends AbstractPush { connection.send(data); } - private pingAll() { - for (const pushRef in this.connections) { - const connection = this.connections[pushRef]; - // If a connection did not respond with a `PONG` in the last 60 seconds, disconnect - if (!connection.isAlive) { - delete this.connections[pushRef]; - return connection.terminate(); - } - - connection.isAlive = false; - connection.ping(); + protected ping(connection: WebSocket): void { + // If a connection did not respond with a `PONG` in the last 60 seconds, disconnect + if (!connection.isAlive) { + return connection.terminate(); } + connection.isAlive = false; + connection.ping(); } } diff --git a/packages/cli/src/requests.ts b/packages/cli/src/requests.ts index b8fa4b99ca..e25a244f5f 100644 --- a/packages/cli/src/requests.ts +++ b/packages/cli/src/requests.ts @@ -586,5 +586,6 @@ export 
declare namespace AiAssistantRequest { type Chat = AuthenticatedRequest<{}, {}, AiAssistantSDK.ChatRequestPayload>; type SuggestionPayload = { sessionId: string; suggestionId: string }; - type ApplySuggestion = AuthenticatedRequest<{}, {}, SuggestionPayload>; + type ApplySuggestionPayload = AuthenticatedRequest<{}, {}, SuggestionPayload>; + type AskAiPayload = AuthenticatedRequest<{}, {}, AiAssistantSDK.AskAiRequestPayload>; } diff --git a/packages/cli/src/runners/__tests__/task-broker.test.ts b/packages/cli/src/runners/__tests__/task-broker.test.ts index f5b91a3f2c..5d627ba341 100644 --- a/packages/cli/src/runners/__tests__/task-broker.test.ts +++ b/packages/cli/src/runners/__tests__/task-broker.test.ts @@ -5,6 +5,8 @@ import type { RunnerMessage, TaskResultData } from '../runner-types'; import { TaskBroker } from '../task-broker.service'; import type { TaskOffer, TaskRequest, TaskRunner } from '../task-broker.service'; +const createValidUntil = (ms: number) => process.hrtime.bigint() + BigInt(ms * 1_000_000); + describe('TaskBroker', () => { let taskBroker: TaskBroker; @@ -15,14 +17,12 @@ describe('TaskBroker', () => { describe('expireTasks', () => { it('should remove expired task offers and keep valid task offers', () => { - const now = process.hrtime.bigint(); - const validOffer: TaskOffer = { offerId: 'valid', runnerId: 'runner1', taskType: 'taskType1', validFor: 1000, - validUntil: now + BigInt(1000 * 1_000_000), // 1 second in the future + validUntil: createValidUntil(1000), // 1 second in the future }; const expiredOffer1: TaskOffer = { @@ -30,7 +30,7 @@ describe('TaskBroker', () => { runnerId: 'runner2', taskType: 'taskType1', validFor: 1000, - validUntil: now - BigInt(1000 * 1_000_000), // 1 second in the past + validUntil: createValidUntil(-1000), // 1 second in the past }; const expiredOffer2: TaskOffer = { @@ -38,7 +38,7 @@ describe('TaskBroker', () => { runnerId: 'runner3', taskType: 'taskType1', validFor: 2000, - validUntil: now - BigInt(2000 * 
1_000_000), // 2 seconds in the past + validUntil: createValidUntil(-2000), // 2 seconds in the past }; taskBroker.setPendingTaskOffers([validOffer, expiredOffer1, expiredOffer2]); @@ -102,6 +102,55 @@ describe('TaskBroker', () => { expect(runnerIds).toHaveLength(0); }); + + it('should remove any pending offers for that runner', () => { + const runnerId = 'runner1'; + const runner = mock({ id: runnerId }); + const messageCallback = jest.fn(); + + taskBroker.registerRunner(runner, messageCallback); + taskBroker.taskOffered({ + offerId: 'offer1', + runnerId, + taskType: 'mock', + validFor: 1000, + validUntil: createValidUntil(1000), + }); + taskBroker.taskOffered({ + offerId: 'offer2', + runnerId: 'runner2', + taskType: 'mock', + validFor: 1000, + validUntil: createValidUntil(1000), + }); + taskBroker.deregisterRunner(runnerId); + + const offers = taskBroker.getPendingTaskOffers(); + expect(offers).toHaveLength(1); + expect(offers[0].runnerId).toBe('runner2'); + }); + + it('should fail any running tasks for that runner', () => { + const runnerId = 'runner1'; + const runner = mock({ id: runnerId }); + const messageCallback = jest.fn(); + + const taskId = 'task1'; + + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const failSpy = jest.spyOn(taskBroker as any, 'failTask'); + const rejectSpy = jest.spyOn(taskBroker, 'handleRunnerReject'); + + taskBroker.registerRunner(runner, messageCallback); + taskBroker.setTasks({ + [taskId]: { id: taskId, requesterId: 'requester1', runnerId, taskType: 'mock' }, + task2: { id: 'task2', requesterId: 'requester1', runnerId: 'runner2', taskType: 'mock' }, + }); + taskBroker.deregisterRunner(runnerId); + + expect(failSpy).toBeCalledWith(taskId, `The Task Runner (${runnerId}) has disconnected`); + expect(rejectSpy).toBeCalledWith(taskId, `The Task Runner (${runnerId}) has disconnected`); + }); }); describe('deregisterRequester', () => { @@ -121,14 +170,12 @@ describe('TaskBroker', () => { describe('taskRequested', () => { 
it('should match a pending offer to an incoming request', async () => { - const now = process.hrtime.bigint(); - const offer: TaskOffer = { offerId: 'offer1', runnerId: 'runner1', taskType: 'taskType1', validFor: 1000, - validUntil: now + BigInt(1000 * 1_000_000), + validUntil: createValidUntil(1000), }; taskBroker.setPendingTaskOffers([offer]); @@ -150,8 +197,6 @@ describe('TaskBroker', () => { describe('taskOffered', () => { it('should match a pending request to an incoming offer', () => { - const now = process.hrtime.bigint(); - const request: TaskRequest = { requestId: 'request1', requesterId: 'requester1', @@ -166,7 +211,7 @@ describe('TaskBroker', () => { runnerId: 'runner1', taskType: 'taskType1', validFor: 1000, - validUntil: now + BigInt(1000 * 1_000_000), + validUntil: createValidUntil(1000), }; jest.spyOn(taskBroker, 'acceptOffer').mockResolvedValue(); // allow Jest to exit cleanly @@ -180,14 +225,12 @@ describe('TaskBroker', () => { describe('settleTasks', () => { it('should match task offers with task requests by task type', () => { - const now = process.hrtime.bigint(); - const offer1: TaskOffer = { offerId: 'offer1', runnerId: 'runner1', taskType: 'taskType1', validFor: 1000, - validUntil: now + BigInt(1000 * 1_000_000), + validUntil: createValidUntil(1000), }; const offer2: TaskOffer = { @@ -195,7 +238,7 @@ describe('TaskBroker', () => { runnerId: 'runner2', taskType: 'taskType2', validFor: 1000, - validUntil: now + BigInt(1000 * 1_000_000), + validUntil: createValidUntil(1000), }; const request1: TaskRequest = { @@ -235,14 +278,12 @@ describe('TaskBroker', () => { }); it('should not match a request whose acceptance is in progress', () => { - const now = process.hrtime.bigint(); - const offer: TaskOffer = { offerId: 'offer1', runnerId: 'runner1', taskType: 'taskType1', validFor: 1000, - validUntil: now + BigInt(1000 * 1_000_000), + validUntil: createValidUntil(1000), }; const request: TaskRequest = { @@ -271,14 +312,12 @@ describe('TaskBroker', () 
=> { }); it('should expire tasks before settling', () => { - const now = process.hrtime.bigint(); - const validOffer: TaskOffer = { offerId: 'valid', runnerId: 'runner1', taskType: 'taskType1', validFor: 1000, - validUntil: now + BigInt(1000 * 1_000_000), // 1 second in the future + validUntil: createValidUntil(1000), // 1 second in the future }; const expiredOffer: TaskOffer = { @@ -286,7 +325,7 @@ describe('TaskBroker', () => { runnerId: 'runner2', taskType: 'taskType2', // will be removed before matching validFor: 1000, - validUntil: now - BigInt(1000 * 1_000_000), // 1 second in the past + validUntil: createValidUntil(-1000), // 1 second in the past }; const request1: TaskRequest = { diff --git a/packages/cli/src/runners/__tests__/task-runner-process.test.ts b/packages/cli/src/runners/__tests__/task-runner-process.test.ts new file mode 100644 index 0000000000..b2ad678ee1 --- /dev/null +++ b/packages/cli/src/runners/__tests__/task-runner-process.test.ts @@ -0,0 +1,48 @@ +import { GlobalConfig } from '@n8n/config'; +import { mock } from 'jest-mock-extended'; +import type { ChildProcess, SpawnOptions } from 'node:child_process'; + +import { mockInstance } from '../../../test/shared/mocking'; +import type { TaskRunnerAuthService } from '../auth/task-runner-auth.service'; +import { TaskRunnerProcess } from '../task-runner-process'; + +const spawnMock = jest.fn(() => + mock({ + stdout: { + pipe: jest.fn(), + }, + stderr: { + pipe: jest.fn(), + }, + }), +); +require('child_process').spawn = spawnMock; + +describe('TaskRunnerProcess', () => { + const globalConfig = mockInstance(GlobalConfig); + const authService = mock(); + const taskRunnerProcess = new TaskRunnerProcess(globalConfig, authService); + + afterEach(async () => { + spawnMock.mockClear(); + }); + + describe('start', () => { + it('should propagate NODE_FUNCTION_ALLOW_BUILTIN and NODE_FUNCTION_ALLOW_EXTERNAL from env', async () => { + jest.spyOn(authService, 
'createGrantToken').mockResolvedValue('grantToken'); + process.env.NODE_FUNCTION_ALLOW_BUILTIN = '*'; + process.env.NODE_FUNCTION_ALLOW_EXTERNAL = '*'; + + await taskRunnerProcess.start(); + + // @ts-expect-error The type is not correct + const options = spawnMock.mock.calls[0][2] as SpawnOptions; + expect(options.env).toEqual( + expect.objectContaining({ + NODE_FUNCTION_ALLOW_BUILTIN: '*', + NODE_FUNCTION_ALLOW_EXTERNAL: '*', + }), + ); + }); + }); +}); diff --git a/packages/cli/src/runners/task-broker.service.ts b/packages/cli/src/runners/task-broker.service.ts index 829910b468..a63cbbda21 100644 --- a/packages/cli/src/runners/task-broker.service.ts +++ b/packages/cli/src/runners/task-broker.service.ts @@ -75,15 +75,11 @@ export class TaskBroker { expireTasks() { const now = process.hrtime.bigint(); - const invalidOffers: number[] = []; - for (let i = 0; i < this.pendingTaskOffers.length; i++) { + for (let i = this.pendingTaskOffers.length - 1; i >= 0; i--) { if (this.pendingTaskOffers[i].validUntil < now) { - invalidOffers.push(i); + this.pendingTaskOffers.splice(i, 1); } } - - // We reverse the list so the later indexes are valid after deleting earlier ones - invalidOffers.reverse().forEach((i) => this.pendingTaskOffers.splice(i, 1)); } registerRunner(runner: TaskRunner, messageCallback: MessageCallback) { @@ -92,6 +88,21 @@ export class TaskBroker { deregisterRunner(runnerId: string) { this.knownRunners.delete(runnerId); + + // Remove any pending offers + for (let i = this.pendingTaskOffers.length - 1; i >= 0; i--) { + if (this.pendingTaskOffers[i].runnerId === runnerId) { + this.pendingTaskOffers.splice(i, 1); + } + } + + // Fail any tasks + for (const task of this.tasks.values()) { + if (task.runnerId === runnerId) { + void this.failTask(task.id, `The Task Runner (${runnerId}) has disconnected`); + this.handleRunnerReject(task.id, `The Task Runner (${runnerId}) has disconnected`); + } + } } registerRequester(requesterId: string, messageCallback: 
RequesterMessageCallback) { diff --git a/packages/cli/src/runners/task-managers/single-main-task-manager.ts b/packages/cli/src/runners/task-managers/local-task-manager.ts similarity index 92% rename from packages/cli/src/runners/task-managers/single-main-task-manager.ts rename to packages/cli/src/runners/task-managers/local-task-manager.ts index b5b60df72b..a8fca01b2c 100644 --- a/packages/cli/src/runners/task-managers/single-main-task-manager.ts +++ b/packages/cli/src/runners/task-managers/local-task-manager.ts @@ -5,7 +5,7 @@ import type { RequesterMessage } from '../runner-types'; import type { RequesterMessageCallback } from '../task-broker.service'; import { TaskBroker } from '../task-broker.service'; -export class SingleMainTaskManager extends TaskManager { +export class LocalTaskManager extends TaskManager { taskBroker: TaskBroker; id: string = 'single-main'; diff --git a/packages/cli/src/runners/task-managers/task-manager.ts b/packages/cli/src/runners/task-managers/task-manager.ts index 9f7e492fbe..58d8ade906 100644 --- a/packages/cli/src/runners/task-managers/task-manager.ts +++ b/packages/cli/src/runners/task-managers/task-manager.ts @@ -1,4 +1,5 @@ import { + type EnvProviderState, type IExecuteFunctions, type Workflow, type IRunExecutionData, @@ -11,11 +12,12 @@ import { type IExecuteData, type IDataObject, type IWorkflowExecuteAdditionalData, + type Result, + createResultOk, + createResultError, } from 'n8n-workflow'; import { nanoid } from 'nanoid'; -import { TaskError } from '@/runners/errors'; - import { RPC_ALLOW_LIST, type TaskResultData, @@ -42,6 +44,7 @@ export interface TaskData { connectionInputData: INodeExecutionData[]; siblingParameters: INodeParameters; mode: WorkflowExecuteMode; + envProviderState: EnvProviderState; executeData?: IExecuteData; defaultReturnRunIndex: number; selfData: IDataObject; @@ -76,6 +79,7 @@ export interface AllCodeTaskData { connectionInputData: INodeExecutionData[]; siblingParameters: INodeParameters; mode: 
WorkflowExecuteMode; + envProviderState: EnvProviderState; executeData?: IExecuteData; defaultReturnRunIndex: number; selfData: IDataObject; @@ -122,7 +126,7 @@ export class TaskManager { tasks: Map = new Map(); - async startTask( + async startTask( additionalData: IWorkflowExecuteAdditionalData, taskType: string, settings: unknown, @@ -137,11 +141,12 @@ export class TaskManager { connectionInputData: INodeExecutionData[], siblingParameters: INodeParameters, mode: WorkflowExecuteMode, + envProviderState: EnvProviderState, executeData?: IExecuteData, defaultReturnRunIndex = -1, selfData: IDataObject = {}, contextNodeName: string = activeNodeName, - ): Promise { + ): Promise> { const data: TaskData = { workflow, runExecutionData, @@ -153,6 +158,7 @@ export class TaskManager { itemIndex, siblingParameters, mode, + envProviderState, executeData, defaultReturnRunIndex, selfData, @@ -216,14 +222,10 @@ export class TaskManager { runExecutionData.resultData.metadata[k] = v; }); } - return resultData.result as T; - } catch (e) { - if (typeof e === 'string') { - throw new TaskError(e, { - level: 'error', - }); - } - throw e; + + return createResultOk(resultData.result as TData); + } catch (e: unknown) { + return createResultError(e as TError); } finally { this.tasks.delete(taskId); } @@ -311,6 +313,7 @@ export class TaskManager { contextNodeName: jd.contextNodeName, defaultReturnRunIndex: jd.defaultReturnRunIndex, mode: jd.mode, + envProviderState: jd.envProviderState, node: jd.node, runExecutionData: jd.runExecutionData, runIndex: jd.runIndex, diff --git a/packages/cli/src/runners/task-runner-process.ts b/packages/cli/src/runners/task-runner-process.ts index 5f420ab568..857d581127 100644 --- a/packages/cli/src/runners/task-runner-process.ts +++ b/packages/cli/src/runners/task-runner-process.ts @@ -1,6 +1,7 @@ import { GlobalConfig } from '@n8n/config'; import * as a from 'node:assert/strict'; import { spawn } from 'node:child_process'; +import * as process from 
'node:process'; import { Service } from 'typedi'; import { TaskRunnerAuthService } from './auth/task-runner-auth.service'; @@ -38,15 +39,11 @@ export class TaskRunnerProcess { a.ok(!this.process, 'Task Runner Process already running'); const grantToken = await this.authService.createGrantToken(); - const startScript = require.resolve('@n8n/task-runner'); - this.process = spawn('node', [startScript], { - env: { - PATH: process.env.PATH, - N8N_RUNNERS_GRANT_TOKEN: grantToken, - N8N_RUNNERS_N8N_URI: `127.0.0.1:${this.globalConfig.taskRunners.port}`, - }, - }); + const n8nUri = `127.0.0.1:${this.globalConfig.taskRunners.port}`; + this.process = this.globalConfig.taskRunners.useLauncher + ? this.startLauncher(grantToken, n8nUri) + : this.startNode(grantToken, n8nUri); this.process.stdout?.pipe(process.stdout); this.process.stderr?.pipe(process.stderr); @@ -54,6 +51,38 @@ export class TaskRunnerProcess { this.monitorProcess(this.process); } + startNode(grantToken: string, n8nUri: string) { + const startScript = require.resolve('@n8n/task-runner'); + + return spawn('node', [startScript], { + env: { + PATH: process.env.PATH, + N8N_RUNNERS_GRANT_TOKEN: grantToken, + N8N_RUNNERS_N8N_URI: n8nUri, + NODE_FUNCTION_ALLOW_BUILTIN: process.env.NODE_FUNCTION_ALLOW_BUILTIN, + NODE_FUNCTION_ALLOW_EXTERNAL: process.env.NODE_FUNCTION_ALLOW_EXTERNAL, + }, + }); + } + + startLauncher(grantToken: string, n8nUri: string) { + return spawn( + this.globalConfig.taskRunners.launcherPath, + ['launch', this.globalConfig.taskRunners.launcherRunner], + { + env: { + PATH: process.env.PATH, + N8N_RUNNERS_GRANT_TOKEN: grantToken, + N8N_RUNNERS_N8N_URI: n8nUri, + NODE_FUNCTION_ALLOW_BUILTIN: process.env.NODE_FUNCTION_ALLOW_BUILTIN, + NODE_FUNCTION_ALLOW_EXTERNAL: process.env.NODE_FUNCTION_ALLOW_EXTERNAL, + // For debug logging if enabled + RUST_LOG: process.env.RUST_LOG, + }, + }, + ); + } + @OnShutdown() async stop() { if (!this.process) { @@ -63,15 +92,44 @@ export class TaskRunnerProcess { 
this.isShuttingDown = true; // TODO: Timeout & force kill - this.process.kill(); + if (this.globalConfig.taskRunners.useLauncher) { + await this.killLauncher(); + } else { + this.killNode(); + } await this.runPromise; this.isShuttingDown = false; } - private monitorProcess(process: ChildProcess) { + killNode() { + if (!this.process) { + return; + } + this.process.kill(); + } + + async killLauncher() { + if (!this.process?.pid) { + return; + } + + const killProcess = spawn(this.globalConfig.taskRunners.launcherPath, [ + 'kill', + this.globalConfig.taskRunners.launcherRunner, + this.process.pid.toString(), + ]); + + await new Promise((resolve) => { + killProcess.on('exit', () => { + resolve(); + }); + }); + } + + private monitorProcess(taskRunnerProcess: ChildProcess) { this.runPromise = new Promise((resolve) => { - process.on('exit', (code) => { + taskRunnerProcess.on('exit', (code) => { this.onProcessExit(code, resolve); }); }); diff --git a/packages/cli/src/scaling/__tests__/publisher.service.test.ts b/packages/cli/src/scaling/__tests__/publisher.service.test.ts index 439af01ef9..f77b6b5d5a 100644 --- a/packages/cli/src/scaling/__tests__/publisher.service.test.ts +++ b/packages/cli/src/scaling/__tests__/publisher.service.test.ts @@ -1,35 +1,35 @@ import type { Redis as SingleNodeClient } from 'ioredis'; import { mock } from 'jest-mock-extended'; +import type { InstanceSettings } from 'n8n-core'; import config from '@/config'; -import { generateNanoId } from '@/databases/utils/generators'; import type { RedisClientService } from '@/services/redis-client.service'; +import { mockLogger } from '@test/mocking'; import { Publisher } from '../pubsub/publisher.service'; import type { PubSub } from '../pubsub/pubsub.types'; describe('Publisher', () => { - let queueModeId: string; - beforeEach(() => { config.set('executions.mode', 'queue'); - queueModeId = generateNanoId(); - config.set('redis.queueModeId', queueModeId); }); const client = mock(); + const logger = 
mockLogger(); + const hostId = 'main-bnxa1riryKUNHtln'; + const instanceSettings = mock({ hostId }); const redisClientService = mock({ createClient: () => client }); describe('constructor', () => { it('should init Redis client in scaling mode', () => { - const publisher = new Publisher(mock(), redisClientService); + const publisher = new Publisher(logger, redisClientService, instanceSettings); expect(publisher.getClient()).toEqual(client); }); it('should not init Redis client in regular mode', () => { config.set('executions.mode', 'regular'); - const publisher = new Publisher(mock(), redisClientService); + const publisher = new Publisher(logger, redisClientService, instanceSettings); expect(publisher.getClient()).toBeUndefined(); }); @@ -37,7 +37,7 @@ describe('Publisher', () => { describe('shutdown', () => { it('should disconnect Redis client', () => { - const publisher = new Publisher(mock(), redisClientService); + const publisher = new Publisher(logger, redisClientService, instanceSettings); publisher.shutdown(); expect(client.disconnect).toHaveBeenCalled(); }); @@ -45,23 +45,23 @@ describe('Publisher', () => { describe('publishCommand', () => { it('should publish command into `n8n.commands` pubsub channel', async () => { - const publisher = new Publisher(mock(), redisClientService); + const publisher = new Publisher(logger, redisClientService, instanceSettings); const msg = mock({ command: 'reload-license' }); await publisher.publishCommand(msg); expect(client.publish).toHaveBeenCalledWith( 'n8n.commands', - JSON.stringify({ ...msg, senderId: queueModeId, selfSend: false, debounce: true }), + JSON.stringify({ ...msg, senderId: hostId, selfSend: false, debounce: true }), ); }); }); describe('publishWorkerResponse', () => { it('should publish worker response into `n8n.worker-response` pubsub channel', async () => { - const publisher = new Publisher(mock(), redisClientService); + const publisher = new Publisher(logger, redisClientService, instanceSettings); const 
msg = mock({ - command: 'reload-external-secrets-providers', + response: 'response-to-get-worker-status', }); await publisher.publishWorkerResponse(msg); diff --git a/packages/cli/src/scaling/__tests__/pubsub-handler.test.ts b/packages/cli/src/scaling/__tests__/pubsub-handler.test.ts index 0cf8d5ef48..314ded0b8b 100644 --- a/packages/cli/src/scaling/__tests__/pubsub-handler.test.ts +++ b/packages/cli/src/scaling/__tests__/pubsub-handler.test.ts @@ -1,15 +1,25 @@ +import type { WorkerStatus } from '@n8n/api-types'; import { mock } from 'jest-mock-extended'; import type { InstanceSettings } from 'n8n-core'; +import type { Workflow } from 'n8n-workflow'; +import type { ActiveWorkflowManager } from '@/active-workflow-manager'; +import type { WorkflowRepository } from '@/databases/repositories/workflow.repository'; import type { MessageEventBus } from '@/eventbus/message-event-bus/message-event-bus'; import { EventService } from '@/events/event.service'; import type { ExternalSecretsManager } from '@/external-secrets/external-secrets-manager.ee'; +import type { IWorkflowDb } from '@/interfaces'; import type { License } from '@/license'; +import type { Push } from '@/push'; +import type { WebSocketPush } from '@/push/websocket.push'; import type { CommunityPackagesService } from '@/services/community-packages.service'; +import type { TestWebhooks } from '@/webhooks/test-webhooks'; import type { Publisher } from '../pubsub/publisher.service'; import { PubSubHandler } from '../pubsub/pubsub-handler'; -import type { WorkerStatus } from '../worker-status'; +import type { WorkerStatusService } from '../worker-status.service'; + +const flushPromises = async () => await new Promise((resolve) => setImmediate(resolve)); describe('PubSubHandler', () => { const eventService = new EventService(); @@ -18,7 +28,11 @@ describe('PubSubHandler', () => { const externalSecretsManager = mock(); const communityPackagesService = mock(); const publisher = mock(); - const workerStatus = mock(); 
+ const workerStatusService = mock(); + const activeWorkflowManager = mock(); + const push = mock(); + const workflowRepository = mock(); + const testWebhooks = mock(); afterEach(() => { eventService.removeAllListeners(); @@ -29,7 +43,7 @@ describe('PubSubHandler', () => { it('should set up handlers in webhook process', () => { // @ts-expect-error Spying on private method - const setupHandlersSpy = jest.spyOn(PubSubHandler.prototype, 'setupHandlers'); + const setupHandlers = jest.spyOn(PubSubHandler.prototype, 'setupHandlers'); new PubSubHandler( eventService, @@ -39,10 +53,14 @@ describe('PubSubHandler', () => { externalSecretsManager, communityPackagesService, publisher, - workerStatus, + workerStatusService, + activeWorkflowManager, + push, + workflowRepository, + testWebhooks, ).init(); - expect(setupHandlersSpy).toHaveBeenCalledWith({ + expect(setupHandlers).toHaveBeenCalledWith({ 'reload-license': expect.any(Function), 'restart-event-bus': expect.any(Function), 'reload-external-secrets-providers': expect.any(Function), @@ -61,7 +79,11 @@ describe('PubSubHandler', () => { externalSecretsManager, communityPackagesService, publisher, - workerStatus, + workerStatusService, + activeWorkflowManager, + push, + workflowRepository, + testWebhooks, ).init(); eventService.emit('reload-license'); @@ -78,7 +100,11 @@ describe('PubSubHandler', () => { externalSecretsManager, communityPackagesService, publisher, - workerStatus, + workerStatusService, + activeWorkflowManager, + push, + workflowRepository, + testWebhooks, ).init(); eventService.emit('restart-event-bus'); @@ -95,7 +121,11 @@ describe('PubSubHandler', () => { externalSecretsManager, communityPackagesService, publisher, - workerStatus, + workerStatusService, + activeWorkflowManager, + push, + workflowRepository, + testWebhooks, ).init(); eventService.emit('reload-external-secrets-providers'); @@ -112,7 +142,11 @@ describe('PubSubHandler', () => { externalSecretsManager, communityPackagesService, publisher, - 
workerStatus, + workerStatusService, + activeWorkflowManager, + push, + workflowRepository, + testWebhooks, ).init(); eventService.emit('community-package-install', { @@ -135,7 +169,11 @@ describe('PubSubHandler', () => { externalSecretsManager, communityPackagesService, publisher, - workerStatus, + workerStatusService, + activeWorkflowManager, + push, + workflowRepository, + testWebhooks, ).init(); eventService.emit('community-package-update', { @@ -158,7 +196,11 @@ describe('PubSubHandler', () => { externalSecretsManager, communityPackagesService, publisher, - workerStatus, + workerStatusService, + activeWorkflowManager, + push, + workflowRepository, + testWebhooks, ).init(); eventService.emit('community-package-uninstall', { @@ -184,7 +226,11 @@ describe('PubSubHandler', () => { externalSecretsManager, communityPackagesService, publisher, - workerStatus, + workerStatusService, + activeWorkflowManager, + push, + workflowRepository, + testWebhooks, ).init(); expect(setupHandlersSpy).toHaveBeenCalledWith({ @@ -195,7 +241,6 @@ describe('PubSubHandler', () => { 'community-package-update': expect.any(Function), 'community-package-uninstall': expect.any(Function), 'get-worker-status': expect.any(Function), - 'get-worker-id': expect.any(Function), }); }); @@ -208,7 +253,11 @@ describe('PubSubHandler', () => { externalSecretsManager, communityPackagesService, publisher, - workerStatus, + workerStatusService, + activeWorkflowManager, + push, + workflowRepository, + testWebhooks, ).init(); eventService.emit('reload-license'); @@ -225,7 +274,11 @@ describe('PubSubHandler', () => { externalSecretsManager, communityPackagesService, publisher, - workerStatus, + workerStatusService, + activeWorkflowManager, + push, + workflowRepository, + testWebhooks, ).init(); eventService.emit('restart-event-bus'); @@ -242,7 +295,11 @@ describe('PubSubHandler', () => { externalSecretsManager, communityPackagesService, publisher, - workerStatus, + workerStatusService, + activeWorkflowManager, 
+ push, + workflowRepository, + testWebhooks, ).init(); eventService.emit('reload-external-secrets-providers'); @@ -250,6 +307,83 @@ describe('PubSubHandler', () => { expect(externalSecretsManager.reloadAllProviders).toHaveBeenCalled(); }); + it('should install community package on `community-package-install` event', () => { + new PubSubHandler( + eventService, + instanceSettings, + license, + eventbus, + externalSecretsManager, + communityPackagesService, + publisher, + workerStatusService, + activeWorkflowManager, + push, + workflowRepository, + testWebhooks, + ).init(); + + eventService.emit('community-package-install', { + packageName: 'test-package', + packageVersion: '1.0.0', + }); + + expect(communityPackagesService.installOrUpdateNpmPackage).toHaveBeenCalledWith( + 'test-package', + '1.0.0', + ); + }); + + it('should update community package on `community-package-update` event', () => { + new PubSubHandler( + eventService, + instanceSettings, + license, + eventbus, + externalSecretsManager, + communityPackagesService, + publisher, + workerStatusService, + activeWorkflowManager, + push, + workflowRepository, + testWebhooks, + ).init(); + + eventService.emit('community-package-update', { + packageName: 'test-package', + packageVersion: '1.0.0', + }); + + expect(communityPackagesService.installOrUpdateNpmPackage).toHaveBeenCalledWith( + 'test-package', + '1.0.0', + ); + }); + + it('should uninstall community package on `community-package-uninstall` event', () => { + new PubSubHandler( + eventService, + instanceSettings, + license, + eventbus, + externalSecretsManager, + communityPackagesService, + publisher, + workerStatusService, + activeWorkflowManager, + push, + workflowRepository, + testWebhooks, + ).init(); + + eventService.emit('community-package-uninstall', { + packageName: 'test-package', + }); + + expect(communityPackagesService.removeNpmPackage).toHaveBeenCalledWith('test-package'); + }); + it('should generate status on `get-worker-status` event', () 
=> { new PubSubHandler( eventService, @@ -259,15 +393,34 @@ describe('PubSubHandler', () => { externalSecretsManager, communityPackagesService, publisher, - workerStatus, + workerStatusService, + activeWorkflowManager, + push, + workflowRepository, + testWebhooks, ).init(); eventService.emit('get-worker-status'); - expect(workerStatus.generateStatus).toHaveBeenCalled(); + expect(workerStatusService.generateStatus).toHaveBeenCalled(); + }); + }); + + describe('in main process', () => { + const instanceSettings = mock({ + instanceType: 'main', + isLeader: true, + isFollower: false, }); - it('should get worker ID on `get-worker-id` event', () => { + afterEach(() => { + jest.clearAllMocks(); + }); + + it('should set up command and worker response handlers in main process', () => { + // @ts-expect-error Spying on private method + const setupHandlersSpy = jest.spyOn(PubSubHandler.prototype, 'setupHandlers'); + new PubSubHandler( eventService, instanceSettings, @@ -276,14 +429,449 @@ describe('PubSubHandler', () => { externalSecretsManager, communityPackagesService, publisher, - workerStatus, + workerStatusService, + activeWorkflowManager, + push, + workflowRepository, + testWebhooks, ).init(); - eventService.emit('get-worker-id'); + expect(setupHandlersSpy).toHaveBeenCalledWith({ + 'reload-license': expect.any(Function), + 'restart-event-bus': expect.any(Function), + 'reload-external-secrets-providers': expect.any(Function), + 'community-package-install': expect.any(Function), + 'community-package-update': expect.any(Function), + 'community-package-uninstall': expect.any(Function), + 'add-webhooks-triggers-and-pollers': expect.any(Function), + 'remove-triggers-and-pollers': expect.any(Function), + 'display-workflow-activation': expect.any(Function), + 'display-workflow-deactivation': expect.any(Function), + 'display-workflow-activation-error': expect.any(Function), + 'relay-execution-lifecycle-event': expect.any(Function), + 'clear-test-webhooks': expect.any(Function), + 
'response-to-get-worker-status': expect.any(Function), + }); + }); - expect(publisher.publishWorkerResponse).toHaveBeenCalledWith({ - workerId: expect.any(String), - command: 'get-worker-id', + it('should reload license on `reload-license` event', () => { + new PubSubHandler( + eventService, + instanceSettings, + license, + eventbus, + externalSecretsManager, + communityPackagesService, + publisher, + workerStatusService, + activeWorkflowManager, + push, + workflowRepository, + testWebhooks, + ).init(); + + eventService.emit('reload-license'); + + expect(license.reload).toHaveBeenCalled(); + }); + + it('should restart event bus on `restart-event-bus` event', () => { + new PubSubHandler( + eventService, + instanceSettings, + license, + eventbus, + externalSecretsManager, + communityPackagesService, + publisher, + workerStatusService, + activeWorkflowManager, + push, + workflowRepository, + testWebhooks, + ).init(); + + eventService.emit('restart-event-bus'); + + expect(eventbus.restart).toHaveBeenCalled(); + }); + + it('should reload providers on `reload-external-secrets-providers` event', () => { + new PubSubHandler( + eventService, + instanceSettings, + license, + eventbus, + externalSecretsManager, + communityPackagesService, + publisher, + workerStatusService, + activeWorkflowManager, + push, + workflowRepository, + testWebhooks, + ).init(); + + eventService.emit('reload-external-secrets-providers'); + + expect(externalSecretsManager.reloadAllProviders).toHaveBeenCalled(); + }); + + it('should install community package on `community-package-install` event', () => { + new PubSubHandler( + eventService, + instanceSettings, + license, + eventbus, + externalSecretsManager, + communityPackagesService, + publisher, + workerStatusService, + activeWorkflowManager, + push, + workflowRepository, + testWebhooks, + ).init(); + + eventService.emit('community-package-install', { + packageName: 'test-package', + packageVersion: '1.0.0', + }); + + 
expect(communityPackagesService.installOrUpdateNpmPackage).toHaveBeenCalledWith( + 'test-package', + '1.0.0', + ); + }); + + it('should update community package on `community-package-update` event', () => { + new PubSubHandler( + eventService, + instanceSettings, + license, + eventbus, + externalSecretsManager, + communityPackagesService, + publisher, + workerStatusService, + activeWorkflowManager, + push, + workflowRepository, + testWebhooks, + ).init(); + + eventService.emit('community-package-update', { + packageName: 'test-package', + packageVersion: '1.0.0', + }); + + expect(communityPackagesService.installOrUpdateNpmPackage).toHaveBeenCalledWith( + 'test-package', + '1.0.0', + ); + }); + + it('should uninstall community package on `community-package-uninstall` event', () => { + new PubSubHandler( + eventService, + instanceSettings, + license, + eventbus, + externalSecretsManager, + communityPackagesService, + publisher, + workerStatusService, + activeWorkflowManager, + push, + workflowRepository, + testWebhooks, + ).init(); + + eventService.emit('community-package-uninstall', { + packageName: 'test-package', + }); + + expect(communityPackagesService.removeNpmPackage).toHaveBeenCalledWith('test-package'); + }); + + describe('multi-main setup', () => { + it('if leader, should handle `add-webhooks-triggers-and-pollers` event', async () => { + new PubSubHandler( + eventService, + instanceSettings, + license, + eventbus, + externalSecretsManager, + communityPackagesService, + publisher, + workerStatusService, + activeWorkflowManager, + push, + workflowRepository, + testWebhooks, + ).init(); + + const workflowId = 'test-workflow-id'; + + eventService.emit('add-webhooks-triggers-and-pollers', { workflowId }); + + await flushPromises(); + + expect(activeWorkflowManager.add).toHaveBeenCalledWith(workflowId, 'activate', undefined, { + shouldPublish: false, + }); + expect(push.broadcast).toHaveBeenCalledWith('workflowActivated', { workflowId }); + 
expect(publisher.publishCommand).toHaveBeenCalledWith({ + command: 'display-workflow-activation', + payload: { workflowId }, + }); + }); + + it('if follower, should skip `add-webhooks-triggers-and-pollers` event', async () => { + new PubSubHandler( + eventService, + mock({ instanceType: 'main', isLeader: false, isFollower: true }), + license, + eventbus, + externalSecretsManager, + communityPackagesService, + publisher, + workerStatusService, + activeWorkflowManager, + push, + workflowRepository, + testWebhooks, + ).init(); + + const workflowId = 'test-workflow-id'; + + eventService.emit('add-webhooks-triggers-and-pollers', { workflowId }); + + await flushPromises(); + + expect(activeWorkflowManager.add).not.toHaveBeenCalled(); + expect(push.broadcast).not.toHaveBeenCalled(); + expect(publisher.publishCommand).not.toHaveBeenCalled(); + }); + + it('if leader, should handle `remove-triggers-and-pollers` event', async () => { + new PubSubHandler( + eventService, + instanceSettings, + license, + eventbus, + externalSecretsManager, + communityPackagesService, + publisher, + workerStatusService, + activeWorkflowManager, + push, + workflowRepository, + testWebhooks, + ).init(); + + const workflowId = 'test-workflow-id'; + + eventService.emit('remove-triggers-and-pollers', { workflowId }); + + await flushPromises(); + + expect(activeWorkflowManager.removeActivationError).toHaveBeenCalledWith(workflowId); + expect(activeWorkflowManager.removeWorkflowTriggersAndPollers).toHaveBeenCalledWith( + workflowId, + ); + expect(push.broadcast).toHaveBeenCalledWith('workflowDeactivated', { workflowId }); + expect(publisher.publishCommand).toHaveBeenCalledWith({ + command: 'display-workflow-deactivation', + payload: { workflowId }, + }); + }); + + it('if follower, should skip `remove-triggers-and-pollers` event', async () => { + new PubSubHandler( + eventService, + mock({ instanceType: 'main', isLeader: false, isFollower: true }), + license, + eventbus, + externalSecretsManager, + 
communityPackagesService, + publisher, + workerStatusService, + activeWorkflowManager, + push, + workflowRepository, + testWebhooks, + ).init(); + + const workflowId = 'test-workflow-id'; + + eventService.emit('remove-triggers-and-pollers', { workflowId }); + + await flushPromises(); + + expect(activeWorkflowManager.removeActivationError).not.toHaveBeenCalled(); + expect(activeWorkflowManager.removeWorkflowTriggersAndPollers).not.toHaveBeenCalled(); + expect(push.broadcast).not.toHaveBeenCalled(); + expect(publisher.publishCommand).not.toHaveBeenCalled(); + }); + + it('should handle `display-workflow-activation` event', () => { + new PubSubHandler( + eventService, + instanceSettings, + license, + eventbus, + externalSecretsManager, + communityPackagesService, + publisher, + workerStatusService, + activeWorkflowManager, + push, + workflowRepository, + testWebhooks, + ).init(); + + const workflowId = 'test-workflow-id'; + + eventService.emit('display-workflow-activation', { workflowId }); + + expect(push.broadcast).toHaveBeenCalledWith('workflowActivated', { workflowId }); + }); + + it('should handle `display-workflow-deactivation` event', () => { + new PubSubHandler( + eventService, + instanceSettings, + license, + eventbus, + externalSecretsManager, + communityPackagesService, + publisher, + workerStatusService, + activeWorkflowManager, + push, + workflowRepository, + testWebhooks, + ).init(); + + const workflowId = 'test-workflow-id'; + + eventService.emit('display-workflow-deactivation', { workflowId }); + + expect(push.broadcast).toHaveBeenCalledWith('workflowDeactivated', { workflowId }); + }); + + it('should handle `display-workflow-activation-error` event', () => { + new PubSubHandler( + eventService, + instanceSettings, + license, + eventbus, + externalSecretsManager, + communityPackagesService, + publisher, + workerStatusService, + activeWorkflowManager, + push, + workflowRepository, + testWebhooks, + ).init(); + + const workflowId = 'test-workflow-id'; + 
const errorMessage = 'Test error message'; + + eventService.emit('display-workflow-activation-error', { workflowId, errorMessage }); + + expect(push.broadcast).toHaveBeenCalledWith('workflowFailedToActivate', { + workflowId, + errorMessage, + }); + }); + + it('should handle `relay-execution-lifecycle-event` event', () => { + new PubSubHandler( + eventService, + instanceSettings, + license, + eventbus, + externalSecretsManager, + communityPackagesService, + publisher, + workerStatusService, + activeWorkflowManager, + push, + workflowRepository, + testWebhooks, + ).init(); + + const pushRef = 'test-push-ref'; + const type = 'executionStarted'; + const args = { testArg: 'value' }; + + push.getBackend.mockReturnValue( + mock({ hasPushRef: jest.fn().mockReturnValue(true) }), + ); + + eventService.emit('relay-execution-lifecycle-event', { type, args, pushRef }); + + expect(push.send).toHaveBeenCalledWith(type, args, pushRef); + }); + + it('should handle `clear-test-webhooks` event', () => { + new PubSubHandler( + eventService, + instanceSettings, + license, + eventbus, + externalSecretsManager, + communityPackagesService, + publisher, + workerStatusService, + activeWorkflowManager, + push, + workflowRepository, + testWebhooks, + ).init(); + + const webhookKey = 'test-webhook-key'; + const workflowEntity = mock({ id: 'test-workflow-id' }); + const pushRef = 'test-push-ref'; + + push.getBackend.mockReturnValue( + mock({ hasPushRef: jest.fn().mockReturnValue(true) }), + ); + testWebhooks.toWorkflow.mockReturnValue(mock({ id: 'test-workflow-id' })); + + eventService.emit('clear-test-webhooks', { webhookKey, workflowEntity, pushRef }); + + expect(testWebhooks.clearTimeout).toHaveBeenCalledWith(webhookKey); + expect(testWebhooks.deactivateWebhooks).toHaveBeenCalled(); + }); + + it('should handle `response-to-get-worker-status event', () => { + new PubSubHandler( + eventService, + instanceSettings, + license, + eventbus, + externalSecretsManager, + communityPackagesService, + 
publisher, + workerStatusService, + activeWorkflowManager, + push, + workflowRepository, + testWebhooks, + ).init(); + + const workerStatus = mock({ senderId: 'worker-1', loadAvg: [123] }); + + eventService.emit('response-to-get-worker-status', workerStatus); + + expect(push.broadcast).toHaveBeenCalledWith('sendWorkerStatusMessage', { + workerId: workerStatus.senderId, + status: workerStatus, + }); }); }); }); diff --git a/packages/cli/src/scaling/__tests__/scaling.service.test.ts b/packages/cli/src/scaling/__tests__/scaling.service.test.ts index f1ae78f838..a6c14ab964 100644 --- a/packages/cli/src/scaling/__tests__/scaling.service.test.ts +++ b/packages/cli/src/scaling/__tests__/scaling.service.test.ts @@ -6,7 +6,7 @@ import { ApplicationError } from 'n8n-workflow'; import Container from 'typedi'; import type { OrchestrationService } from '@/services/orchestration.service'; -import { mockInstance } from '@test/mocking'; +import { mockInstance, mockLogger } from '@test/mocking'; import { JOB_TYPE_NAME, QUEUE_NAME } from '../constants'; import type { JobProcessor } from '../job-processor'; @@ -74,7 +74,7 @@ describe('ScalingService', () => { instanceSettings.markAsLeader(); scalingService = new ScalingService( - mock(), + mockLogger(), mock(), jobProcessor, globalConfig, diff --git a/packages/cli/src/scaling/__tests__/subscriber.service.test.ts b/packages/cli/src/scaling/__tests__/subscriber.service.test.ts index 31e8486b8c..4f97208b99 100644 --- a/packages/cli/src/scaling/__tests__/subscriber.service.test.ts +++ b/packages/cli/src/scaling/__tests__/subscriber.service.test.ts @@ -17,14 +17,14 @@ describe('Subscriber', () => { describe('constructor', () => { it('should init Redis client in scaling mode', () => { - const subscriber = new Subscriber(mock(), redisClientService, mock()); + const subscriber = new Subscriber(mock(), redisClientService, mock(), mock()); expect(subscriber.getClient()).toEqual(client); }); it('should not init Redis client in regular mode', () 
=> { config.set('executions.mode', 'regular'); - const subscriber = new Subscriber(mock(), redisClientService, mock()); + const subscriber = new Subscriber(mock(), redisClientService, mock(), mock()); expect(subscriber.getClient()).toBeUndefined(); }); @@ -32,7 +32,7 @@ describe('Subscriber', () => { describe('shutdown', () => { it('should disconnect Redis client', () => { - const subscriber = new Subscriber(mock(), redisClientService, mock()); + const subscriber = new Subscriber(mock(), redisClientService, mock(), mock()); subscriber.shutdown(); expect(client.disconnect).toHaveBeenCalled(); }); @@ -40,24 +40,11 @@ describe('Subscriber', () => { describe('subscribe', () => { it('should subscribe to pubsub channel', async () => { - const subscriber = new Subscriber(mock(), redisClientService, mock()); + const subscriber = new Subscriber(mock(), redisClientService, mock(), mock()); await subscriber.subscribe('n8n.commands'); expect(client.subscribe).toHaveBeenCalledWith('n8n.commands', expect.any(Function)); }); }); - - describe('setMessageHandler', () => { - it('should set message handler function for channel', () => { - const subscriber = new Subscriber(mock(), redisClientService, mock()); - const channel = 'n8n.commands'; - const handlerFn = jest.fn(); - - subscriber.setMessageHandler(channel, handlerFn); - - // @ts-expect-error Private field - expect(subscriber.handlers).toEqual(new Map([[channel, handlerFn]])); - }); - }); }); diff --git a/packages/cli/src/scaling/__tests__/worker-server.test.ts b/packages/cli/src/scaling/__tests__/worker-server.test.ts index 778d403bf2..8bcdd3aa5c 100644 --- a/packages/cli/src/scaling/__tests__/worker-server.test.ts +++ b/packages/cli/src/scaling/__tests__/worker-server.test.ts @@ -8,6 +8,7 @@ import * as http from 'node:http'; import type { ExternalHooks } from '@/external-hooks'; import type { PrometheusMetricsService } from '@/metrics/prometheus-metrics.service'; import { bodyParser, rawBodyReader } from '@/middlewares'; 
+import { mockLogger } from '@test/mocking'; import { WorkerServer } from '../worker-server'; @@ -48,7 +49,7 @@ describe('WorkerServer', () => { () => new WorkerServer( globalConfig, - mock(), + mockLogger(), mock(), externalHooks, mock({ instanceType: 'webhook' }), @@ -73,7 +74,7 @@ describe('WorkerServer', () => { new WorkerServer( globalConfig, - mock(), + mockLogger(), mock(), externalHooks, instanceSettings, @@ -100,7 +101,7 @@ describe('WorkerServer', () => { const workerServer = new WorkerServer( globalConfig, - mock(), + mockLogger(), mock(), externalHooks, instanceSettings, @@ -135,7 +136,7 @@ describe('WorkerServer', () => { const workerServer = new WorkerServer( globalConfig, - mock(), + mockLogger(), mock(), externalHooks, instanceSettings, @@ -156,7 +157,7 @@ describe('WorkerServer', () => { const workerServer = new WorkerServer( globalConfig, - mock(), + mockLogger(), mock(), externalHooks, instanceSettings, @@ -174,7 +175,7 @@ describe('WorkerServer', () => { const workerServer = new WorkerServer( globalConfig, - mock(), + mockLogger(), mock(), externalHooks, instanceSettings, diff --git a/packages/cli/src/scaling/job-processor.ts b/packages/cli/src/scaling/job-processor.ts index 49e1383ac6..1322beac27 100644 --- a/packages/cli/src/scaling/job-processor.ts +++ b/packages/cli/src/scaling/job-processor.ts @@ -1,5 +1,5 @@ import type { RunningJobSummary } from '@n8n/api-types'; -import { WorkflowExecute } from 'n8n-core'; +import { InstanceSettings, WorkflowExecute } from 'n8n-core'; import { BINARY_ENCODING, ApplicationError, Workflow } from 'n8n-workflow'; import type { ExecutionStatus, IExecuteResponsePromiseData, IRun } from 'n8n-workflow'; import type PCancelable from 'p-cancelable'; @@ -12,7 +12,14 @@ import { Logger } from '@/logging/logger.service'; import { NodeTypes } from '@/node-types'; import * as WorkflowExecuteAdditionalData from '@/workflow-execute-additional-data'; -import type { Job, JobId, JobResult, RunningJob } from 
'./scaling.types'; +import type { + Job, + JobFinishedMessage, + JobId, + JobResult, + RespondToWebhookMessage, + RunningJob, +} from './scaling.types'; /** * Responsible for processing jobs from the queue, i.e. running enqueued executions. @@ -26,7 +33,10 @@ export class JobProcessor { private readonly executionRepository: ExecutionRepository, private readonly workflowRepository: WorkflowRepository, private readonly nodeTypes: NodeTypes, - ) {} + private readonly instanceSettings: InstanceSettings, + ) { + this.logger = this.logger.withScope('scaling'); + } async processJob(job: Job): Promise { const { executionId, loadStaticData } = job.data; @@ -37,15 +47,18 @@ export class JobProcessor { }); if (!execution) { - this.logger.error('[JobProcessor] Failed to find execution data', { executionId }); - throw new ApplicationError('Failed to find execution data. Aborting execution.', { - extra: { executionId }, - }); + throw new ApplicationError( + `Worker failed to find data for execution ${executionId} (job ${job.id})`, + { level: 'warning' }, + ); } const workflowId = execution.workflowData.id; - this.logger.info(`[JobProcessor] Starting job ${job.id} (execution ${executionId})`); + this.logger.info(`Worker started execution ${executionId} (job ${job.id})`, { + executionId, + jobId: job.id, + }); const startedAt = await this.executionRepository.setRunning(executionId); @@ -58,8 +71,10 @@ export class JobProcessor { }); if (workflowData === null) { - this.logger.error('[JobProcessor] Failed to find workflow', { workflowId, executionId }); - throw new ApplicationError('Failed to find workflow', { extra: { workflowId } }); + throw new ApplicationError( + `Worker failed to find workflow ${workflowId} to run execution ${executionId} (job ${job.id})`, + { level: 'warning' }, + ); } staticData = workflowData.staticData; @@ -102,11 +117,14 @@ export class JobProcessor { additionalData.hooks.hookFunctions.sendResponse = [ async (response: IExecuteResponsePromiseData): Promise 
=> { - await job.progress({ + const msg: RespondToWebhookMessage = { kind: 'respond-to-webhook', executionId, response: this.encodeWebhookResponse(response), - }); + workerId: this.instanceSettings.hostId, + }; + + await job.progress(msg); }, ]; @@ -115,7 +133,7 @@ export class JobProcessor { additionalData.setExecutionStatus = (status: ExecutionStatus) => { // Can't set the status directly in the queued worker, but it will happen in InternalHook.onWorkflowPostExecute this.logger.debug( - `[JobProcessor] Queued worker execution status for ${executionId} is "${status}"`, + `Queued worker execution status for execution ${executionId} (job ${job.id}) is "${status}"`, ); }; @@ -148,7 +166,18 @@ export class JobProcessor { delete this.runningJobs[job.id]; - this.logger.debug('[JobProcessor] Job finished running', { jobId: job.id, executionId }); + this.logger.info(`Worker finished execution ${executionId} (job ${job.id})`, { + executionId, + jobId: job.id, + }); + + const msg: JobFinishedMessage = { + kind: 'job-finished', + executionId, + workerId: this.instanceSettings.hostId, + }; + + await job.progress(msg); /** * @important Do NOT call `workflowExecuteAfter` hook here. diff --git a/packages/cli/src/scaling/pubsub/publisher.service.ts b/packages/cli/src/scaling/pubsub/publisher.service.ts index bfcede6542..06a876accf 100644 --- a/packages/cli/src/scaling/pubsub/publisher.service.ts +++ b/packages/cli/src/scaling/pubsub/publisher.service.ts @@ -1,4 +1,5 @@ import type { Redis as SingleNodeClient, Cluster as MultiNodeClient } from 'ioredis'; +import { InstanceSettings } from 'n8n-core'; import { Service } from 'typedi'; import config from '@/config'; @@ -20,10 +21,13 @@ export class Publisher { constructor( private readonly logger: Logger, private readonly redisClientService: RedisClientService, + private readonly instanceSettings: InstanceSettings, ) { // @TODO: Once this class is only ever initialized in scaling mode, throw in the next line instead. 
if (config.getEnv('executions.mode') !== 'queue') return; + this.logger = this.logger.withScope('scaling'); + this.client = this.redisClientService.createClient({ type: 'publisher(n8n)' }); } @@ -46,7 +50,7 @@ export class Publisher { 'n8n.commands', JSON.stringify({ ...msg, - senderId: config.getEnv('redis.queueModeId'), + senderId: this.instanceSettings.hostId, selfSend: SELF_SEND_COMMANDS.has(msg.command), debounce: !IMMEDIATE_COMMANDS.has(msg.command), }), @@ -55,11 +59,11 @@ export class Publisher { this.logger.debug(`Published ${msg.command} to command channel`); } - /** Publish a response for a command into the `n8n.worker-response` channel. */ + /** Publish a response to a command into the `n8n.worker-response` channel. */ async publishWorkerResponse(msg: PubSub.WorkerResponse) { await this.client.publish('n8n.worker-response', JSON.stringify(msg)); - this.logger.debug(`Published response for ${msg.command} to worker response channel`); + this.logger.debug(`Published ${msg.response} to worker response channel`); } // #endregion diff --git a/packages/cli/src/scaling/pubsub/pubsub-handler.ts b/packages/cli/src/scaling/pubsub/pubsub-handler.ts index 267cae6977..deeed5b584 100644 --- a/packages/cli/src/scaling/pubsub/pubsub-handler.ts +++ b/packages/cli/src/scaling/pubsub/pubsub-handler.ts @@ -1,17 +1,22 @@ import { InstanceSettings } from 'n8n-core'; +import { ensureError } from 'n8n-workflow'; import { Service } from 'typedi'; -import config from '@/config'; +import { ActiveWorkflowManager } from '@/active-workflow-manager'; +import { WorkflowRepository } from '@/databases/repositories/workflow.repository'; import { MessageEventBus } from '@/eventbus/message-event-bus/message-event-bus'; import { EventService } from '@/events/event.service'; import type { PubSubEventMap } from '@/events/maps/pub-sub.event-map'; import { ExternalSecretsManager } from '@/external-secrets/external-secrets-manager.ee'; import { License } from '@/license'; +import { Push } from 
'@/push'; import { Publisher } from '@/scaling/pubsub/publisher.service'; import { CommunityPackagesService } from '@/services/community-packages.service'; import { assertNever } from '@/utils'; +import { TestWebhooks } from '@/webhooks/test-webhooks'; -import { WorkerStatus } from '../worker-status'; +import type { PubSub } from './pubsub.types'; +import { WorkerStatusService } from '../worker-status.service'; /** * Responsible for handling events emitted from messages received via a pubsub channel. @@ -26,7 +31,11 @@ export class PubSubHandler { private readonly externalSecretsManager: ExternalSecretsManager, private readonly communityPackagesService: CommunityPackagesService, private readonly publisher: Publisher, - private readonly workerStatus: WorkerStatus, + private readonly workerStatusService: WorkerStatusService, + private readonly activeWorkflowManager: ActiveWorkflowManager, + private readonly push: Push, + private readonly workflowRepository: WorkflowRepository, + private readonly testWebhooks: TestWebhooks, ) {} init() { @@ -39,19 +48,23 @@ export class PubSubHandler { ...this.commonHandlers, 'get-worker-status': async () => await this.publisher.publishWorkerResponse({ - workerId: config.getEnv('redis.queueModeId'), - command: 'get-worker-status', - payload: this.workerStatus.generateStatus(), - }), - 'get-worker-id': async () => - await this.publisher.publishWorkerResponse({ - workerId: config.getEnv('redis.queueModeId'), - command: 'get-worker-id', + senderId: this.instanceSettings.hostId, + response: 'response-to-get-worker-status', + payload: this.workerStatusService.generateStatus(), }), }); break; case 'main': - // TODO + this.setupHandlers({ + ...this.commonHandlers, + ...this.multiMainHandlers, + 'response-to-get-worker-status': async (payload) => + this.push.broadcast('sendWorkerStatusMessage', { + workerId: payload.senderId, + status: payload, + }), + }); + break; default: assertNever(this.instanceSettings.instanceType); @@ -72,17 +85,8 @@ 
export class PubSubHandler { } } - /** Handlers shared by webhook and worker processes. */ private commonHandlers: { - [K in keyof Pick< - PubSubEventMap, - | 'reload-license' - | 'restart-event-bus' - | 'reload-external-secrets-providers' - | 'community-package-install' - | 'community-package-update' - | 'community-package-uninstall' - >]: (event: PubSubEventMap[K]) => Promise; + [EventName in keyof PubSub.CommonEvents]: (event: PubSubEventMap[EventName]) => Promise; } = { 'reload-license': async () => await this.license.reload(), 'restart-event-bus': async () => await this.eventbus.restart(), @@ -95,4 +99,73 @@ export class PubSubHandler { 'community-package-uninstall': async ({ packageName }) => await this.communityPackagesService.removeNpmPackage(packageName), }; + + private multiMainHandlers: { + [EventName in keyof PubSub.MultiMainEvents]: ( + event: PubSubEventMap[EventName], + ) => Promise; + } = { + 'add-webhooks-triggers-and-pollers': async ({ workflowId }) => { + if (this.instanceSettings.isFollower) return; + + try { + await this.activeWorkflowManager.add(workflowId, 'activate', undefined, { + shouldPublish: false, // prevent leader from re-publishing message + }); + + this.push.broadcast('workflowActivated', { workflowId }); + + await this.publisher.publishCommand({ + command: 'display-workflow-activation', + payload: { workflowId }, + }); // instruct followers to show activation in UI + } catch (e) { + const error = ensureError(e); + const { message } = error; + + await this.workflowRepository.update(workflowId, { active: false }); + + this.push.broadcast('workflowFailedToActivate', { workflowId, errorMessage: message }); + + await this.publisher.publishCommand({ + command: 'display-workflow-activation-error', + payload: { workflowId, errorMessage: message }, + }); // instruct followers to show activation error in UI + } + }, + 'remove-triggers-and-pollers': async ({ workflowId }) => { + if (this.instanceSettings.isFollower) return; + + await 
this.activeWorkflowManager.removeActivationError(workflowId); + await this.activeWorkflowManager.removeWorkflowTriggersAndPollers(workflowId); + + this.push.broadcast('workflowDeactivated', { workflowId }); + + // instruct followers to show workflow deactivation in UI + await this.publisher.publishCommand({ + command: 'display-workflow-deactivation', + payload: { workflowId }, + }); + }, + 'display-workflow-activation': async ({ workflowId }) => + this.push.broadcast('workflowActivated', { workflowId }), + 'display-workflow-deactivation': async ({ workflowId }) => + this.push.broadcast('workflowDeactivated', { workflowId }), + 'display-workflow-activation-error': async ({ workflowId, errorMessage }) => + this.push.broadcast('workflowFailedToActivate', { workflowId, errorMessage }), + 'relay-execution-lifecycle-event': async ({ type, args, pushRef }) => { + if (!this.push.getBackend().hasPushRef(pushRef)) return; + + this.push.send(type, args, pushRef); + }, + 'clear-test-webhooks': async ({ webhookKey, workflowEntity, pushRef }) => { + if (!this.push.getBackend().hasPushRef(pushRef)) return; + + this.testWebhooks.clearTimeout(webhookKey); + + const workflow = this.testWebhooks.toWorkflow(workflowEntity); + + await this.testWebhooks.deactivateWebhooks(workflow); + }, + }; } diff --git a/packages/cli/src/scaling/pubsub/pubsub.types.ts b/packages/cli/src/scaling/pubsub/pubsub.types.ts index be38cc98f8..eec0110201 100644 --- a/packages/cli/src/scaling/pubsub/pubsub.types.ts +++ b/packages/cli/src/scaling/pubsub/pubsub.types.ts @@ -1,4 +1,8 @@ -import type { PubSubCommandMap, PubSubWorkerResponseMap } from '@/events/maps/pub-sub.event-map'; +import type { + PubSubCommandMap, + PubSubEventMap, + PubSubWorkerResponseMap, +} from '@/events/maps/pub-sub.event-map'; import type { Resolve } from '@/utlity.types'; import type { COMMAND_PUBSUB_CHANNEL, WORKER_RESPONSE_PUBSUB_CHANNEL } from '../constants'; @@ -75,9 +79,17 @@ export namespace PubSub { // 
---------------------------------- type _ToWorkerResponse = { - workerId: string; + /** ID of worker sending the response. */ + senderId: string; + + /** IDs of processes to send the response to. */ targets?: string[]; - command: WorkerResponseKey; + + /** Content of worker response. */ + response: WorkerResponseKey; + + /** Whether the worker response should be debounced when received. */ + debounce?: boolean; } & (PubSubWorkerResponseMap[WorkerResponseKey] extends never ? { payload?: never } // some responses carry no payload : { payload: PubSubWorkerResponseMap[WorkerResponseKey] }); @@ -86,18 +98,36 @@ export namespace PubSub { _ToWorkerResponse >; - namespace WorkerResponses { - export type RestartEventBus = ToWorkerResponse<'restart-event-bus'>; - export type ReloadExternalSecretsProviders = - ToWorkerResponse<'reload-external-secrets-providers'>; - export type GetWorkerId = ToWorkerResponse<'get-worker-id'>; - export type GetWorkerStatus = ToWorkerResponse<'get-worker-status'>; - } - /** Response sent via the `n8n.worker-response` pubsub channel. */ - export type WorkerResponse = - | WorkerResponses.RestartEventBus - | WorkerResponses.ReloadExternalSecretsProviders - | WorkerResponses.GetWorkerId - | WorkerResponses.GetWorkerStatus; + export type WorkerResponse = ToWorkerResponse<'response-to-get-worker-status'>; + + // ---------------------------------- + // events + // ---------------------------------- + + /** + * Of all events emitted from pubsub messages, those whose handlers + * are all present in main, worker, and webhook processes. + */ + export type CommonEvents = Pick< + PubSubEventMap, + | 'reload-license' + | 'restart-event-bus' + | 'reload-external-secrets-providers' + | 'community-package-install' + | 'community-package-update' + | 'community-package-uninstall' + >; + + /** Multi-main events emitted from pubsub messages. 
*/ + export type MultiMainEvents = Pick< + PubSubEventMap, + | 'add-webhooks-triggers-and-pollers' + | 'remove-triggers-and-pollers' + | 'display-workflow-activation' + | 'display-workflow-deactivation' + | 'display-workflow-activation-error' + | 'relay-execution-lifecycle-event' + | 'clear-test-webhooks' + >; } diff --git a/packages/cli/src/scaling/pubsub/subscriber.service.ts b/packages/cli/src/scaling/pubsub/subscriber.service.ts index 7586b52ebc..c2045215e0 100644 --- a/packages/cli/src/scaling/pubsub/subscriber.service.ts +++ b/packages/cli/src/scaling/pubsub/subscriber.service.ts @@ -1,5 +1,6 @@ import type { Redis as SingleNodeClient, Cluster as MultiNodeClient } from 'ioredis'; import debounce from 'lodash/debounce'; +import { InstanceSettings } from 'n8n-core'; import { jsonParse } from 'n8n-workflow'; import { Service } from 'typedi'; @@ -17,22 +18,31 @@ import type { PubSub } from './pubsub.types'; export class Subscriber { private readonly client: SingleNodeClient | MultiNodeClient; - private readonly handlers: Map = new Map(); - - // #region Lifecycle - constructor( private readonly logger: Logger, private readonly redisClientService: RedisClientService, private readonly eventService: EventService, + private readonly instanceSettings: InstanceSettings, ) { // @TODO: Once this class is only ever initialized in scaling mode, throw in the next line instead. if (config.getEnv('executions.mode') !== 'queue') return; + this.logger = this.logger.withScope('scaling'); + this.client = this.redisClientService.createClient({ type: 'subscriber(n8n)' }); - this.client.on('message', (channel: PubSub.Channel, message) => { - this.handlers.get(channel)?.(message); + const handlerFn = (msg: PubSub.Command | PubSub.WorkerResponse) => { + const eventName = 'command' in msg ? 
msg.command : msg.response; + this.eventService.emit(eventName, msg.payload); + }; + + const debouncedHandlerFn = debounce(handlerFn, 300); + + this.client.on('message', (channel: PubSub.Channel, str) => { + const msg = this.parseMessage(str, channel); + if (!msg) return; + if (msg.debounce) debouncedHandlerFn(msg); + else handlerFn(msg); }); } @@ -45,66 +55,47 @@ export class Subscriber { this.client.disconnect(); } - // #endregion - - // #region Subscribing - async subscribe(channel: PubSub.Channel) { await this.client.subscribe(channel, (error) => { if (error) { - this.logger.error('Failed to subscribe to channel', { channel, cause: error }); + this.logger.error(`Failed to subscribe to channel ${channel}`, { error }); return; } - this.logger.debug('Subscribed to channel', { channel }); + this.logger.debug(`Subscribed to channel ${channel}`); }); } - /** Set the message handler function for a channel. */ - setMessageHandler(channel: PubSub.Channel, handlerFn: PubSub.HandlerFn) { - this.handlers.set(channel, handlerFn); - } - - // #endregion - - // #region Commands - - setCommandMessageHandler() { - const handlerFn = (msg: PubSub.Command) => this.eventService.emit(msg.command, msg.payload); - const debouncedHandlerFn = debounce(handlerFn, 300); - - this.setMessageHandler('n8n.commands', (str: string) => { - const msg = this.parseCommandMessage(str); - if (!msg) return; - if (msg.debounce) debouncedHandlerFn(msg); - else handlerFn(msg); + private parseMessage(str: string, channel: PubSub.Channel) { + const msg = jsonParse(str, { + fallbackValue: null, }); - } - - private parseCommandMessage(str: string) { - const msg = jsonParse(str, { fallbackValue: null }); if (!msg) { - this.logger.debug('Received invalid string via command channel', { message: str }); - + this.logger.error(`Received malformed message via channel ${channel}`, { + msg: str, + channel, + }); return null; } - this.logger.debug('Received message via command channel', msg); - - const queueModeId = 
config.getEnv('redis.queueModeId'); + const { hostId } = this.instanceSettings; if ( + 'command' in msg && !msg.selfSend && - (msg.senderId === queueModeId || (msg.targets && !msg.targets.includes(queueModeId))) + (msg.senderId === hostId || (msg.targets && !msg.targets.includes(hostId))) ) { - this.logger.debug('Disregarding message - not for this instance', msg); - return null; } + const msgName = 'command' in msg ? msg.command : msg.response; + + this.logger.debug(`Received message ${msgName} via channel ${channel}`, { + msg, + channel, + }); + return msg; } - - // #endregion } diff --git a/packages/cli/src/scaling/scaling.service.ts b/packages/cli/src/scaling/scaling.service.ts index f9d805140d..0645642ddb 100644 --- a/packages/cli/src/scaling/scaling.service.ts +++ b/packages/cli/src/scaling/scaling.service.ts @@ -6,6 +6,7 @@ import { sleep, jsonStringify, ErrorReporterProxy, + ensureError, } from 'n8n-workflow'; import type { IExecuteResponsePromiseData } from 'n8n-workflow'; import { strict } from 'node:assert'; @@ -20,6 +21,7 @@ import { MaxStalledCountError } from '@/errors/max-stalled-count.error'; import { EventService } from '@/events/event.service'; import { Logger } from '@/logging/logger.service'; import { OrchestrationService } from '@/services/orchestration.service'; +import { assertNever } from '@/utils'; import { JOB_TYPE_NAME, QUEUE_NAME } from './constants'; import { JobProcessor } from './job-processor'; @@ -31,7 +33,8 @@ import type { JobStatus, JobId, QueueRecoveryContext, - JobReport, + JobMessage, + JobFailedMessage, } from './scaling.types'; @Service() @@ -47,7 +50,9 @@ export class ScalingService { private readonly instanceSettings: InstanceSettings, private readonly orchestrationService: OrchestrationService, private readonly eventService: EventService, - ) {} + ) { + this.logger = this.logger.withScope('scaling'); + } // #region Lifecycle @@ -77,7 +82,7 @@ export class ScalingService { this.scheduleQueueMetrics(); - 
this.logger.debug('[ScalingService] Queue setup completed'); + this.logger.debug('Queue setup completed'); } setupWorker(concurrency: number) { @@ -87,34 +92,47 @@ export class ScalingService { void this.queue.process(JOB_TYPE_NAME, concurrency, async (job: Job) => { try { await this.jobProcessor.processJob(job); - } catch (error: unknown) { - // Errors thrown here will be sent to the main instance by bull. Logging - // them out and rethrowing them allows to find out which worker had the - // issue. - this.logger.error('[ScalingService] Executing a job errored', { - jobId: job.id, - executionId: job.data.executionId, - error, - }); - ErrorReporterProxy.error(error); - throw error; + } catch (error) { + await this.reportJobProcessingError(ensureError(error), job); } }); - this.logger.debug('[ScalingService] Worker setup completed'); + this.logger.debug('Worker setup completed'); + } + + private async reportJobProcessingError(error: Error, job: Job) { + const { executionId } = job.data; + + this.logger.error(`Worker errored while running execution ${executionId} (job ${job.id})`, { + error, + executionId, + jobId: job.id, + }); + + const msg: JobFailedMessage = { + kind: 'job-failed', + executionId, + workerId: this.instanceSettings.hostId, + errorMsg: error.message, + errorStack: error.stack ?? 
'', + }; + + await job.progress(msg); + + ErrorReporterProxy.error(error, { executionId }); + + throw error; } @OnShutdown(HIGHEST_SHUTDOWN_PRIORITY) async stop() { - await this.queue.pause(true, true); + await this.queue.pause(true, true); // no more jobs will be picked up - this.logger.debug('[ScalingService] Queue paused'); + this.logger.debug('Queue paused'); this.stopQueueRecovery(); this.stopQueueMetrics(); - this.logger.debug('[ScalingService] Queue recovery and metrics stopped'); - let count = 0; while (this.getRunningJobsCount() !== 0) { @@ -159,7 +177,10 @@ export class ScalingService { const job = await this.queue.add(JOB_TYPE_NAME, jobData, jobOptions); - this.logger.info(`[ScalingService] Added job ${job.id} (execution ${jobData.executionId})`); + const { executionId } = jobData; + const jobId = job.id; + + this.logger.info(`Enqueued execution ${executionId} (job ${jobId})`, { executionId, jobId }); return job; } @@ -180,16 +201,16 @@ export class ScalingService { try { if (await job.isActive()) { await job.progress({ kind: 'abort-job' }); // being processed by worker - this.logger.debug('[ScalingService] Stopped active job', props); + this.logger.debug('Stopped active job', props); return true; } await job.remove(); // not yet picked up, or waiting for next pickup (stalled) - this.logger.debug('[ScalingService] Stopped inactive job', props); + this.logger.debug('Stopped inactive job', props); return true; } catch (error: unknown) { await job.progress({ kind: 'abort-job' }); - this.logger.error('[ScalingService] Failed to stop job', { ...props, error }); + this.logger.error('Failed to stop job', { ...props, error }); return false; } } @@ -216,7 +237,7 @@ export class ScalingService { */ private registerWorkerListeners() { this.queue.on('global:progress', (jobId: JobId, msg: unknown) => { - if (!this.isPubSubMessage(msg)) return; + if (!this.isJobMessage(msg)) return; if (msg.kind === 'abort-job') this.jobProcessor.stopJob(jobId); }); @@ -233,12 +254,12 
@@ export class ScalingService { * Even if Redis recovers, worker will remain unable to process jobs. */ if (error.message.includes('Error initializing Lua scripts')) { - this.logger.error('[ScalingService] Fatal error initializing worker', { error }); - this.logger.error('[ScalingService] Exiting process...'); + this.logger.error('Fatal error initializing worker', { error }); + this.logger.error('Exiting process...'); process.exit(1); } - this.logger.error('[ScalingService] Queue errored', { error }); + this.logger.error('Queue errored', { error }); throw error; }); @@ -251,17 +272,47 @@ export class ScalingService { this.queue.on('error', (error: Error) => { if ('code' in error && error.code === 'ECONNREFUSED') return; // handled by RedisClientService.retryStrategy - this.logger.error('[ScalingService] Queue errored', { error }); + this.logger.error('Queue errored', { error }); throw error; }); - this.queue.on('global:progress', (_jobId: JobId, msg: unknown) => { - if (!this.isPubSubMessage(msg)) return; + this.queue.on('global:progress', (jobId: JobId, msg: unknown) => { + if (!this.isJobMessage(msg)) return; - if (msg.kind === 'respond-to-webhook') { - const decodedResponse = this.decodeWebhookResponse(msg.response); - this.activeExecutions.resolveResponsePromise(msg.executionId, decodedResponse); + // completion and failure are reported via `global:progress` to convey more details + // than natively provided by Bull in `global:completed` and `global:failed` events + + switch (msg.kind) { + case 'respond-to-webhook': + const decodedResponse = this.decodeWebhookResponse(msg.response); + this.activeExecutions.resolveResponsePromise(msg.executionId, decodedResponse); + break; + case 'job-finished': + this.logger.info(`Execution ${msg.executionId} (job ${jobId}) finished successfully`, { + workerId: msg.workerId, + executionId: msg.executionId, + jobId, + }); + break; + case 'job-failed': + this.logger.error( + [ + `Execution ${msg.executionId} (job ${jobId}) 
failed`, + msg.errorStack ? `\n${msg.errorStack}\n` : '', + ].join(''), + { + workerId: msg.workerId, + errorMsg: msg.errorMsg, + executionId: msg.executionId, + jobId, + }, + ); + break; + case 'abort-job': + break; // only for worker + default: + assertNever(msg); } }); @@ -271,7 +322,8 @@ export class ScalingService { } } - private isPubSubMessage(candidate: unknown): candidate is JobReport { + /** Whether the argument is a message sent via Bull's internal pubsub setup. */ + private isJobMessage(candidate: unknown): candidate is JobMessage { return typeof candidate === 'object' && candidate !== null && 'kind' in candidate; } @@ -343,6 +395,8 @@ export class ScalingService { if (this.queueMetricsInterval) { clearInterval(this.queueMetricsInterval); this.queueMetricsInterval = undefined; + + this.logger.debug('Queue metrics collection stopped'); } } @@ -361,10 +415,10 @@ export class ScalingService { const nextWaitMs = await this.recoverFromQueue(); this.scheduleQueueRecovery(nextWaitMs); } catch (error) { - this.logger.error('[ScalingService] Failed to recover dangling executions from queue', { + this.logger.error('Failed to recover dangling executions from queue', { msg: this.toErrorMsg(error), }); - this.logger.error('[ScalingService] Retrying...'); + this.logger.error('Retrying...'); this.scheduleQueueRecovery(); } @@ -372,11 +426,13 @@ export class ScalingService { const wait = [this.queueRecoveryContext.waitMs / Time.minutes.toMilliseconds, 'min'].join(' '); - this.logger.debug(`[ScalingService] Scheduled queue recovery check for next ${wait}`); + this.logger.debug(`Scheduled queue recovery check for next ${wait}`); } private stopQueueRecovery() { clearTimeout(this.queueRecoveryContext.timeout); + + this.logger.debug('Queue recovery stopped'); } /** @@ -389,7 +445,7 @@ export class ScalingService { const storedIds = await this.executionRepository.getInProgressExecutionIds(batchSize); if (storedIds.length === 0) { - this.logger.debug('[ScalingService] 
Completed queue recovery check, no dangling executions'); + this.logger.debug('Completed queue recovery check, no dangling executions'); return waitMs; } @@ -398,23 +454,22 @@ export class ScalingService { const queuedIds = new Set(runningJobs.map((job) => job.data.executionId)); if (queuedIds.size === 0) { - this.logger.debug('[ScalingService] Completed queue recovery check, no dangling executions'); + this.logger.debug('Completed queue recovery check, no dangling executions'); return waitMs; } const danglingIds = storedIds.filter((id) => !queuedIds.has(id)); if (danglingIds.length === 0) { - this.logger.debug('[ScalingService] Completed queue recovery check, no dangling executions'); + this.logger.debug('Completed queue recovery check, no dangling executions'); return waitMs; } await this.executionRepository.markAsCrashed(danglingIds); - this.logger.info( - '[ScalingService] Completed queue recovery check, recovered dangling executions', - { danglingIds }, - ); + this.logger.info('Completed queue recovery check, recovered dangling executions', { + danglingIds, + }); // if this cycle used up the whole batch size, it is possible for there to be // dangling executions outside this check, so speed up next cycle diff --git a/packages/cli/src/scaling/scaling.types.ts b/packages/cli/src/scaling/scaling.types.ts index fa8210450f..ae7e790a16 100644 --- a/packages/cli/src/scaling/scaling.types.ts +++ b/packages/cli/src/scaling/scaling.types.ts @@ -23,19 +23,44 @@ export type JobStatus = Bull.JobStatus; export type JobOptions = Bull.JobOptions; -export type JobReport = JobReportToMain | JobReportToWorker; +/** + * Message sent by main to worker and vice versa about a job. `JobMessage` is + * sent via Bull's internal pubsub setup - do not confuse with `PubSub.Command` + * and `PubSub.Response`, which are sent via n8n's own pubsub setup to keep + * main and worker processes in sync outside of a job's lifecycle. 
+ */ +export type JobMessage = + | RespondToWebhookMessage + | JobFinishedMessage + | JobFailedMessage + | AbortJobMessage; -type JobReportToMain = RespondToWebhookMessage; - -type JobReportToWorker = AbortJobMessage; - -type RespondToWebhookMessage = { +/** Message sent by worker to main to respond to a webhook. */ +export type RespondToWebhookMessage = { kind: 'respond-to-webhook'; executionId: string; response: IExecuteResponsePromiseData; + workerId: string; }; -type AbortJobMessage = { +/** Message sent by worker to main to report a job has finished successfully. */ +export type JobFinishedMessage = { + kind: 'job-finished'; + executionId: string; + workerId: string; +}; + +/** Message sent by worker to main to report a job has failed. */ +export type JobFailedMessage = { + kind: 'job-failed'; + executionId: string; + workerId: string; + errorMsg: string; + errorStack: string; +}; + +/** Message sent by main to worker to abort a job. */ +export type AbortJobMessage = { kind: 'abort-job'; }; diff --git a/packages/cli/src/scaling/worker-server.ts b/packages/cli/src/scaling/worker-server.ts index 3cf6995882..0af948670f 100644 --- a/packages/cli/src/scaling/worker-server.ts +++ b/packages/cli/src/scaling/worker-server.ts @@ -58,6 +58,8 @@ export class WorkerServer { ) { assert(this.instanceSettings.instanceType === 'worker'); + this.logger = this.logger.withScope('scaling'); + this.app = express(); this.app.disable('x-powered-by'); @@ -84,6 +86,10 @@ export class WorkerServer { await this.mountEndpoints(); + this.logger.debug('Worker server initialized', { + endpoints: Object.keys(this.endpointsConfig), + }); + await new Promise((resolve) => this.server.listen(this.port, this.address, resolve)); await this.externalHooks.run('worker.ready'); @@ -141,6 +147,8 @@ export class WorkerServer { this.overwritesLoaded = true; + this.logger.debug('Worker loaded credentials overwrites'); + ResponseHelper.sendSuccessResponse(res, { success: true }, true, 200); } } diff --git 
a/packages/cli/src/scaling/worker-status.ts b/packages/cli/src/scaling/worker-status.service.ts similarity index 72% rename from packages/cli/src/scaling/worker-status.ts rename to packages/cli/src/scaling/worker-status.service.ts index cddccc7e1f..a50a1b8d2e 100644 --- a/packages/cli/src/scaling/worker-status.ts +++ b/packages/cli/src/scaling/worker-status.service.ts @@ -1,18 +1,22 @@ +import type { WorkerStatus } from '@n8n/api-types'; +import { InstanceSettings } from 'n8n-core'; import os from 'node:os'; import { Service } from 'typedi'; -import config from '@/config'; import { N8N_VERSION } from '@/constants'; import { JobProcessor } from './job-processor'; @Service() -export class WorkerStatus { - constructor(private readonly jobProcessor: JobProcessor) {} +export class WorkerStatusService { + constructor( + private readonly jobProcessor: JobProcessor, + private readonly instanceSettings: InstanceSettings, + ) {} - generateStatus() { + generateStatus(): WorkerStatus { return { - workerId: config.getEnv('redis.queueModeId'), + senderId: this.instanceSettings.hostId, runningJobsSummary: this.jobProcessor.getRunningJobsSummary(), freeMem: os.freemem(), totalMem: os.totalmem(), diff --git a/packages/cli/src/server.ts b/packages/cli/src/server.ts index b83e2bdb2a..3cfd93054b 100644 --- a/packages/cli/src/server.ts +++ b/packages/cli/src/server.ts @@ -39,7 +39,7 @@ import '@/controllers/annotation-tags.controller.ee'; import '@/controllers/auth.controller'; import '@/controllers/binary-data.controller'; import '@/controllers/curl.controller'; -import '@/controllers/ai-assistant.controller'; +import '@/controllers/ai.controller'; import '@/controllers/dynamic-node-parameters.controller'; import '@/controllers/invitation.controller'; import '@/controllers/me.controller'; @@ -79,8 +79,9 @@ export class Server extends AbstractServer { private readonly orchestrationService: OrchestrationService, private readonly postHogClient: PostHogClient, private readonly 
eventService: EventService, + private readonly instanceSettings: InstanceSettings, ) { - super('main'); + super(); this.testWebhooksEnabled = true; this.webhooksEnabled = !this.globalConfig.endpoints.disableProductionWebhooksOnMainProcess; @@ -97,7 +98,7 @@ export class Server extends AbstractServer { this.endpointPresetCredentials = this.globalConfig.credentials.overwrite.endpoint; await super.start(); - this.logger.debug(`Server ID: ${this.uniqueInstanceId}`); + this.logger.debug(`Server ID: ${this.instanceSettings.hostId}`); if (inDevelopment && process.env.N8N_DEV_RELOAD === 'true') { void this.loadNodesAndCredentials.setupHotReload(); @@ -252,6 +253,7 @@ export class Server extends AbstractServer { JSON.stringify({ dsn: this.globalConfig.sentry.frontendDsn, environment: process.env.ENVIRONMENT || 'development', + serverName: process.env.DEPLOYMENT_NAME, release: N8N_VERSION, }), ); diff --git a/packages/cli/src/services/__tests__/orchestration.service.test.ts b/packages/cli/src/services/__tests__/orchestration.service.test.ts index 7bbd797000..0169462891 100644 --- a/packages/cli/src/services/__tests__/orchestration.service.test.ts +++ b/packages/cli/src/services/__tests__/orchestration.service.test.ts @@ -6,20 +6,12 @@ import Container from 'typedi'; import { ActiveWorkflowManager } from '@/active-workflow-manager'; import config from '@/config'; -import { MessageEventBus } from '@/eventbus/message-event-bus/message-event-bus'; import { ExternalSecretsManager } from '@/external-secrets/external-secrets-manager.ee'; import { Push } from '@/push'; -import type { PubSub } from '@/scaling/pubsub/pubsub.types'; -import * as helpers from '@/services/orchestration/helpers'; -import { handleCommandMessageMain } from '@/services/orchestration/main/handle-command-message-main'; -import { handleWorkerResponseMessageMain } from '@/services/orchestration/main/handle-worker-response-message-main'; -import { OrchestrationHandlerMainService } from 
'@/services/orchestration/main/orchestration.handler.main.service'; import { OrchestrationService } from '@/services/orchestration.service'; import { RedisClientService } from '@/services/redis-client.service'; import { mockInstance } from '@test/mocking'; -import type { MainResponseReceivedHandlerOptions } from '../orchestration/main/types'; - config.set('executions.mode', 'queue'); config.set('generic.instanceType', 'main'); @@ -29,27 +21,13 @@ const mockRedisClient = mock(); redisClientService.createClient.mockReturnValue(mockRedisClient); const os = Container.get(OrchestrationService); -const handler = Container.get(OrchestrationHandlerMainService); mockInstance(ActiveWorkflowManager); -let queueModeId: string; - -const workerRestartEventBusResponse: PubSub.WorkerResponse = { - workerId: 'test', - command: 'restart-event-bus', - payload: { - result: 'success', - }, -}; - describe('Orchestration Service', () => { mockInstance(Push); mockInstance(ExternalSecretsManager); - const eventBus = mockInstance(MessageEventBus); beforeAll(async () => { - queueModeId = config.get('redis.queueModeId'); - // @ts-expect-error readonly property instanceSettings.instanceType = 'main'; }); @@ -64,71 +42,8 @@ describe('Orchestration Service', () => { test('should initialize', async () => { await os.init(); - await handler.init(); // @ts-expect-error Private field expect(os.publisher).toBeDefined(); - // @ts-expect-error Private field - expect(handler.subscriber).toBeDefined(); - expect(queueModeId).toBeDefined(); - }); - - test('should handle worker responses', async () => { - const response = await handleWorkerResponseMessageMain( - JSON.stringify(workerRestartEventBusResponse), - mock(), - ); - expect(response?.command).toEqual('restart-event-bus'); - }); - - test('should handle command messages from others', async () => { - const responseFalseId = await handleCommandMessageMain( - JSON.stringify({ - senderId: 'test', - command: 'reload-license', - }), - ); - 
expect(responseFalseId).toBeDefined(); - expect(responseFalseId!.command).toEqual('reload-license'); - expect(responseFalseId!.senderId).toEqual('test'); - }); - - test('should reject command messages from itself', async () => { - const response = await handleCommandMessageMain( - JSON.stringify({ ...workerRestartEventBusResponse, senderId: queueModeId }), - ); - expect(response).toBeDefined(); - expect(response!.command).toEqual('restart-event-bus'); - expect(response!.senderId).toEqual(queueModeId); - expect(eventBus.restart).not.toHaveBeenCalled(); - }); - - test('should send command messages', async () => { - // @ts-expect-error Private field - jest.spyOn(os.publisher, 'publishCommand').mockImplementation(async () => {}); - await os.getWorkerIds(); - // @ts-expect-error Private field - expect(os.publisher.publishCommand).toHaveBeenCalled(); - // @ts-expect-error Private field - jest.spyOn(os.publisher, 'publishCommand').mockRestore(); - }); - - test('should prevent receiving commands too often', async () => { - jest.spyOn(helpers, 'debounceMessageReceiver'); - const res1 = await handleCommandMessageMain( - JSON.stringify({ - senderId: 'test', - command: 'reload-external-secrets-providers', - }), - ); - const res2 = await handleCommandMessageMain( - JSON.stringify({ - senderId: 'test', - command: 'reload-external-secrets-providers', - }), - ); - expect(helpers.debounceMessageReceiver).toHaveBeenCalledTimes(2); - expect(res1!.payload).toBeUndefined(); - expect(res2!.payload).toEqual({ result: 'debounced' }); }); describe('shouldAddWebhooks', () => { diff --git a/packages/cli/src/services/ai-assistant.service.ts b/packages/cli/src/services/ai.service.ts similarity index 85% rename from packages/cli/src/services/ai-assistant.service.ts rename to packages/cli/src/services/ai.service.ts index 76b6e3fffb..a7b07219b5 100644 --- a/packages/cli/src/services/ai-assistant.service.ts +++ b/packages/cli/src/services/ai.service.ts @@ -3,7 +3,6 @@ import type { AiAssistantSDK 
} from '@n8n_io/ai-assistant-sdk'; import { AiAssistantClient } from '@n8n_io/ai-assistant-sdk'; import { assert, type IUser } from 'n8n-workflow'; import { Service } from 'typedi'; -import type { Response } from 'undici'; import config from '@/config'; import type { AiAssistantRequest } from '@/requests'; @@ -12,7 +11,7 @@ import { N8N_VERSION } from '../constants'; import { License } from '../license'; @Service() -export class AiAssistantService { +export class AiService { private client: AiAssistantClient | undefined; constructor( @@ -40,7 +39,7 @@ export class AiAssistantService { }); } - async chat(payload: AiAssistantSDK.ChatRequestPayload, user: IUser): Promise { + async chat(payload: AiAssistantSDK.ChatRequestPayload, user: IUser) { if (!this.client) { await this.init(); } @@ -57,4 +56,13 @@ export class AiAssistantService { return await this.client.applySuggestion(payload, { id: user.id }); } + + async askAi(payload: AiAssistantSDK.AskAiRequestPayload, user: IUser) { + if (!this.client) { + await this.init(); + } + assert(this.client, 'Assistant client not setup'); + + return await this.client.askAi(payload, { id: user.id }); + } } diff --git a/packages/cli/src/services/frontend.service.ts b/packages/cli/src/services/frontend.service.ts index ea8a135ff2..a83158e96e 100644 --- a/packages/cli/src/services/frontend.service.ts +++ b/packages/cli/src/services/frontend.service.ts @@ -212,8 +212,8 @@ export class FrontendService { banners: { dismissed: [], }, - ai: { - enabled: config.getEnv('ai.enabled'), + askAi: { + enabled: false, }, workflowHistory: { pruneTime: -1, @@ -274,6 +274,7 @@ export class FrontendService { const isS3Available = config.getEnv('binaryDataManager.availableModes').includes('s3'); const isS3Licensed = this.license.isBinaryDataS3Licensed(); const isAiAssistantEnabled = this.license.isAiAssistantEnabled(); + const isAskAiEnabled = this.license.isAskAiEnabled(); this.settings.license.planName = this.license.getPlanName(); 
this.settings.license.consumerId = this.license.getConsumerId(); @@ -330,6 +331,10 @@ export class FrontendService { this.settings.aiAssistant.enabled = isAiAssistantEnabled; } + if (isAskAiEnabled) { + this.settings.askAi.enabled = isAskAiEnabled; + } + this.settings.mfa.enabled = config.get('mfa.enabled'); this.settings.executionMode = config.getEnv('executions.mode'); diff --git a/packages/cli/src/services/import.service.ts b/packages/cli/src/services/import.service.ts index a486ff8396..2402863bab 100644 --- a/packages/cli/src/services/import.service.ts +++ b/packages/cli/src/services/import.service.ts @@ -88,8 +88,7 @@ export class ImportService { try { await replaceInvalidCredentials(workflow); } catch (e) { - const error = e instanceof Error ? e : new Error(`${e}`); - this.logger.error('Failed to replace invalid credential', error); + this.logger.error('Failed to replace invalid credential', { error: e }); } } diff --git a/packages/cli/src/services/orchestration.handler.base.service.ts b/packages/cli/src/services/orchestration.handler.base.service.ts deleted file mode 100644 index e994ff6308..0000000000 --- a/packages/cli/src/services/orchestration.handler.base.service.ts +++ /dev/null @@ -1,26 +0,0 @@ -import type { MainResponseReceivedHandlerOptions } from './orchestration/main/types'; -import type { WorkerCommandReceivedHandlerOptions } from './orchestration/worker/types'; - -export abstract class OrchestrationHandlerService { - protected initialized = false; - - async init() { - await this.initSubscriber(); - this.initialized = true; - } - - async initWithOptions( - options: WorkerCommandReceivedHandlerOptions | MainResponseReceivedHandlerOptions, - ) { - await this.initSubscriber(options); - this.initialized = true; - } - - async shutdown() { - this.initialized = false; - } - - protected abstract initSubscriber( - options?: WorkerCommandReceivedHandlerOptions | MainResponseReceivedHandlerOptions, - ): Promise; -} diff --git 
a/packages/cli/src/services/orchestration.service.ts b/packages/cli/src/services/orchestration.service.ts index 666fe48ac6..64dbd0ddae 100644 --- a/packages/cli/src/services/orchestration.service.ts +++ b/packages/cli/src/services/orchestration.service.ts @@ -43,20 +43,6 @@ export class OrchestrationService { return !this.isMultiMainSetupEnabled; } - get instanceId() { - return config.getEnv('redis.queueModeId'); - } - - /** @deprecated use InstanceSettings.isLeader */ - get isLeader() { - return this.instanceSettings.isLeader; - } - - /** @deprecated use InstanceSettings.isFollower */ - get isFollower() { - return this.instanceSettings.isFollower; - } - sanityCheck() { return this.isInitialized && config.get('executions.mode') === 'queue'; } @@ -104,7 +90,7 @@ export class OrchestrationService { if (!this.sanityCheck()) return; this.logger.debug( - `[Instance ID ${this.instanceId}] Publishing command "${commandKey}"`, + `[Instance ID ${this.instanceSettings.hostId}] Publishing command "${commandKey}"`, payload, ); @@ -128,16 +114,6 @@ export class OrchestrationService { }); } - async getWorkerIds() { - if (!this.sanityCheck()) return; - - const command = 'get-worker-id'; - - this.logger.debug(`Sending "${command}" to command channel`); - - await this.publisher.publishCommand({ command }); - } - // ---------------------------------- // activations // ---------------------------------- @@ -154,7 +130,7 @@ export class OrchestrationService { if (activationMode === 'leadershipChange') return false; - return this.isLeader; // 'update' or 'activate' + return this.instanceSettings.isLeader; // 'update' or 'activate' } /** @@ -164,6 +140,6 @@ export class OrchestrationService { * triggers and pollers in memory, to ensure they are not duplicated. 
*/ shouldAddTriggersAndPollers() { - return this.isLeader; + return this.instanceSettings.isLeader; } } diff --git a/packages/cli/src/services/orchestration/helpers.ts b/packages/cli/src/services/orchestration/helpers.ts deleted file mode 100644 index f36bb4adf9..0000000000 --- a/packages/cli/src/services/orchestration/helpers.ts +++ /dev/null @@ -1,43 +0,0 @@ -import { jsonParse } from 'n8n-workflow'; -import os from 'node:os'; -import { Container } from 'typedi'; - -import { Logger } from '@/logging/logger.service'; -import { COMMAND_PUBSUB_CHANNEL } from '@/scaling/constants'; -import type { PubSub } from '@/scaling/pubsub/pubsub.types'; - -export interface RedisServiceCommandLastReceived { - [date: string]: Date; -} - -export function messageToRedisServiceCommandObject(messageString: string) { - if (!messageString) return; - let message: PubSub.Command; - try { - message = jsonParse(messageString); - } catch { - Container.get(Logger).debug( - `Received invalid message via channel ${COMMAND_PUBSUB_CHANNEL}: "${messageString}"`, - ); - return; - } - return message; -} - -const lastReceived: RedisServiceCommandLastReceived = {}; - -export function debounceMessageReceiver(message: PubSub.Command, timeout: number = 100) { - const now = new Date(); - const lastReceivedDate = lastReceived[message.command]; - if (lastReceivedDate && now.getTime() - lastReceivedDate.getTime() < timeout) { - return false; - } - lastReceived[message.command] = now; - return true; -} - -export function getOsCpuString(): string { - const cpus = os.cpus(); - if (cpus.length === 0) return 'no CPU info'; - return `${cpus.length}x ${cpus[0].model} - speed: ${cpus[0].speed}`; -} diff --git a/packages/cli/src/services/orchestration/main/handle-command-message-main.ts b/packages/cli/src/services/orchestration/main/handle-command-message-main.ts deleted file mode 100644 index 7af2fa6f56..0000000000 --- a/packages/cli/src/services/orchestration/main/handle-command-message-main.ts +++ /dev/null @@ 
-1,234 +0,0 @@ -import { InstanceSettings } from 'n8n-core'; -import { Container } from 'typedi'; - -import { ActiveWorkflowManager } from '@/active-workflow-manager'; -import config from '@/config'; -import { WorkflowRepository } from '@/databases/repositories/workflow.repository'; -import { MessageEventBus } from '@/eventbus/message-event-bus/message-event-bus'; -import { ExternalSecretsManager } from '@/external-secrets/external-secrets-manager.ee'; -import { License } from '@/license'; -import { Logger } from '@/logging/logger.service'; -import { Push } from '@/push'; -import { CommunityPackagesService } from '@/services/community-packages.service'; -import { OrchestrationService } from '@/services/orchestration.service'; -import { TestWebhooks } from '@/webhooks/test-webhooks'; - -import { debounceMessageReceiver, messageToRedisServiceCommandObject } from '../helpers'; - -// eslint-disable-next-line complexity -export async function handleCommandMessageMain(messageString: string) { - const queueModeId = config.getEnv('redis.queueModeId'); - const isMainInstance = Container.get(InstanceSettings).instanceType === 'main'; - const message = messageToRedisServiceCommandObject(messageString); - const logger = Container.get(Logger); - - if (message) { - logger.debug( - `RedisCommandHandler(main): Received command message ${message.command} from ${message.senderId}`, - ); - - if ( - !message.selfSend && - (message.senderId === queueModeId || - (message.targets && !message.targets.includes(queueModeId))) - ) { - logger.debug( - `Skipping command message ${message.command} because it's not for this instance.`, - ); - return message; - } - - const push = Container.get(Push); - - switch (message.command) { - case 'reload-license': - if (!debounceMessageReceiver(message, 500)) { - return { ...message, payload: { result: 'debounced' } }; - } - - if (isMainInstance && !config.getEnv('multiMainSetup.enabled')) { - return message; // this main is the sender, so disregard - } - 
await Container.get(License).reload(); - break; - case 'restart-event-bus': - if (!debounceMessageReceiver(message, 200)) { - return { ...message, payload: { result: 'debounced' } }; - } - await Container.get(MessageEventBus).restart(); - case 'reload-external-secrets-providers': - if (!debounceMessageReceiver(message, 200)) { - return { ...message, payload: { result: 'debounced' } }; - } - await Container.get(ExternalSecretsManager).reloadAllProviders(); - break; - case 'community-package-install': - case 'community-package-update': - case 'community-package-uninstall': - if (!debounceMessageReceiver(message, 200)) { - return message; - } - const { packageName } = message.payload; - const communityPackagesService = Container.get(CommunityPackagesService); - if (message.command === 'community-package-uninstall') { - await communityPackagesService.removeNpmPackage(packageName); - } else { - await communityPackagesService.installOrUpdateNpmPackage( - packageName, - message.payload.packageVersion, - ); - } - break; - - case 'add-webhooks-triggers-and-pollers': { - if (!debounceMessageReceiver(message, 100)) { - return { ...message, payload: { result: 'debounced' } }; - } - - const orchestrationService = Container.get(OrchestrationService); - - if (orchestrationService.isFollower) break; - - if (typeof message.payload?.workflowId !== 'string') break; - - const { workflowId } = message.payload; - - try { - await Container.get(ActiveWorkflowManager).add(workflowId, 'activate', undefined, { - shouldPublish: false, // prevent leader re-publishing message - }); - - push.broadcast('workflowActivated', { workflowId }); - - // instruct followers to show activation in UI - await orchestrationService.publish('display-workflow-activation', { workflowId }); - } catch (error) { - if (error instanceof Error) { - await Container.get(WorkflowRepository).update(workflowId, { active: false }); - - Container.get(Push).broadcast('workflowFailedToActivate', { - workflowId, - errorMessage: 
error.message, - }); - - await Container.get(OrchestrationService).publish('display-workflow-activation-error', { - workflowId, - errorMessage: error.message, - }); - } - } - - break; - } - - case 'remove-triggers-and-pollers': { - if (!debounceMessageReceiver(message, 100)) { - return { ...message, payload: { result: 'debounced' } }; - } - - const orchestrationService = Container.get(OrchestrationService); - - if (orchestrationService.isFollower) break; - - if (typeof message.payload?.workflowId !== 'string') break; - - const { workflowId } = message.payload; - - const activeWorkflowManager = Container.get(ActiveWorkflowManager); - - await activeWorkflowManager.removeActivationError(workflowId); - await activeWorkflowManager.removeWorkflowTriggersAndPollers(workflowId); - - push.broadcast('workflowDeactivated', { workflowId }); - - // instruct followers to show workflow deactivation in UI - await orchestrationService.publish('display-workflow-deactivation', { workflowId }); - - break; - } - - case 'display-workflow-activation': { - if (!debounceMessageReceiver(message, 100)) { - return { ...message, payload: { result: 'debounced' } }; - } - - const { workflowId } = message.payload ?? {}; - - if (typeof workflowId !== 'string') break; - - push.broadcast('workflowActivated', { workflowId }); - - break; - } - - case 'display-workflow-deactivation': { - if (!debounceMessageReceiver(message, 100)) { - return { ...message, payload: { result: 'debounced' } }; - } - - const { workflowId } = message.payload ?? {}; - - if (typeof workflowId !== 'string') break; - - push.broadcast('workflowDeactivated', { workflowId }); - - break; - } - - case 'display-workflow-activation-error': { - if (!debounceMessageReceiver(message, 100)) { - return { ...message, payload: { result: 'debounced' } }; - } - - const { workflowId, errorMessage } = message.payload ?? 
{}; - - if (typeof workflowId !== 'string' || typeof errorMessage !== 'string') break; - - Container.get(Push).broadcast('workflowFailedToActivate', { workflowId, errorMessage }); - - break; - } - - case 'relay-execution-lifecycle-event': { - /** - * Do not debounce this - all events share the same message name. - */ - - const { type, args, pushRef } = message.payload; - - if (!push.getBackend().hasPushRef(pushRef)) break; - - push.send(type, args, pushRef); - - break; - } - - case 'clear-test-webhooks': { - if (!debounceMessageReceiver(message, 100)) { - // @ts-expect-error Legacy typing - message.payload = { result: 'debounced' }; - return message; - } - - const { webhookKey, workflowEntity, pushRef } = message.payload; - - if (!push.getBackend().hasPushRef(pushRef)) break; - - const testWebhooks = Container.get(TestWebhooks); - - testWebhooks.clearTimeout(webhookKey); - - const workflow = testWebhooks.toWorkflow(workflowEntity); - - await testWebhooks.deactivateWebhooks(workflow); - - break; - } - - default: - break; - } - return message; - } - return; -} diff --git a/packages/cli/src/services/orchestration/main/handle-worker-response-message-main.ts b/packages/cli/src/services/orchestration/main/handle-worker-response-message-main.ts deleted file mode 100644 index a3b5912fb4..0000000000 --- a/packages/cli/src/services/orchestration/main/handle-worker-response-message-main.ts +++ /dev/null @@ -1,44 +0,0 @@ -import { jsonParse } from 'n8n-workflow'; -import Container from 'typedi'; - -import { Logger } from '@/logging/logger.service'; -import { WORKER_RESPONSE_PUBSUB_CHANNEL } from '@/scaling/constants'; -import type { PubSub } from '@/scaling/pubsub/pubsub.types'; - -import type { MainResponseReceivedHandlerOptions } from './types'; -import { Push } from '../../../push'; - -export async function handleWorkerResponseMessageMain( - messageString: string, - options: MainResponseReceivedHandlerOptions, -) { - const workerResponse = jsonParse(messageString, { - 
fallbackValue: null, - }); - - if (!workerResponse) { - Container.get(Logger).debug( - `Received invalid message via channel ${WORKER_RESPONSE_PUBSUB_CHANNEL}: "${messageString}"`, - ); - return; - } - - if (workerResponse.targets && !workerResponse.targets.includes(options.queueModeId)) return; - - switch (workerResponse.command) { - case 'get-worker-status': - Container.get(Push).broadcast('sendWorkerStatusMessage', { - workerId: workerResponse.workerId, - status: workerResponse.payload, - }); - break; - case 'get-worker-id': - break; - default: - Container.get(Logger).debug( - `Received worker response ${workerResponse.command} from ${workerResponse.workerId}`, - ); - } - - return workerResponse; -} diff --git a/packages/cli/src/services/orchestration/main/multi-main-setup.ee.ts b/packages/cli/src/services/orchestration/main/multi-main-setup.ee.ts index bb1b52519c..034a214765 100644 --- a/packages/cli/src/services/orchestration/main/multi-main-setup.ee.ts +++ b/packages/cli/src/services/orchestration/main/multi-main-setup.ee.ts @@ -1,5 +1,4 @@ import { InstanceSettings } from 'n8n-core'; -import { ErrorReporterProxy as EventReporter } from 'n8n-workflow'; import { Service } from 'typedi'; import config from '@/config'; @@ -25,10 +24,6 @@ export class MultiMainSetup extends TypedEmitter { super(); } - get instanceId() { - return config.getEnv('redis.queueModeId'); - } - private leaderKey: string; private readonly leaderKeyTtl = config.getEnv('multiMainSetup.ttl'); @@ -58,23 +53,25 @@ export class MultiMainSetup extends TypedEmitter { private async checkLeader() { const leaderId = await this.publisher.get(this.leaderKey); - if (leaderId === this.instanceId) { - this.logger.debug(`[Instance ID ${this.instanceId}] Leader is this instance`); + const { hostId } = this.instanceSettings; + + if (leaderId === hostId) { + this.logger.debug(`[Instance ID ${hostId}] Leader is this instance`); await this.publisher.setExpiration(this.leaderKey, this.leaderKeyTtl); return; } - 
if (leaderId && leaderId !== this.instanceId) { - this.logger.debug(`[Instance ID ${this.instanceId}] Leader is other instance "${leaderId}"`); + if (leaderId && leaderId !== hostId) { + this.logger.debug(`[Instance ID ${hostId}] Leader is other instance "${leaderId}"`); if (this.instanceSettings.isLeader) { this.instanceSettings.markAsFollower(); this.emit('leader-stepdown'); // lost leadership - stop triggers, pollers, pruning, wait-tracking, queue recovery - EventReporter.info('[Multi-main setup] Leader failed to renew leader key'); + this.logger.warn('[Multi-main setup] Leader failed to renew leader key'); } return; @@ -82,7 +79,7 @@ export class MultiMainSetup extends TypedEmitter { if (!leaderId) { this.logger.debug( - `[Instance ID ${this.instanceId}] Leadership vacant, attempting to become leader...`, + `[Instance ID ${hostId}] Leadership vacant, attempting to become leader...`, ); this.instanceSettings.markAsFollower(); @@ -97,11 +94,13 @@ export class MultiMainSetup extends TypedEmitter { } private async tryBecomeLeader() { + const { hostId } = this.instanceSettings; + // this can only succeed if leadership is currently vacant - const keySetSuccessfully = await this.publisher.setIfNotExists(this.leaderKey, this.instanceId); + const keySetSuccessfully = await this.publisher.setIfNotExists(this.leaderKey, hostId); if (keySetSuccessfully) { - this.logger.debug(`[Instance ID ${this.instanceId}] Leader is now this instance`); + this.logger.debug(`[Instance ID ${hostId}] Leader is now this instance`); this.instanceSettings.markAsLeader(); diff --git a/packages/cli/src/services/orchestration/main/orchestration.handler.main.service.ts b/packages/cli/src/services/orchestration/main/orchestration.handler.main.service.ts deleted file mode 100644 index 7f4effdd4a..0000000000 --- a/packages/cli/src/services/orchestration/main/orchestration.handler.main.service.ts +++ /dev/null @@ -1,26 +0,0 @@ -import { Service } from 'typedi'; - -import { Subscriber } from 
'@/scaling/pubsub/subscriber.service'; - -import { handleCommandMessageMain } from './handle-command-message-main'; -import { handleWorkerResponseMessageMain } from './handle-worker-response-message-main'; -import type { MainResponseReceivedHandlerOptions } from './types'; -import { OrchestrationHandlerService } from '../../orchestration.handler.base.service'; - -@Service() -export class OrchestrationHandlerMainService extends OrchestrationHandlerService { - constructor(private readonly subscriber: Subscriber) { - super(); - } - - async initSubscriber(options: MainResponseReceivedHandlerOptions) { - await this.subscriber.subscribe('n8n.commands'); - await this.subscriber.subscribe('n8n.worker-response'); - - this.subscriber.setMessageHandler('n8n.worker-response', async (message: string) => { - await handleWorkerResponseMessageMain(message, options); - }); - - this.subscriber.setMessageHandler('n8n.commands', handleCommandMessageMain); - } -} diff --git a/packages/cli/src/services/orchestration/main/types.ts b/packages/cli/src/services/orchestration/main/types.ts deleted file mode 100644 index 7388a55032..0000000000 --- a/packages/cli/src/services/orchestration/main/types.ts +++ /dev/null @@ -1,6 +0,0 @@ -import type { Publisher } from '@/scaling/pubsub/publisher.service'; - -export type MainResponseReceivedHandlerOptions = { - queueModeId: string; - publisher: Publisher; -}; diff --git a/packages/cli/src/services/orchestration/webhook/orchestration.webhook.service.ts b/packages/cli/src/services/orchestration/webhook/orchestration.webhook.service.ts deleted file mode 100644 index 6b1c86fc6a..0000000000 --- a/packages/cli/src/services/orchestration/webhook/orchestration.webhook.service.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Service } from 'typedi'; - -import config from '@/config'; - -import { OrchestrationService } from '../../orchestration.service'; - -@Service() -export class OrchestrationWebhookService extends OrchestrationService { - sanityCheck(): boolean 
{ - return ( - this.isInitialized && - config.get('executions.mode') === 'queue' && - this.instanceSettings.instanceType === 'webhook' - ); - } -} diff --git a/packages/cli/src/services/orchestration/worker/orchestration.worker.service.ts b/packages/cli/src/services/orchestration/worker/orchestration.worker.service.ts deleted file mode 100644 index 1d0d822aeb..0000000000 --- a/packages/cli/src/services/orchestration/worker/orchestration.worker.service.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Service } from 'typedi'; - -import config from '@/config'; - -import { OrchestrationService } from '../../orchestration.service'; - -@Service() -export class OrchestrationWorkerService extends OrchestrationService { - sanityCheck(): boolean { - return ( - this.isInitialized && - config.get('executions.mode') === 'queue' && - this.instanceSettings.instanceType === 'worker' - ); - } -} diff --git a/packages/cli/src/services/orchestration/worker/types.ts b/packages/cli/src/services/orchestration/worker/types.ts deleted file mode 100644 index d821a194b2..0000000000 --- a/packages/cli/src/services/orchestration/worker/types.ts +++ /dev/null @@ -1,10 +0,0 @@ -import type { RunningJobSummary } from '@n8n/api-types'; - -import type { Publisher } from '@/scaling/pubsub/publisher.service'; - -export interface WorkerCommandReceivedHandlerOptions { - queueModeId: string; - publisher: Publisher; - getRunningJobIds: () => Array; - getRunningJobsSummary: () => RunningJobSummary[]; -} diff --git a/packages/cli/src/services/pruning.service.ts b/packages/cli/src/services/pruning.service.ts index 48d4b0db3b..e9ceab5434 100644 --- a/packages/cli/src/services/pruning.service.ts +++ b/packages/cli/src/services/pruning.service.ts @@ -37,7 +37,8 @@ export class PruningService { * @important Requires `OrchestrationService` to be initialized. 
*/ init() { - const { isLeader, isMultiMainSetupEnabled } = this.orchestrationService; + const { isLeader } = this.instanceSettings; + const { isMultiMainSetupEnabled } = this.orchestrationService; if (isLeader) this.startPruning(); diff --git a/packages/cli/src/sse-channel.d.ts b/packages/cli/src/sse-channel.d.ts deleted file mode 100644 index 6ea435b361..0000000000 --- a/packages/cli/src/sse-channel.d.ts +++ /dev/null @@ -1,17 +0,0 @@ -import type { PushRequest, PushResponse } from './push/types'; - -declare module 'sse-channel' { - declare class Channel { - constructor(); - - on(event: string, handler: (channel: string, res: PushResponse) => void): void; - - removeClient: (res: PushResponse) => void; - - addClient: (req: PushRequest, res: PushResponse) => void; - - send: (msg: string, clients?: PushResponse[]) => void; - } - - export = Channel; -} diff --git a/packages/cli/src/sso/saml/routes/__tests__/newFile.1.ts b/packages/cli/src/sso/saml/routes/__tests__/newFile.1.ts deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/packages/cli/src/sso/saml/routes/__tests__/newFile.ts b/packages/cli/src/sso/saml/routes/__tests__/newFile.ts deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/packages/cli/src/wait-tracker.ts b/packages/cli/src/wait-tracker.ts index 82b42c39df..e7e8b428ed 100644 --- a/packages/cli/src/wait-tracker.ts +++ b/packages/cli/src/wait-tracker.ts @@ -1,3 +1,4 @@ +import { InstanceSettings } from 'n8n-core'; import { ApplicationError, ErrorReporterProxy as ErrorReporter, @@ -28,7 +29,10 @@ export class WaitTracker { private readonly ownershipService: OwnershipService, private readonly workflowRunner: WorkflowRunner, private readonly orchestrationService: OrchestrationService, - ) {} + private readonly instanceSettings: InstanceSettings, + ) { + this.logger = this.logger.withScope('executions'); + } has(executionId: string) { return this.waitingExecutions[executionId] !== undefined; @@ -38,7 +42,8 @@ export class 
WaitTracker { * @important Requires `OrchestrationService` to be initialized. */ init() { - const { isLeader, isMultiMainSetupEnabled } = this.orchestrationService; + const { isLeader } = this.instanceSettings; + const { isMultiMainSetupEnabled } = this.orchestrationService; if (isLeader) this.startTracking(); @@ -50,7 +55,7 @@ export class WaitTracker { } private startTracking() { - this.logger.debug('Wait tracker started tracking waiting executions'); + this.logger.debug('Started tracking waiting executions'); // Poll every 60 seconds a list of upcoming executions this.mainTimer = setInterval(() => { @@ -61,7 +66,7 @@ export class WaitTracker { } async getWaitingExecutions() { - this.logger.debug('Wait tracker querying database for waiting executions'); + this.logger.debug('Querying database for waiting executions'); const executions = await this.executionRepository.getWaitingExecutions(); @@ -71,7 +76,7 @@ export class WaitTracker { const executionIds = executions.map((execution) => execution.id).join(', '); this.logger.debug( - `Wait tracker found ${executions.length} executions. Setting timer for IDs: ${executionIds}`, + `Found ${executions.length} executions. 
Setting timer for IDs: ${executionIds}`, ); // Add timers for each waiting execution that they get started at the correct time @@ -99,7 +104,7 @@ export class WaitTracker { } startExecution(executionId: string) { - this.logger.debug(`Wait tracker resuming execution ${executionId}`, { executionId }); + this.logger.debug(`Resuming execution ${executionId}`, { executionId }); delete this.waitingExecutions[executionId]; (async () => { @@ -141,7 +146,7 @@ export class WaitTracker { } stopTracking() { - this.logger.debug('Wait tracker shutting down'); + this.logger.debug('Shutting down wait tracking'); clearInterval(this.mainTimer); Object.keys(this.waitingExecutions).forEach((executionId) => { diff --git a/packages/cli/src/webhooks/webhook-server.ts b/packages/cli/src/webhooks/webhook-server.ts index d54f39f2cf..263375325b 100644 --- a/packages/cli/src/webhooks/webhook-server.ts +++ b/packages/cli/src/webhooks/webhook-server.ts @@ -3,8 +3,4 @@ import { Service } from 'typedi'; import { AbstractServer } from '@/abstract-server'; @Service() -export class WebhookServer extends AbstractServer { - constructor() { - super('webhook'); - } -} +export class WebhookServer extends AbstractServer {} diff --git a/packages/cli/src/workflow-execute-additional-data.ts b/packages/cli/src/workflow-execute-additional-data.ts index f357bbc018..2deae842fc 100644 --- a/packages/cli/src/workflow-execute-additional-data.ts +++ b/packages/cli/src/workflow-execute-additional-data.ts @@ -6,6 +6,13 @@ import type { PushType } from '@n8n/api-types'; import { GlobalConfig } from '@n8n/config'; import { WorkflowExecute } from 'n8n-core'; +import { + ApplicationError, + ErrorReporterProxy as ErrorReporter, + NodeOperationError, + Workflow, + WorkflowHooks, +} from 'n8n-workflow'; import type { IDataObject, IExecuteData, @@ -28,13 +35,7 @@ import type { ITaskDataConnections, ExecuteWorkflowOptions, IWorkflowExecutionDataProcess, -} from 'n8n-workflow'; -import { - ApplicationError, - ErrorReporterProxy 
as ErrorReporter, - NodeOperationError, - Workflow, - WorkflowHooks, + EnvProviderState, } from 'n8n-workflow'; import { Container } from 'typedi'; @@ -1008,6 +1009,7 @@ export async function getBase( connectionInputData: INodeExecutionData[], siblingParameters: INodeParameters, mode: WorkflowExecuteMode, + envProviderState: EnvProviderState, executeData?: IExecuteData, defaultReturnRunIndex?: number, selfData?: IDataObject, @@ -1028,6 +1030,7 @@ export async function getBase( connectionInputData, siblingParameters, mode, + envProviderState, executeData, defaultReturnRunIndex, selfData, diff --git a/packages/cli/src/workflow-runner.ts b/packages/cli/src/workflow-runner.ts index 8d1e147e85..0f1f37b71d 100644 --- a/packages/cli/src/workflow-runner.ts +++ b/packages/cli/src/workflow-runner.ts @@ -64,7 +64,7 @@ export class WorkflowRunner { executionId: string, hooks?: WorkflowHooks, ) { - ErrorReporter.error(error); + ErrorReporter.error(error, { executionId }); const isQueueMode = config.getEnv('executions.mode') === 'queue'; @@ -476,7 +476,6 @@ export class WorkflowRunner { clearWatchdogInterval(); } } catch (error) { - ErrorReporter.error(error); // We use "getWorkflowHooksWorkerExecuter" as "getWorkflowHooksWorkerMain" does not contain the // "workflowExecuteAfter" which we require. 
const hooks = WorkflowExecuteAdditionalData.getWorkflowHooksWorkerExecuter( diff --git a/packages/cli/src/workflows/workflow-history/__tests__/workflow-history.service.ee.test.ts b/packages/cli/src/workflows/workflow-history/__tests__/workflow-history.service.ee.test.ts index cb1b3952ad..a2a48587f0 100644 --- a/packages/cli/src/workflows/workflow-history/__tests__/workflow-history.service.ee.test.ts +++ b/packages/cli/src/workflows/workflow-history/__tests__/workflow-history.service.ee.test.ts @@ -3,13 +3,12 @@ import { mockClear } from 'jest-mock-extended'; import { User } from '@/databases/entities/user'; import { SharedWorkflowRepository } from '@/databases/repositories/shared-workflow.repository'; import { WorkflowHistoryRepository } from '@/databases/repositories/workflow-history.repository'; -import { Logger } from '@/logging/logger.service'; import { WorkflowHistoryService } from '@/workflows/workflow-history/workflow-history.service.ee'; -import { mockInstance } from '@test/mocking'; +import { mockInstance, mockLogger } from '@test/mocking'; import { getWorkflow } from '@test-integration/workflow'; const workflowHistoryRepository = mockInstance(WorkflowHistoryRepository); -const logger = mockInstance(Logger); +const logger = mockLogger(); const sharedWorkflowRepository = mockInstance(SharedWorkflowRepository); const workflowHistoryService = new WorkflowHistoryService( logger, @@ -106,10 +105,6 @@ describe('WorkflowHistoryService', () => { // Assert expect(workflowHistoryRepository.insert).toHaveBeenCalled(); - expect(logger.error).toHaveBeenCalledWith( - 'Failed to save workflow history version for workflow 123', - expect.any(Error), - ); }); }); }); diff --git a/packages/cli/src/workflows/workflow-history/workflow-history.service.ee.ts b/packages/cli/src/workflows/workflow-history/workflow-history.service.ee.ts index eddb8bf7e6..3b171e3422 100644 --- a/packages/cli/src/workflows/workflow-history/workflow-history.service.ee.ts +++ 
b/packages/cli/src/workflows/workflow-history/workflow-history.service.ee.ts @@ -1,3 +1,4 @@ +import { ensureError } from 'n8n-workflow'; import { Service } from 'typedi'; import type { User } from '@/databases/entities/user'; @@ -79,10 +80,10 @@ export class WorkflowHistoryService { workflowId, }); } catch (e) { - this.logger.error( - `Failed to save workflow history version for workflow ${workflowId}`, - e as Error, - ); + const error = ensureError(e); + this.logger.error(`Failed to save workflow history version for workflow ${workflowId}`, { + error, + }); } } } diff --git a/packages/cli/test/integration/commands/worker.cmd.test.ts b/packages/cli/test/integration/commands/worker.cmd.test.ts index 585d64cfb4..ce3280aa48 100644 --- a/packages/cli/test/integration/commands/worker.cmd.test.ts +++ b/packages/cli/test/integration/commands/worker.cmd.test.ts @@ -1,6 +1,8 @@ process.argv[2] = 'worker'; +import { TaskRunnersConfig } from '@n8n/config'; import { BinaryDataService } from 'n8n-core'; +import Container from 'typedi'; import { Worker } from '@/commands/worker'; import config from '@/config'; @@ -10,16 +12,21 @@ import { ExternalHooks } from '@/external-hooks'; import { ExternalSecretsManager } from '@/external-secrets/external-secrets-manager.ee'; import { License } from '@/license'; import { LoadNodesAndCredentials } from '@/load-nodes-and-credentials'; +import { Push } from '@/push'; +import { TaskRunnerProcess } from '@/runners/task-runner-process'; +import { TaskRunnerServer } from '@/runners/task-runner-server'; import { Publisher } from '@/scaling/pubsub/publisher.service'; import { Subscriber } from '@/scaling/pubsub/subscriber.service'; import { ScalingService } from '@/scaling/scaling.service'; -import { OrchestrationWorkerService } from '@/services/orchestration/worker/orchestration.worker.service'; +import { OrchestrationService } from '@/services/orchestration.service'; +import { Telemetry } from '@/telemetry'; import { setupTestCommand } from 
'@test-integration/utils/test-command'; import { mockInstance } from '../../shared/mocking'; config.set('executions.mode', 'queue'); config.set('binaryDataManager.availableModes', 'filesystem'); +Container.get(TaskRunnersConfig).disabled = false; mockInstance(LoadNodesAndCredentials); const binaryDataService = mockInstance(BinaryDataService); const externalHooks = mockInstance(ExternalHooks); @@ -28,17 +35,21 @@ const license = mockInstance(License, { loadCertStr: async () => '' }); const messageEventBus = mockInstance(MessageEventBus); const logStreamingEventRelay = mockInstance(LogStreamingEventRelay); const scalingService = mockInstance(ScalingService); -const orchestrationWorkerService = mockInstance(OrchestrationWorkerService); +const orchestrationService = mockInstance(OrchestrationService); +const taskRunnerServer = mockInstance(TaskRunnerServer); +const taskRunnerProcess = mockInstance(TaskRunnerProcess); mockInstance(Publisher); mockInstance(Subscriber); +mockInstance(Telemetry); +mockInstance(Push); const command = setupTestCommand(Worker); test('worker initializes all its components', async () => { - const worker = await command.run(); - expect(worker.queueModeId).toBeDefined(); - expect(worker.queueModeId).toContain('worker'); - expect(worker.queueModeId.length).toBeGreaterThan(15); + config.set('executions.mode', 'regular'); // should be overridden + + await command.run(); + expect(license.init).toHaveBeenCalledTimes(1); expect(binaryDataService.init).toHaveBeenCalledTimes(1); expect(externalHooks.init).toHaveBeenCalledTimes(1); @@ -47,6 +58,10 @@ test('worker initializes all its components', async () => { expect(scalingService.setupQueue).toHaveBeenCalledTimes(1); expect(scalingService.setupWorker).toHaveBeenCalledTimes(1); expect(logStreamingEventRelay.init).toHaveBeenCalledTimes(1); - expect(orchestrationWorkerService.init).toHaveBeenCalledTimes(1); + expect(orchestrationService.init).toHaveBeenCalledTimes(1); 
expect(messageEventBus.send).toHaveBeenCalledTimes(1); + expect(taskRunnerServer.start).toHaveBeenCalledTimes(1); + expect(taskRunnerProcess.start).toHaveBeenCalledTimes(1); + + expect(config.getEnv('executions.mode')).toBe('queue'); }); diff --git a/packages/cli/test/integration/debug.controller.test.ts b/packages/cli/test/integration/debug.controller.test.ts index 723edea58a..47695e59aa 100644 --- a/packages/cli/test/integration/debug.controller.test.ts +++ b/packages/cli/test/integration/debug.controller.test.ts @@ -1,9 +1,11 @@ +import { InstanceSettings } from 'n8n-core'; +import Container from 'typedi'; + import { ActiveWorkflowManager } from '@/active-workflow-manager'; import type { WorkflowEntity } from '@/databases/entities/workflow-entity'; import { WorkflowRepository } from '@/databases/repositories/workflow.repository'; import { generateNanoId } from '@/databases/utils/generators'; import { MultiMainSetup } from '@/services/orchestration/main/multi-main-setup.ee'; -import { OrchestrationService } from '@/services/orchestration.service'; import { createOwner } from './shared/db/users'; import { randomName } from './shared/random'; @@ -14,6 +16,8 @@ import { mockInstance } from '../shared/mocking'; describe('DebugController', () => { const workflowRepository = mockInstance(WorkflowRepository); const activeWorkflowManager = mockInstance(ActiveWorkflowManager); + const instanceSettings = Container.get(InstanceSettings); + instanceSettings.markAsLeader(); let testServer = setupTestServer({ endpointGroups: ['debug'] }); let ownerAgent: SuperAgentTest; @@ -30,7 +34,7 @@ describe('DebugController', () => { const webhooks = [{ id: workflowId, name: randomName() }] as WorkflowEntity[]; const triggersAndPollers = [{ id: workflowId, name: randomName() }] as WorkflowEntity[]; const activationErrors = { [workflowId]: 'Failed to activate' }; - const instanceId = 'main-71JdWtq306epIFki'; + const { instanceId } = instanceSettings; const leaderKey = 'some-leader-key'; 
workflowRepository.findIn.mockResolvedValue(triggersAndPollers); @@ -38,9 +42,7 @@ describe('DebugController', () => { activeWorkflowManager.allActiveInMemory.mockReturnValue([workflowId]); activeWorkflowManager.getAllWorkflowActivationErrors.mockResolvedValue(activationErrors); - jest.spyOn(OrchestrationService.prototype, 'instanceId', 'get').mockReturnValue(instanceId); jest.spyOn(MultiMainSetup.prototype, 'fetchLeaderKey').mockResolvedValue(leaderKey); - jest.spyOn(OrchestrationService.prototype, 'isLeader', 'get').mockReturnValue(true); const response = await ownerAgent.get('/debug/multi-main-setup').expect(200); diff --git a/packages/cli/test/integration/deduplication/deduplication-helper.test.ts b/packages/cli/test/integration/deduplication/deduplication-helper.test.ts new file mode 100644 index 0000000000..2859bb363c --- /dev/null +++ b/packages/cli/test/integration/deduplication/deduplication-helper.test.ts @@ -0,0 +1,532 @@ +import { DataDeduplicationService } from 'n8n-core'; +import type { ICheckProcessedContextData, INodeTypeData } from 'n8n-workflow'; +import type { IDeduplicationOutput, INode, DeduplicationItemTypes } from 'n8n-workflow'; +import { Workflow } from 'n8n-workflow'; + +import { getDataDeduplicationService } from '@/deduplication'; +import { LoadNodesAndCredentials } from '@/load-nodes-and-credentials'; +import { NodeTypes } from '@/node-types'; +import { mockInstance } from '@test/mocking'; +import { createWorkflow } from '@test-integration/db/workflows'; + +import * as testDb from '../shared/test-db'; + +let workflow: Workflow; + +jest.mock('../../../src/telemetry'); + +const MOCK_NODE_TYPES_DATA = mockNodeTypesData(['set']); +mockInstance(LoadNodesAndCredentials, { + loaded: { + nodes: MOCK_NODE_TYPES_DATA, + credentials: {}, + }, +}); +function mockNodeTypesData( + nodeNames: string[], + options?: { + addTrigger?: boolean; + }, +) { + return nodeNames.reduce((acc, nodeName) => { + return ( + (acc[`n8n-nodes-base.${nodeName}`] = { + 
sourcePath: '', + type: { + description: { + displayName: nodeName, + name: nodeName, + group: [], + description: '', + version: 1, + defaults: {}, + inputs: [], + outputs: [], + properties: [], + }, + trigger: options?.addTrigger ? async () => undefined : undefined, + }, + }), + acc + ); + }, {}); +} +const node: INode = { + id: 'uuid-1234', + parameters: {}, + name: 'test', + type: 'test.set', + typeVersion: 1, + position: [0, 0], +}; + +beforeAll(async () => { + await testDb.init(); + + const nodeTypes = mockInstance(NodeTypes); + const workflowEntityOriginal = await createWorkflow(); + + workflow = new Workflow({ + id: workflowEntityOriginal.id, + nodes: [node], + connections: {}, + active: false, + nodeTypes, + }); + + const dataDeduplicationService = getDataDeduplicationService(); + await DataDeduplicationService.init(dataDeduplicationService); +}); + +beforeEach(async () => { + await testDb.truncate(['ProcessedData']); +}); + +afterAll(async () => { + await testDb.terminate(); +}); + +describe('Deduplication.DeduplicationHelper', () => { + test('Deduplication (mode: entries): DeduplicationHelper should record and check data correctly', async () => { + const context = 'node'; + const contextData: ICheckProcessedContextData = { + workflow, + node, + }; + + let processedData: IDeduplicationOutput; + + processedData = await DataDeduplicationService.getInstance().checkProcessedAndRecord( + ['a', 'b'], + context, + contextData, + { mode: 'entries' }, + ); + + // 'a' & 'b' got only checked before, so still has to be new + expect(processedData).toEqual({ new: ['a', 'b'], processed: [] }); + + processedData = await DataDeduplicationService.getInstance().checkProcessedAndRecord( + ['a', 'b', 'c', 'd'], + context, + contextData, + { mode: 'entries' }, + ); + + // 'a' & 'b' got recorded before, 'c' only checked before and 'd' has never been seen + expect(processedData).toEqual({ new: ['c', 'd'], processed: ['a', 'b'] }); + + await
DataDeduplicationService.getInstance().removeProcessed(['b', 'd'], context, contextData, { + mode: 'entries', + }); + }); + + test('Deduplication (mode: entries): DeduplicationHelper different contexts should not interfere with each other', async () => { + const contextData: ICheckProcessedContextData = { + workflow, + node, + }; + + let processedData: IDeduplicationOutput; + + // Add data with context "node" + processedData = await DataDeduplicationService.getInstance().checkProcessedAndRecord( + ['a', 'b'], + 'node', + contextData, + { mode: 'entries' }, + ); + + // No data exists yet for context "node" so has to be new + expect(processedData).toEqual({ new: ['a', 'b'], processed: [] }); + + // Add data with context "workflow" + processedData = await DataDeduplicationService.getInstance().checkProcessedAndRecord( + ['a', 'b', 'c'], + 'workflow', + contextData, + { mode: 'entries' }, + ); + + // No data exists yet for context 'workflow' so has to be new + expect(processedData).toEqual({ new: ['a', 'b', 'c'], processed: [] }); + + await DataDeduplicationService.getInstance().removeProcessed(['a'], 'node', contextData, { + mode: 'entries', + }); + + processedData = await DataDeduplicationService.getInstance().checkProcessedAndRecord( + ['a', 'b', 'c'], + 'node', + contextData, + { mode: 'entries' }, + ); + + // 'a' got removed for the context 'node' and 'c' never got saved, so only 'b' should be known + expect(processedData).toEqual({ new: ['a', 'c'], processed: ['b'] }); + + await DataDeduplicationService.getInstance().removeProcessed(['b'], 'workflow', contextData, { + mode: 'entries', + }); + + processedData = await DataDeduplicationService.getInstance().checkProcessedAndRecord( + ['a', 'b', 'c', 'd'], + 'workflow', + contextData, + { mode: 'entries' }, + ); + + // 'b' got removed for the context 'workflow' and 'd' never got saved for that reason new + // 'a' and 'c' should be known + expect(processedData).toEqual({ new: ['b', 'd'], processed: ['a', 'c']
}); + }); + + test('Deduplication (mode: entries): DeduplicationHelper check maxEntries', async () => { + const contextData: ICheckProcessedContextData = { + workflow, + node, + }; + + let processedData: IDeduplicationOutput; + + processedData = await DataDeduplicationService.getInstance().checkProcessedAndRecord( + ['0', '1', '2', '3'], + 'node', + contextData, + { mode: 'entries', maxEntries: 5 }, + ); + + // All data should be new + expect(processedData).toEqual({ new: ['0', '1', '2', '3'], processed: [] }); + + // Add data with context "workflow" + processedData = await DataDeduplicationService.getInstance().checkProcessedAndRecord( + ['4', '5', '6'], + 'node', + contextData, + { mode: 'entries', maxEntries: 5 }, + ); + + // All given data should be new + expect(processedData).toEqual({ new: ['4', '5', '6'], processed: [] }); + + // This should not make a difference, removing an item which does not exist + await DataDeduplicationService.getInstance().removeProcessed(['a'], 'node', contextData, { + mode: 'entries', + }); + + processedData = await DataDeduplicationService.getInstance().checkProcessedAndRecord( + ['0', '1', '2', '3', '4', '5', '6', '7'], + 'node', + contextData, + { mode: 'entries', maxEntries: 5 }, + ); + + // '7' should be new and '0' and '1' also because they have been pruned as max 5 get saved + expect(processedData).toEqual({ new: ['0', '1', '7'], processed: ['2', '3', '4', '5', '6'] }); + }); + + describe('Deduplication (mode: latestIncrementalKey): DeduplicationHelper should record and check data correctly', () => { + const tests: Array<{ + description: string; + data: Array<{ + operation: 'checkProcessedAndRecord'; + input: DeduplicationItemTypes[]; + output: IDeduplicationOutput; + }>; + }> = [ + { + description: 'dates', + data: [ + { + operation: 'checkProcessedAndRecord', + input: [new Date('2022-01-02').toISOString(), new Date('2022-01-03').toISOString()], + output: { + new: [new Date('2022-01-02').toISOString(), new
Date('2022-01-03').toISOString()], + processed: [], + }, + }, + { + operation: 'checkProcessedAndRecord', + input: [ + new Date('2022-01-02').toISOString(), + new Date('2022-01-03').toISOString(), + new Date('2022-01-04').toISOString(), + new Date('2022-01-05').toISOString(), + ], + output: { + new: [new Date('2022-01-04').toISOString(), new Date('2022-01-05').toISOString()], + processed: [ + new Date('2022-01-02').toISOString(), + new Date('2022-01-03').toISOString(), + ], + }, + }, + ], + }, + { + description: 'numbers', + data: [ + { + operation: 'checkProcessedAndRecord', + input: [2, 3], + output: { new: [2, 3], processed: [] }, + }, + { + operation: 'checkProcessedAndRecord', + input: [2, 3, 4, 5], + output: { new: [4, 5], processed: [2, 3] }, + }, + ], + }, + ]; + + for (const testData of tests) { + test(testData.description, async () => { + const context = 'node'; + const contextData: ICheckProcessedContextData = { + workflow, + node, + }; + const mode = testData.description === 'dates' ? 
'latestDate' : 'latestIncrementalKey'; + + let processedData: IDeduplicationOutput; + + for (const data of testData.data) { + processedData = await DataDeduplicationService.getInstance()[data.operation]( + data.input, + context, + contextData, + { mode }, + ); + + expect(processedData).toEqual(data.output); + } + }); + } + }); + + test('removeProcessed should throw error for latest modes', async () => { + const contextData: ICheckProcessedContextData = { + workflow, + node, + }; + + await expect( + DataDeduplicationService.getInstance().removeProcessed(['2022-01-01'], 'node', contextData, { + mode: 'latestDate', + }), + ).rejects.toThrow('Removing processed data is not possible in mode "latest"'); + + await expect( + DataDeduplicationService.getInstance().removeProcessed([1], 'node', contextData, { + mode: 'latestIncrementalKey', + }), + ).rejects.toThrow('Removing processed data is not possible in mode "latest"'); + }); + + test('clearAllProcessedItems should delete all processed items for workflow scope', async () => { + const contextData: ICheckProcessedContextData = { + workflow, + }; + + // First, add some data + await DataDeduplicationService.getInstance().checkProcessedAndRecord( + ['a', 'b', 'c'], + 'workflow', + contextData, + { mode: 'entries' }, + ); + + // Clear all processed items + await DataDeduplicationService.getInstance().clearAllProcessedItems('workflow', contextData, { + mode: 'entries', + }); + + // Check that all items are now considered new + const processedData = await DataDeduplicationService.getInstance().checkProcessedAndRecord( + ['a', 'b', 'c'], + 'workflow', + contextData, + { mode: 'entries' }, + ); + + expect(processedData).toEqual({ new: ['a', 'b', 'c'], processed: [] }); + }); + + test('clearAllProcessedItems should delete all processed items for node scope', async () => { + const contextData: ICheckProcessedContextData = { + workflow, + node, + }; + + // First, add some data + await 
DataDeduplicationService.getInstance().checkProcessedAndRecord( + ['a', 'b', 'c'], + 'node', + contextData, + { mode: 'entries' }, + ); + + // Clear all processed items + await DataDeduplicationService.getInstance().clearAllProcessedItems('node', contextData, { + mode: 'entries', + }); + + // Check that all items are now considered new + const processedData = await DataDeduplicationService.getInstance().checkProcessedAndRecord( + ['a', 'b', 'c'], + 'node', + contextData, + { mode: 'entries' }, + ); + + expect(processedData).toEqual({ new: ['a', 'b', 'c'], processed: [] }); + }); + + test('clearAllProcessedItems should not clear workflow processed items when clearing node scope', async () => { + const contextDataWorkflow: ICheckProcessedContextData = { + workflow, + }; + + const contextDataNode: ICheckProcessedContextData = { + workflow, + node, + }; + + // Add data for workflow scope + await DataDeduplicationService.getInstance().checkProcessedAndRecord( + ['a', 'b', 'c'], + 'workflow', + contextDataWorkflow, + { mode: 'entries' }, + ); + + // Add data for node scope + await DataDeduplicationService.getInstance().checkProcessedAndRecord( + ['d', 'e', 'f'], + 'node', + contextDataNode, + { mode: 'entries' }, + ); + + // Clear all processed items for node scope + await DataDeduplicationService.getInstance().clearAllProcessedItems('node', contextDataNode, { + mode: 'entries', + }); + + // Ensure workflow processed items are still intact + const processedDataWorkflow = + await DataDeduplicationService.getInstance().checkProcessedAndRecord( + ['a', 'b', 'c'], + 'workflow', + contextDataWorkflow, + { mode: 'entries' }, + ); + + // Workflow items should still be considered processed + expect(processedDataWorkflow).toEqual({ new: [], processed: ['a', 'b', 'c'] }); + + // Ensure node processed items have been cleared + const processedDataNode = await DataDeduplicationService.getInstance().checkProcessedAndRecord( + ['d', 'e', 'f'], + 'node', + contextDataNode, + { mode: 
'entries' }, + ); + + // Node items should be considered new + expect(processedDataNode).toEqual({ new: ['d', 'e', 'f'], processed: [] }); + }); + + test('clearAllProcessedItems should not clear node processed items when clearing workflow scope', async () => { + const contextDataWorkflow: ICheckProcessedContextData = { + workflow, + }; + + const contextDataNode: ICheckProcessedContextData = { + workflow, + node, + }; + + // Add data for workflow scope + await DataDeduplicationService.getInstance().checkProcessedAndRecord( + ['a', 'b', 'c'], + 'workflow', + contextDataWorkflow, + { mode: 'entries' }, + ); + + // Add data for node scope + await DataDeduplicationService.getInstance().checkProcessedAndRecord( + ['d', 'e', 'f'], + 'node', + contextDataNode, + { mode: 'entries' }, + ); + + // Clear all processed items for workflow scope + await DataDeduplicationService.getInstance().clearAllProcessedItems( + 'workflow', + contextDataWorkflow, + { + mode: 'entries', + }, + ); + + // Ensure node processed items are still intact + const processedDataNode = await DataDeduplicationService.getInstance().checkProcessedAndRecord( + ['d', 'e', 'f'], + 'node', + contextDataNode, + { mode: 'entries' }, + ); + + // Node items should still be considered processed + expect(processedDataNode).toEqual({ new: [], processed: ['d', 'e', 'f'] }); + + // Ensure workflow processed items have been cleared + const processedDataWorkflow = + await DataDeduplicationService.getInstance().checkProcessedAndRecord( + ['a', 'b', 'c'], + 'workflow', + contextDataWorkflow, + { mode: 'entries' }, + ); + + // Workflow items should be considered new + expect(processedDataWorkflow).toEqual({ new: ['a', 'b', 'c'], processed: [] }); + }); + + test('getProcessedDataCount should return correct count for different modes', async () => { + const contextData: ICheckProcessedContextData = { + workflow, + node, + }; + + // Test for 'entries' mode + await DataDeduplicationService.getInstance().checkProcessedAndRecord( 
+ ['a', 'b', 'c'], + 'node', + contextData, + { mode: 'entries' }, + ); + + const entriesCount = await DataDeduplicationService.getInstance().getProcessedDataCount( + 'node', + contextData, + { mode: 'entries' }, + ); + + expect(entriesCount).toBe(3); + + // Test for other modes (should return 0) + const latestCount = await DataDeduplicationService.getInstance().getProcessedDataCount( + 'node', + contextData, + { mode: 'latestDate' }, + ); + + expect(latestCount).toBe(0); + }); +}); diff --git a/packages/cli/test/integration/runners/task-runner-process.test.ts b/packages/cli/test/integration/runners/task-runner-process.test.ts index f517ee6398..e623d5f371 100644 --- a/packages/cli/test/integration/runners/task-runner-process.test.ts +++ b/packages/cli/test/integration/runners/task-runner-process.test.ts @@ -18,6 +18,11 @@ describe('TaskRunnerProcess', () => { const taskBroker = Container.get(TaskBroker); const taskRunnerService = Container.get(TaskRunnerService); + const startLauncherSpy = jest.spyOn(runnerProcess, 'startLauncher'); + const startNodeSpy = jest.spyOn(runnerProcess, 'startNode'); + const killLauncherSpy = jest.spyOn(runnerProcess, 'killLauncher'); + const killNodeSpy = jest.spyOn(runnerProcess, 'killNode'); + beforeAll(async () => { await taskRunnerServer.start(); // Set the port to the actually used port @@ -30,6 +35,11 @@ describe('TaskRunnerProcess', () => { afterEach(async () => { await runnerProcess.stop(); + + startLauncherSpy.mockClear(); + startNodeSpy.mockClear(); + killLauncherSpy.mockClear(); + killNodeSpy.mockClear(); }); const getNumConnectedRunners = () => taskRunnerService.runnerConnections.size; @@ -88,4 +98,46 @@ describe('TaskRunnerProcess', () => { expect(getNumConnectedRunners()).toBe(1); expect(getNumRegisteredRunners()).toBe(1); }); + + it('should launch runner directly if not using a launcher', async () => { + globalConfig.taskRunners.useLauncher = false; + + await runnerProcess.start(); + + 
expect(startLauncherSpy).toBeCalledTimes(0); + expect(startNodeSpy).toBeCalledTimes(1); + }); + + it('should use a launcher if configured', async () => { + globalConfig.taskRunners.useLauncher = true; + globalConfig.taskRunners.launcherPath = 'node'; + + await runnerProcess.start(); + + expect(startLauncherSpy).toBeCalledTimes(1); + expect(startNodeSpy).toBeCalledTimes(0); + globalConfig.taskRunners.useLauncher = false; + }); + + it('should kill the process directly if not using a launcher', async () => { + globalConfig.taskRunners.useLauncher = false; + + await runnerProcess.start(); + await runnerProcess.stop(); + + expect(killLauncherSpy).toBeCalledTimes(0); + expect(killNodeSpy).toBeCalledTimes(1); + }); + + it('should kill the process using a launcher if configured', async () => { + globalConfig.taskRunners.useLauncher = true; + globalConfig.taskRunners.launcherPath = 'node'; + + await runnerProcess.start(); + await runnerProcess.stop(); + + expect(killLauncherSpy).toBeCalledTimes(1); + expect(killNodeSpy).toBeCalledTimes(0); + globalConfig.taskRunners.useLauncher = false; + }); }); diff --git a/packages/cli/test/integration/shared/test-db.ts b/packages/cli/test/integration/shared/test-db.ts index 0d9b1672e1..7faaa3f6eb 100644 --- a/packages/cli/test/integration/shared/test-db.ts +++ b/packages/cli/test/integration/shared/test-db.ts @@ -67,6 +67,7 @@ const repositories = [ 'Project', 'ProjectRelation', 'Role', + 'ProcessedData', 'Project', 'ProjectRelation', 'Settings', diff --git a/packages/cli/test/shared/mocking.ts b/packages/cli/test/shared/mocking.ts index 60b712b115..099988a896 100644 --- a/packages/cli/test/shared/mocking.ts +++ b/packages/cli/test/shared/mocking.ts @@ -4,6 +4,8 @@ import type { Class } from 'n8n-core'; import type { DeepPartial } from 'ts-essentials'; import { Container } from 'typedi'; +import type { Logger } from '@/logging/logger.service'; + export const mockInstance = ( serviceClass: Class, data: DeepPartial | undefined = 
undefined, @@ -22,3 +24,6 @@ export const mockEntityManager = (entityClass: Class) => { Object.assign(entityManager, { connection: dataSource }); return entityManager; }; + +export const mockLogger = () => + mock({ withScope: jest.fn().mockReturnValue(mock()) }); diff --git a/packages/core/package.json b/packages/core/package.json index 50762d8c14..aec9b34891 100644 --- a/packages/core/package.json +++ b/packages/core/package.json @@ -1,6 +1,6 @@ { "name": "n8n-core", - "version": "1.62.1", + "version": "1.63.0", "description": "Core functionality of n8n", "main": "dist/index", "types": "dist/index.d.ts", @@ -36,6 +36,7 @@ "@types/xml2js": "catalog:" }, "dependencies": { + "@langchain/core": "catalog:", "@n8n/client-oauth2": "workspace:*", "aws4": "1.11.0", "axios": "catalog:", @@ -45,10 +46,10 @@ "file-type": "16.5.4", "form-data": "catalog:", "lodash": "catalog:", - "@langchain/core": "catalog:", "luxon": "catalog:", "mime-types": "2.1.35", "n8n-workflow": "workspace:*", + "nanoid": "catalog:", "oauth-1.0a": "2.2.6", "p-cancelable": "2.1.1", "pretty-bytes": "5.6.0", diff --git a/packages/core/src/Agent/index.ts b/packages/core/src/Agent/index.ts index 75195b6acf..ed842d99ee 100644 --- a/packages/core/src/Agent/index.ts +++ b/packages/core/src/Agent/index.ts @@ -10,7 +10,9 @@ import type { INodeParameters, IExecuteData, IDataObject, + Result, } from 'n8n-workflow'; +import { createEnvProviderState } from 'n8n-workflow'; export const createAgentStartJob = ( additionalData: IWorkflowExecuteAdditionalData, @@ -28,13 +30,13 @@ export const createAgentStartJob = ( selfData?: IDataObject, contextNodeName?: string, ): IExecuteFunctions['startJob'] => { - return async function startJob( + return async function startJob( this: IExecuteFunctions, jobType: string, settings: unknown, itemIndex: number, - ): Promise { - return await additionalData.startAgentJob( + ): Promise> { + return await additionalData.startAgentJob( additionalData, jobType, settings, @@ -49,6 +51,7 @@ 
export const createAgentStartJob = ( connectionInputData, siblingParameters, mode, + createEnvProviderState(), executeData, defaultReturnRunIndex, selfData, diff --git a/packages/core/src/DirectoryLoader.ts b/packages/core/src/DirectoryLoader.ts index a1401a8fb5..b0e77125a7 100644 --- a/packages/core/src/DirectoryLoader.ts +++ b/packages/core/src/DirectoryLoader.ts @@ -448,9 +448,9 @@ export class LazyPackageDirectoryLoader extends PackageDirectoryLoader { ); } - Logger.debug(`Lazy Loading credentials and nodes from ${this.packageJson.name}`, { - credentials: this.types.credentials?.length ?? 0, + Logger.debug(`Lazy-loading nodes and credentials from ${this.packageJson.name}`, { nodes: this.types.nodes?.length ?? 0, + credentials: this.types.credentials?.length ?? 0, }); this.isLazyLoaded = true; diff --git a/packages/core/src/InstanceSettings.ts b/packages/core/src/InstanceSettings.ts index 17ccf15def..4a050db121 100644 --- a/packages/core/src/InstanceSettings.ts +++ b/packages/core/src/InstanceSettings.ts @@ -1,9 +1,12 @@ import { createHash, randomBytes } from 'crypto'; import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'fs'; -import { ApplicationError, jsonParse } from 'n8n-workflow'; +import { ApplicationError, jsonParse, ALPHABET } from 'n8n-workflow'; +import { customAlphabet } from 'nanoid'; import path from 'path'; import { Service } from 'typedi'; +const nanoid = customAlphabet(ALPHABET, 16); + interface ReadOnlySettings { encryptionKey: string; } @@ -40,6 +43,12 @@ export class InstanceSettings { private settings = this.loadOrCreate(); + /** + * Fixed ID of this n8n instance, for telemetry. + * Derived from encryption key. Do not confuse with `hostId`. + * + * @example '258fce876abf5ea60eb86a2e777e5e190ff8f3e36b5b37aafec6636c31d4d1f9' + */ readonly instanceId = this.generateInstanceId(); readonly instanceType: InstanceType; @@ -49,6 +58,8 @@ export class InstanceSettings { this.instanceType = ['webhook', 'worker'].includes(command) ? 
(command as InstanceType) : 'main'; + + this.hostId = `${this.instanceType}-${nanoid()}`; } /** @@ -61,6 +72,16 @@ export class InstanceSettings { */ instanceRole: InstanceRole = 'unset'; + /** + * Transient ID of this n8n instance, for scaling mode. + * Reset on restart. Do not confuse with `instanceId`. + * + * @example 'main-bnxa1riryKUNHtln' + * @example 'worker-nDJR0FnSd2Vf6DB5' + * @example 'webhook-jxQ7AO8IzxEtfW1F' + */ + readonly hostId: string; + get isLeader() { return this.instanceRole === 'leader'; } diff --git a/packages/core/src/NodeExecuteFunctions.ts b/packages/core/src/NodeExecuteFunctions.ts index b672c9c8e8..6cbef1e1b8 100644 --- a/packages/core/src/NodeExecuteFunctions.ts +++ b/packages/core/src/NodeExecuteFunctions.ts @@ -102,6 +102,13 @@ import type { EnsureTypeOptions, SSHTunnelFunctions, SchedulingFunctions, + DeduplicationHelperFunctions, + IDeduplicationOutput, + IDeduplicationOutputItems, + ICheckProcessedOptions, + DeduplicationScope, + DeduplicationItemTypes, + ICheckProcessedContextData, AiEvent, } from 'n8n-workflow'; import { @@ -149,6 +156,7 @@ import { UM_EMAIL_TEMPLATES_PWRESET, } from './Constants'; import { createNodeAsTool } from './CreateNodeAsTool'; +import { DataDeduplicationService } from './data-deduplication-service'; import { getAllWorkflowExecutionMetadata, getWorkflowExecutionMetadata, @@ -1284,6 +1292,72 @@ async function prepareBinaryData( return await setBinaryDataBuffer(returnData, binaryData, workflowId, executionId); } +export async function checkProcessedAndRecord( + items: DeduplicationItemTypes[], + scope: DeduplicationScope, + contextData: ICheckProcessedContextData, + options: ICheckProcessedOptions, +): Promise { + return await DataDeduplicationService.getInstance().checkProcessedAndRecord( + items, + scope, + contextData, + options, + ); +} + +export async function checkProcessedItemsAndRecord( + key: string, + items: IDataObject[], + scope: DeduplicationScope, + contextData: ICheckProcessedContextData, + 
options: ICheckProcessedOptions, +): Promise { + return await DataDeduplicationService.getInstance().checkProcessedItemsAndRecord( + key, + items, + scope, + contextData, + options, + ); +} + +export async function removeProcessed( + items: DeduplicationItemTypes[], + scope: DeduplicationScope, + contextData: ICheckProcessedContextData, + options: ICheckProcessedOptions, +): Promise { + return await DataDeduplicationService.getInstance().removeProcessed( + items, + scope, + contextData, + options, + ); +} + +export async function clearAllProcessedItems( + scope: DeduplicationScope, + contextData: ICheckProcessedContextData, + options: ICheckProcessedOptions, +): Promise { + return await DataDeduplicationService.getInstance().clearAllProcessedItems( + scope, + contextData, + options, + ); +} +export async function getProcessedDataCount( + scope: DeduplicationScope, + contextData: ICheckProcessedContextData, + options: ICheckProcessedOptions, +): Promise { + return await DataDeduplicationService.getInstance().getProcessedDataCount( + scope, + contextData, + options, + ); +} function applyPaginationRequestData( requestData: IRequestOptions, paginationRequestData: PaginationOptions['request'], @@ -3453,6 +3527,52 @@ const getBinaryHelperFunctions = ( }, }); +const getCheckProcessedHelperFunctions = ( + workflow: Workflow, + node: INode, +): DeduplicationHelperFunctions => ({ + async checkProcessedAndRecord( + items: DeduplicationItemTypes[], + scope: DeduplicationScope, + options: ICheckProcessedOptions, + ): Promise { + return await checkProcessedAndRecord(items, scope, { node, workflow }, options); + }, + async checkProcessedItemsAndRecord( + propertyName: string, + items: IDataObject[], + scope: DeduplicationScope, + options: ICheckProcessedOptions, + ): Promise { + return await checkProcessedItemsAndRecord( + propertyName, + items, + scope, + { node, workflow }, + options, + ); + }, + async removeProcessed( + items: DeduplicationItemTypes[], + scope: 
DeduplicationScope, + options: ICheckProcessedOptions, + ): Promise { + return await removeProcessed(items, scope, { node, workflow }, options); + }, + async clearAllProcessedItems( + scope: DeduplicationScope, + options: ICheckProcessedOptions, + ): Promise { + return await clearAllProcessedItems(scope, { node, workflow }, options); + }, + async getProcessedDataCount( + scope: DeduplicationScope, + options: ICheckProcessedOptions, + ): Promise { + return await getProcessedDataCount(scope, { node, workflow }, options); + }, +}); + /** * Returns a copy of the items which only contains the json data and * of that only the defined properties @@ -3896,6 +4016,7 @@ export function getExecuteFunctions( ...getSSHTunnelFunctions(), ...getFileSystemHelperFunctions(node), ...getBinaryHelperFunctions(additionalData, workflow.id), + ...getCheckProcessedHelperFunctions(workflow, node), assertBinaryData: (itemIndex, propertyName) => assertBinaryData(inputData, node, itemIndex, propertyName, 0), getBinaryDataBuffer: async (itemIndex, propertyName) => diff --git a/packages/core/src/PartialExecutionUtils/DirectedGraph.ts b/packages/core/src/PartialExecutionUtils/DirectedGraph.ts index 33cc114698..606f624d02 100644 --- a/packages/core/src/PartialExecutionUtils/DirectedGraph.ts +++ b/packages/core/src/PartialExecutionUtils/DirectedGraph.ts @@ -12,6 +12,11 @@ export type GraphConnection = { // fromName-outputType-outputIndex-inputIndex-toName type DirectedGraphKey = `${string}-${NodeConnectionType}-${number}-${number}-${string}`; +type RemoveNodeBaseOptions = { + reconnectConnections: boolean; + skipConnectionFn?: (connection: GraphConnection) => boolean; +}; + /** * Represents a directed graph as an adjacency list, e.g. one list for the * vertices and one list for the edges. @@ -77,17 +82,34 @@ export class DirectedGraph { * connections making sure all parent nodes are connected to all child nodes * and return the new connections. 
*/ - removeNode(node: INode, options?: { reconnectConnections: true }): GraphConnection[]; - removeNode(node: INode, options?: { reconnectConnections: false }): undefined; - removeNode(node: INode, { reconnectConnections = false } = {}): undefined | GraphConnection[] { - if (reconnectConnections) { - const incomingConnections = this.getDirectParents(node); - const outgoingConnections = this.getDirectChildren(node); + removeNode( + node: INode, + options?: { reconnectConnections: true } & RemoveNodeBaseOptions, + ): GraphConnection[]; + removeNode( + node: INode, + options?: { reconnectConnections: false } & RemoveNodeBaseOptions, + ): undefined; + removeNode( + node: INode, + options: RemoveNodeBaseOptions = { reconnectConnections: false }, + ): undefined | GraphConnection[] { + if (options.reconnectConnections) { + const incomingConnections = this.getDirectParentConnections(node); + const outgoingConnections = this.getDirectChildConnections(node); const newConnections: GraphConnection[] = []; for (const incomingConnection of incomingConnections) { + if (options.skipConnectionFn && options.skipConnectionFn(incomingConnection)) { + continue; + } + for (const outgoingConnection of outgoingConnections) { + if (options.skipConnectionFn && options.skipConnectionFn(outgoingConnection)) { + continue; + } + const newConnection = { ...incomingConnection, to: outgoingConnection.to, @@ -165,7 +187,7 @@ export class DirectedGraph { return this; } - getDirectChildren(node: INode) { + getDirectChildConnections(node: INode) { const nodeExists = this.nodes.get(node.name) === node; a.ok(nodeExists); @@ -183,7 +205,7 @@ export class DirectedGraph { } private getChildrenRecursive(node: INode, children: Set) { - const directChildren = this.getDirectChildren(node); + const directChildren = this.getDirectChildConnections(node); for (const directChild of directChildren) { // Break out if we found a cycle. @@ -202,13 +224,13 @@ export class DirectedGraph { * argument. 
* * If the node being passed in is a child of itself (e.g. is part of a - * cylce), the return set will contain it as well. + * cycle), the return set will contain it as well. */ getChildren(node: INode) { return this.getChildrenRecursive(node, new Set()); } - getDirectParents(node: INode) { + getDirectParentConnections(node: INode) { const nodeExists = this.nodes.get(node.name) === node; a.ok(nodeExists); @@ -225,6 +247,27 @@ export class DirectedGraph { return directParents; } + private getParentConnectionsRecursive(node: INode, connections: Set) { + const parentConnections = this.getDirectParentConnections(node); + + for (const connection of parentConnections) { + // break out of cycles + if (connections.has(connection)) { + continue; + } + + connections.add(connection); + + this.getParentConnectionsRecursive(connection.from, connections); + } + + return connections; + } + + getParentConnections(node: INode) { + return this.getParentConnectionsRecursive(node, new Set()); + } + getConnection( from: INode, outputIndex: number, diff --git a/packages/core/src/PartialExecutionUtils/__tests__/DirectedGraph.test.ts b/packages/core/src/PartialExecutionUtils/__tests__/DirectedGraph.test.ts index 426a5405c7..d6eedf416d 100644 --- a/packages/core/src/PartialExecutionUtils/__tests__/DirectedGraph.test.ts +++ b/packages/core/src/PartialExecutionUtils/__tests__/DirectedGraph.test.ts @@ -89,6 +89,60 @@ describe('DirectedGraph', () => { }); }); + describe('getParentConnections', () => { + // ┌─────┐ ┌─────┐ ┌─────┐ ┌─────┐ + // │node1├──►│node2├──►│node3│──►│node4│ + // └─────┘ └─────┘ └─────┘ └─────┘ + test('returns all parent connections', () => { + // ARRANGE + const node1 = createNodeData({ name: 'Node1' }); + const node2 = createNodeData({ name: 'Node2' }); + const node3 = createNodeData({ name: 'Node3' }); + const node4 = createNodeData({ name: 'Node4' }); + const graph = new DirectedGraph() + .addNodes(node1, node2, node3, node4) + .addConnections( + { from: node1, to: 
node2 }, + { from: node2, to: node3 }, + { from: node3, to: node4 }, + ); + + // ACT + const connections = graph.getParentConnections(node3); + + // ASSERT + const expectedConnections = graph.getConnections().filter((c) => c.to !== node4); + expect(connections.size).toBe(2); + expect(connections).toEqual(new Set(expectedConnections)); + }); + + // ┌─────┐ ┌─────┐ ┌─────┐ + // ┌─►│node1├───►│node2├──►│node3├─┐ + // │ └─────┘ └─────┘ └─────┘ │ + // │ │ + // └───────────────────────────────┘ + test('terminates when finding a cycle', () => { + // ARRANGE + const node1 = createNodeData({ name: 'Node1' }); + const node2 = createNodeData({ name: 'Node2' }); + const node3 = createNodeData({ name: 'Node3' }); + const graph = new DirectedGraph() + .addNodes(node1, node2, node3) + .addConnections( + { from: node1, to: node2 }, + { from: node2, to: node3 }, + { from: node3, to: node1 }, + ); + + // ACT + const connections = graph.getParentConnections(node3); + + // ASSERT + expect(connections.size).toBe(3); + expect(connections).toEqual(new Set(graph.getConnections())); + }); + }); + describe('removeNode', () => { // XX // ┌─────┐ ┌─────┐ ┌─────┐ diff --git a/packages/core/src/PartialExecutionUtils/__tests__/findStartNodes.test.ts b/packages/core/src/PartialExecutionUtils/__tests__/findStartNodes.test.ts index 0ea2e4f611..57022d862c 100644 --- a/packages/core/src/PartialExecutionUtils/__tests__/findStartNodes.test.ts +++ b/packages/core/src/PartialExecutionUtils/__tests__/findStartNodes.test.ts @@ -46,7 +46,7 @@ describe('findStartNodes', () => { const node = createNodeData({ name: 'Basic Node' }); const graph = new DirectedGraph().addNode(node); - const startNodes = findStartNodes(graph, node, node); + const startNodes = findStartNodes({ graph, trigger: node, destination: node }); expect(startNodes).toHaveLength(1); expect(startNodes[0]).toEqual(node); @@ -65,7 +65,7 @@ describe('findStartNodes', () => { // if the trigger has no run data { - const startNodes = 
findStartNodes(graph, trigger, destination); + const startNodes = findStartNodes({ graph, trigger, destination }); expect(startNodes).toHaveLength(1); expect(startNodes[0]).toEqual(trigger); @@ -77,7 +77,7 @@ describe('findStartNodes', () => { [trigger.name]: [toITaskData([{ data: { value: 1 } }])], }; - const startNodes = findStartNodes(graph, trigger, destination, runData); + const startNodes = findStartNodes({ graph, trigger, destination, runData }); expect(startNodes).toHaveLength(1); expect(startNodes[0]).toEqual(destination); @@ -112,7 +112,7 @@ describe('findStartNodes', () => { }; // ACT - const startNodes = findStartNodes(graph, trigger, node, runData); + const startNodes = findStartNodes({ graph, trigger, destination: node, runData }); // ASSERT expect(startNodes).toHaveLength(1); @@ -153,7 +153,7 @@ describe('findStartNodes', () => { { // ACT - const startNodes = findStartNodes(graph, trigger, node4); + const startNodes = findStartNodes({ graph, trigger, destination: node4 }); // ASSERT expect(startNodes).toHaveLength(1); @@ -172,7 +172,7 @@ describe('findStartNodes', () => { }; // ACT - const startNodes = findStartNodes(graph, trigger, node4, runData); + const startNodes = findStartNodes({ graph, trigger, destination: node4, runData }); // ASSERT expect(startNodes).toHaveLength(1); @@ -201,8 +201,13 @@ describe('findStartNodes', () => { ); // ACT - const startNodes = findStartNodes(graph, trigger, node, { - [trigger.name]: [toITaskData([{ data: { value: 1 }, outputIndex: 0 }])], + const startNodes = findStartNodes({ + graph, + trigger, + destination: node, + runData: { + [trigger.name]: [toITaskData([{ data: { value: 1 }, outputIndex: 0 }])], + }, }); // ASSERT @@ -231,8 +236,13 @@ describe('findStartNodes', () => { ); // ACT - const startNodes = findStartNodes(graph, trigger, node, { - [trigger.name]: [toITaskData([{ data: { value: 1 }, outputIndex: 1 }])], + const startNodes = findStartNodes({ + graph, + trigger, + destination: node, + runData: { + 
[trigger.name]: [toITaskData([{ data: { value: 1 }, outputIndex: 1 }])], + }, }); // ASSERT @@ -261,13 +271,18 @@ describe('findStartNodes', () => { ); // ACT - const startNodes = findStartNodes(graph, trigger, node, { - [trigger.name]: [ - toITaskData([ - { data: { value: 1 }, outputIndex: 0 }, - { data: { value: 1 }, outputIndex: 1 }, - ]), - ], + const startNodes = findStartNodes({ + graph, + trigger, + destination: node, + runData: { + [trigger.name]: [ + toITaskData([ + { data: { value: 1 }, outputIndex: 0 }, + { data: { value: 1 }, outputIndex: 1 }, + ]), + ], + }, }); // ASSERT @@ -297,10 +312,15 @@ describe('findStartNodes', () => { ); // ACT - const startNodes = findStartNodes(graph, trigger, node3, { - [trigger.name]: [toITaskData([{ data: { value: 1 }, outputIndex: 0 }])], - [node1.name]: [toITaskData([{ data: { value: 1 }, outputIndex: 0 }])], - [node2.name]: [toITaskData([{ data: { value: 1 }, outputIndex: 0 }])], + const startNodes = findStartNodes({ + graph, + trigger, + destination: node3, + runData: { + [trigger.name]: [toITaskData([{ data: { value: 1 }, outputIndex: 0 }])], + [node1.name]: [toITaskData([{ data: { value: 1 }, outputIndex: 0 }])], + [node2.name]: [toITaskData([{ data: { value: 1 }, outputIndex: 0 }])], + }, }); // ASSERT @@ -329,9 +349,14 @@ describe('findStartNodes', () => { ); // ACT - const startNodes = findStartNodes(graph, node1, node2, { - [trigger.name]: [toITaskData([{ data: { value: 1 } }])], - [node1.name]: [toITaskData([{ data: { value: 1 }, outputIndex: 1 }])], + const startNodes = findStartNodes({ + graph, + trigger: node1, + destination: node2, + runData: { + [trigger.name]: [toITaskData([{ data: { value: 1 } }])], + [node1.name]: [toITaskData([{ data: { value: 1 }, outputIndex: 1 }])], + }, }); // ASSERT @@ -364,7 +389,7 @@ describe('findStartNodes', () => { const pinData: IPinData = {}; // ACT - const startNodes = findStartNodes(graph, trigger, node2, runData, pinData); + const startNodes = findStartNodes({ graph, 
trigger, destination: node2, runData, pinData }); // ASSERT expect(startNodes).toHaveLength(1); diff --git a/packages/core/src/PartialExecutionUtils/__tests__/findSubgraph.test.ts b/packages/core/src/PartialExecutionUtils/__tests__/findSubgraph.test.ts index a5187eacf4..fceb22da06 100644 --- a/packages/core/src/PartialExecutionUtils/__tests__/findSubgraph.test.ts +++ b/packages/core/src/PartialExecutionUtils/__tests__/findSubgraph.test.ts @@ -9,6 +9,8 @@ // XX denotes that the node is disabled // PD denotes that the node has pinned data +import { NodeConnectionType } from 'n8n-workflow'; + import { createNodeData } from './helpers'; import { DirectedGraph } from '../DirectedGraph'; import { findSubgraph } from '../findSubgraph'; @@ -26,7 +28,7 @@ describe('findSubgraph', () => { .addNodes(trigger, destination) .addConnections({ from: trigger, to: destination }); - const subgraph = findSubgraph(graph, destination, trigger); + const subgraph = findSubgraph({ graph, destination, trigger }); expect(subgraph).toEqual(graph); }); @@ -48,7 +50,7 @@ describe('findSubgraph', () => { { from: ifNode, to: noOp, outputIndex: 1 }, ); - const subgraph = findSubgraph(graph, noOp, ifNode); + const subgraph = findSubgraph({ graph, destination: noOp, trigger: ifNode }); expect(subgraph).toEqual(graph); }); @@ -68,7 +70,7 @@ describe('findSubgraph', () => { .addNodes(trigger, destination, node) .addConnections({ from: trigger, to: destination }, { from: destination, to: node }); - const subgraph = findSubgraph(graph, destination, trigger); + const subgraph = findSubgraph({ graph, destination, trigger }); expect(subgraph).toEqual( new DirectedGraph() @@ -98,7 +100,7 @@ describe('findSubgraph', () => { .addNodes(trigger, disabled, destination) .addConnections({ from: trigger, to: disabled }, { from: disabled, to: destination }); - const subgraph = findSubgraph(graph, destination, trigger); + const subgraph = findSubgraph({ graph, destination, trigger }); expect(subgraph).toEqual( new 
DirectedGraph() @@ -131,7 +133,7 @@ describe('findSubgraph', () => { ); // ACT - const subgraph = findSubgraph(graph, destination, trigger); + const subgraph = findSubgraph({ graph, destination, trigger }); // ASSERT expect(subgraph).toEqual( @@ -161,7 +163,7 @@ describe('findSubgraph', () => { ); // ACT - const subgraph = findSubgraph(graph, node2, trigger); + const subgraph = findSubgraph({ graph, destination: node2, trigger }); // ASSERT expect(subgraph).toEqual(graph); @@ -185,7 +187,7 @@ describe('findSubgraph', () => { .addConnections({ from: trigger, to: node1 }, { from: node2, to: node1 }); // ACT - const subgraph = findSubgraph(graph, node1, trigger); + const subgraph = findSubgraph({ graph, destination: node1, trigger }); // ASSERT expect(subgraph).toEqual( @@ -213,7 +215,7 @@ describe('findSubgraph', () => { ); // ACT - const subgraph = findSubgraph(graph, destination, trigger); + const subgraph = findSubgraph({ graph, destination, trigger }); // ASSERT expect(subgraph).toEqual( @@ -222,4 +224,110 @@ describe('findSubgraph', () => { .addConnections({ from: trigger, to: destination }), ); }); + + describe('root nodes', () => { + // ►► + // ┌───────┐ ┌───────────┐ + // │trigger├─────►│destination│ + // └───────┘ └──▲────────┘ + // │AiLanguageModel + // ┌┴──────┐ + // │aiModel│ + // └───────┘ + test('always retain connections that have a different type than `NodeConnectionType.Main`', () => { + // ARRANGE + const trigger = createNodeData({ name: 'trigger' }); + const destination = createNodeData({ name: 'destination' }); + const aiModel = createNodeData({ name: 'ai_model' }); + + const graph = new DirectedGraph() + .addNodes(trigger, destination, aiModel) + .addConnections( + { from: trigger, to: destination }, + { from: aiModel, type: NodeConnectionType.AiLanguageModel, to: destination }, + ); + + // ACT + const subgraph = findSubgraph({ graph, destination, trigger }); + + // ASSERT + expect(subgraph).toEqual(graph); + }); + + // This graph is not 
possible, it's only here to make sure `findSubgraph` + // does not follow non-Main connections. + // + // ┌────┐ ┌───────────┐ + // │root┼───►destination│ + // └──▲─┘ └───────────┘ + // │AiLanguageModel + // ┌┴──────┐ + // │aiModel│ + // └▲──────┘ + // ┌┴──────┐ + // │trigger│ + // └───────┘ + // turns into an empty graph, because there is no `Main` typed connection + // connecting destination and trigger. + test('skip non-Main connection types', () => { + // ARRANGE + const trigger = createNodeData({ name: 'trigger' }); + const root = createNodeData({ name: 'root' }); + const aiModel = createNodeData({ name: 'aiModel' }); + const destination = createNodeData({ name: 'destination' }); + const graph = new DirectedGraph() + .addNodes(trigger, root, aiModel, destination) + .addConnections( + { from: trigger, to: aiModel }, + { from: aiModel, type: NodeConnectionType.AiLanguageModel, to: root }, + { from: root, to: destination }, + ); + + // ACT + const subgraph = findSubgraph({ graph, destination, trigger }); + + // ASSERT + expect(subgraph.getConnections()).toHaveLength(0); + expect(subgraph.getNodes().size).toBe(0); + }); + + // + // XX + // ┌───────┐ ┌────┐ ┌───────────┐ + // │trigger├───►root├───►destination│ + // └───────┘ └──▲─┘ └───────────┘ + // │AiLanguageModel + // ┌┴──────┐ + // │aiModel│ + // └───────┘ + // turns into + // ┌───────┐ ┌───────────┐ + // │trigger├────────────►destination│ + // └───────┘ └───────────┘ + test('skip disabled root nodes', () => { + // ARRANGE + const trigger = createNodeData({ name: 'trigger' }); + const root = createNodeData({ name: 'root', disabled: true }); + const aiModel = createNodeData({ name: 'ai_model' }); + const destination = createNodeData({ name: 'destination' }); + + const graph = new DirectedGraph() + .addNodes(trigger, root, aiModel, destination) + .addConnections( + { from: trigger, to: root }, + { from: aiModel, type: NodeConnectionType.AiLanguageModel, to: root }, + { from: root, to: destination }, + ); + + // 
ACT + const subgraph = findSubgraph({ graph, destination: root, trigger }); + + // ASSERT + expect(subgraph).toEqual( + new DirectedGraph() + .addNodes(trigger, destination) + .addConnections({ from: trigger, to: destination }), + ); + }); + }); }); diff --git a/packages/core/src/PartialExecutionUtils/__tests__/recreateNodeExecutionStack.test.ts b/packages/core/src/PartialExecutionUtils/__tests__/recreateNodeExecutionStack.test.ts index d0cc934b13..a4bcac23a5 100644 --- a/packages/core/src/PartialExecutionUtils/__tests__/recreateNodeExecutionStack.test.ts +++ b/packages/core/src/PartialExecutionUtils/__tests__/recreateNodeExecutionStack.test.ts @@ -32,7 +32,7 @@ describe('recreateNodeExecutionStack', () => { .addNodes(trigger, node) .addConnections({ from: trigger, to: node }); - const workflow = findSubgraph(graph, node, trigger); + const workflow = findSubgraph({ graph, destination: node, trigger }); const startNodes = [node]; const runData: IRunData = { [trigger.name]: [toITaskData([{ data: { value: 1 } }])], diff --git a/packages/core/src/PartialExecutionUtils/findStartNodes.ts b/packages/core/src/PartialExecutionUtils/findStartNodes.ts index 12a9688c1c..a6165f6564 100644 --- a/packages/core/src/PartialExecutionUtils/findStartNodes.ts +++ b/packages/core/src/PartialExecutionUtils/findStartNodes.ts @@ -80,7 +80,7 @@ function findStartNodesRecursive( } // Recurse with every direct child that is part of the sub graph. - const outGoingConnections = graph.getDirectChildren(current); + const outGoingConnections = graph.getDirectChildConnections(current); for (const outGoingConnection of outGoingConnections) { const nodeRunData = getIncomingData( runData, @@ -131,13 +131,19 @@ function findStartNodesRecursive( * - stop following the branch, there is no start node on this branch * 4. 
Recurse with every direct child that is part of the sub graph */ -export function findStartNodes( - graph: DirectedGraph, - trigger: INode, - destination: INode, - runData: IRunData = {}, - pinData: IPinData = {}, -): INode[] { +export function findStartNodes(options: { + graph: DirectedGraph; + trigger: INode; + destination: INode; + runData?: IRunData; + pinData?: IPinData; +}): INode[] { + const graph = options.graph; + const trigger = options.trigger; + const destination = options.destination; + const runData = options.runData ?? {}; + const pinData = options.pinData ?? {}; + const startNodes = findStartNodesRecursive( graph, trigger, diff --git a/packages/core/src/PartialExecutionUtils/findSubgraph.ts b/packages/core/src/PartialExecutionUtils/findSubgraph.ts index ea1df91840..4d3bc4cc3f 100644 --- a/packages/core/src/PartialExecutionUtils/findSubgraph.ts +++ b/packages/core/src/PartialExecutionUtils/findSubgraph.ts @@ -1,4 +1,4 @@ -import type { INode } from 'n8n-workflow'; +import { NodeConnectionType, type INode } from 'n8n-workflow'; import type { GraphConnection } from './DirectedGraph'; import { DirectedGraph } from './DirectedGraph'; @@ -21,7 +21,7 @@ function findSubgraphRecursive( return; } - let parentConnections = graph.getDirectParents(current); + let parentConnections = graph.getDirectParentConnections(current); // If the current node has no parents, don’t keep this branch. if (parentConnections.length === 0) { @@ -58,11 +58,24 @@ function findSubgraphRecursive( // The node is replaced by a set of new connections, connecting the parents // and children of it directly. In the recursive call below we'll follow // them further. - parentConnections = graph.removeNode(current, { reconnectConnections: true }); + parentConnections = graph.removeNode(current, { + reconnectConnections: true, + // If the node has non-Main connections we don't want to rewire those. + // Otherwise we'd end up connecting AI utilities to nodes that don't + // support them. 
+ skipConnectionFn: (c) => c.type !== NodeConnectionType.Main, + }); } // Recurse on each parent. for (const parentConnection of parentConnections) { + // Skip parents that are connected via non-Main connection types. They are + // only utility nodes for AI and are not part of the data or control flow + // and can never lead to the trigger. + if (parentConnection.type !== NodeConnectionType.Main) { + continue; + } + findSubgraphRecursive(graph, destinationNode, parentConnection.from, trigger, newGraph, [ ...currentBranch, parentConnection, @@ -87,15 +100,41 @@ * - take every incoming connection and connect it to every node that is * connected to the current node’s first output * 6. Recurse on each parent + * 7. Re-add all connections that don't use the `Main` connections type. + * These are used by nodes called root nodes and they are not part of the + * dataflow in the graph they are utility nodes, like the AI model used in a + * lang chain node. */ -export function findSubgraph( - graph: DirectedGraph, - destinationNode: INode, - trigger: INode, -): DirectedGraph { - const newGraph = new DirectedGraph(); +export function findSubgraph(options: { + graph: DirectedGraph; + destination: INode; + trigger: INode; +}): DirectedGraph { + const graph = options.graph; + const destination = options.destination; + const trigger = options.trigger; + const subgraph = new DirectedGraph(); - findSubgraphRecursive(graph, destinationNode, destinationNode, trigger, newGraph, []); + findSubgraphRecursive(graph, destination, destination, trigger, subgraph, []); - return newGraph; + // For each node in the subgraph, if it has parent connections of a type that + // is not `Main` in the input graph, add the connections and the nodes + // connected to it to the subgraph + // + // Without this all AI related workflows would not work when executed + // partially, because all utility nodes would be missing. 
+ for (const node of subgraph.getNodes().values()) { + const parentConnections = graph.getParentConnections(node); + + for (const connection of parentConnections) { + if (connection.type === NodeConnectionType.Main) { + continue; + } + + subgraph.addNodes(connection.from, connection.to); + subgraph.addConnection(connection); + } + } + + return subgraph; } diff --git a/packages/core/src/PartialExecutionUtils/recreateNodeExecutionStack.ts b/packages/core/src/PartialExecutionUtils/recreateNodeExecutionStack.ts index f2f1f4af68..4926becb79 100644 --- a/packages/core/src/PartialExecutionUtils/recreateNodeExecutionStack.ts +++ b/packages/core/src/PartialExecutionUtils/recreateNodeExecutionStack.ts @@ -64,7 +64,7 @@ export function recreateNodeExecutionStack( for (const startNode of startNodes) { const incomingStartNodeConnections = graph - .getDirectParents(startNode) + .getDirectParentConnections(startNode) .filter((c) => c.type === NodeConnectionType.Main); let incomingData: INodeExecutionData[][] = []; @@ -135,7 +135,7 @@ export function recreateNodeExecutionStack( // Check if the destinationNode has to be added as waiting // because some input data is already fully available const incomingDestinationNodeConnections = graph - .getDirectParents(destinationNode) + .getDirectParentConnections(destinationNode) .filter((c) => c.type === NodeConnectionType.Main); if (incomingDestinationNodeConnections !== undefined) { for (const connection of incomingDestinationNodeConnections) { diff --git a/packages/core/src/ScheduledTaskManager.ts b/packages/core/src/ScheduledTaskManager.ts index fd2bb525a9..00396903a5 100644 --- a/packages/core/src/ScheduledTaskManager.ts +++ b/packages/core/src/ScheduledTaskManager.ts @@ -30,8 +30,9 @@ export class ScheduledTaskManager { deregisterCrons(workflowId: string) { const cronJobs = this.cronJobs.get(workflowId) ?? 
[]; - for (const cronJob of cronJobs) { - cronJob.stop(); + while (cronJobs.length) { + const cronJob = cronJobs.pop(); + if (cronJob) cronJob.stop(); } } diff --git a/packages/core/src/WorkflowExecute.ts b/packages/core/src/WorkflowExecute.ts index 46de2472fc..ec5963a54b 100644 --- a/packages/core/src/WorkflowExecute.ts +++ b/packages/core/src/WorkflowExecute.ts @@ -87,7 +87,7 @@ export class WorkflowExecute { * Executes the given workflow. * * @param {Workflow} workflow The workflow to execute - * @param {INode[]} [startNodes] Node to start execution from + * @param {INode[]} [startNode] Node to start execution from * @param {string} [destinationNode] Node to stop execution at */ // IMPORTANT: Do not add "async" to this function, it will then convert the @@ -332,9 +332,9 @@ export class WorkflowExecute { 'a destinationNodeName is required for the new partial execution flow', ); - const destinationNode = workflow.getNode(destinationNodeName); + const destination = workflow.getNode(destinationNodeName); assert.ok( - destinationNode, + destination, `Could not find a node with the name ${destinationNodeName} in the workflow.`, ); @@ -348,11 +348,11 @@ export class WorkflowExecute { // 2. Find the Subgraph const graph = DirectedGraph.fromWorkflow(workflow); - const subgraph = findSubgraph(graph, destinationNode, trigger); + const subgraph = findSubgraph({ graph, destination, trigger }); const filteredNodes = subgraph.getNodes(); // 3. Find the Start Nodes - const startNodes = findStartNodes(subgraph, trigger, destinationNode, runData); + const startNodes = findStartNodes({ graph: subgraph, trigger, destination, runData }); // 4. Detect Cycles const cycles = findCycles(workflow); @@ -367,7 +367,7 @@ export class WorkflowExecute { // 7. Recreate Execution Stack const { nodeExecutionStack, waitingExecution, waitingExecutionSource } = - recreateNodeExecutionStack(subgraph, startNodes, destinationNode, runData, pinData ?? 
{}); + recreateNodeExecutionStack(subgraph, startNodes, destination, runData, pinData ?? {}); // 8. Execute this.status = 'running'; @@ -1058,7 +1058,7 @@ export class WorkflowExecute { this.runExecutionData.startData!.runNodeFilter.indexOf(executionNode.name) === -1 ) { // If filter is set and node is not on filter skip it, that avoids the problem that it executes - // leafs that are parallel to a selected destinationNode. Normally it would execute them because + // leaves that are parallel to a selected destinationNode. Normally it would execute them because // they have the same parent and it executes all child nodes. continue; } @@ -1759,7 +1759,7 @@ export class WorkflowExecute { continue; } } else { - // A certain amout of inputs are required (amount of inputs) + // A certain amount of inputs are required (amount of inputs) if (inputsWithData.length < requiredInputs) { continue; } @@ -1817,7 +1817,7 @@ export class WorkflowExecute { // Node to add did not get found, rather an empty one removed so continue with search waitingNodes = Object.keys(this.runExecutionData.executionData!.waitingExecution); // Set counter to start again from the beginning. Set it to -1 as it auto increments - // after run. So only like that will we end up again ot 0. + // after run. So only like that will we end up again at 0. i = -1; } } diff --git a/packages/core/src/data-deduplication-service.ts b/packages/core/src/data-deduplication-service.ts new file mode 100644 index 0000000000..4b7a51fcc2 --- /dev/null +++ b/packages/core/src/data-deduplication-service.ts @@ -0,0 +1,124 @@ +import get from 'lodash/get'; +import type { + IDataDeduplicator, + ICheckProcessedOptions, + IDeduplicationOutput, + IDeduplicationOutputItems, + IDataObject, + DeduplicationScope, + DeduplicationItemTypes, + ICheckProcessedContextData, +} from 'n8n-workflow'; +import * as assert from 'node:assert/strict'; + +/** + * A singleton service responsible for data deduplication. 
+ * This service wraps around the IDataDeduplicator interface and provides methods to handle + * deduplication-related operations such as checking, recording, and clearing processed data. + */ +export class DataDeduplicationService { + private static instance: DataDeduplicationService; + + private deduplicator: IDataDeduplicator; + + private constructor(deduplicator: IDataDeduplicator) { + this.deduplicator = deduplicator; + } + + private assertDeduplicator() { + assert.ok( + this.deduplicator, + 'Manager needs to initialized before use. Make sure to call init()', + ); + } + + private static assertInstance() { + assert.ok( + DataDeduplicationService.instance, + 'Instance needs to initialized before use. Make sure to call init()', + ); + } + + private static assertSingleInstance() { + assert.ok( + !DataDeduplicationService.instance, + 'Instance already initialized. Multiple initializations are not allowed.', + ); + } + + static async init(deduplicator: IDataDeduplicator): Promise { + this.assertSingleInstance(); + DataDeduplicationService.instance = new DataDeduplicationService(deduplicator); + } + + static getInstance(): DataDeduplicationService { + this.assertInstance(); + return DataDeduplicationService.instance; + } + + async checkProcessedItemsAndRecord( + propertyName: string, + items: IDataObject[], + scope: DeduplicationScope, + contextData: ICheckProcessedContextData, + options: ICheckProcessedOptions, + ): Promise { + this.assertDeduplicator(); + let value; + const itemLookup = items.reduce((acc, cur, index) => { + value = JSON.stringify(get(cur, propertyName)); + acc[value ? 
value.toString() : ''] = index; + return acc; + }, {}); + + const checkedItems = await this.deduplicator.checkProcessedAndRecord( + Object.keys(itemLookup), + scope, + contextData, + options, + ); + + return { + new: checkedItems.new.map((key) => items[itemLookup[key] as number]), + processed: checkedItems.processed.map((key) => items[itemLookup[key] as number]), + }; + } + + async checkProcessedAndRecord( + items: DeduplicationItemTypes[], + scope: DeduplicationScope, + contextData: ICheckProcessedContextData, + options: ICheckProcessedOptions, + ): Promise { + this.assertDeduplicator(); + return await this.deduplicator.checkProcessedAndRecord(items, scope, contextData, options); + } + + async removeProcessed( + items: DeduplicationItemTypes[], + scope: DeduplicationScope, + contextData: ICheckProcessedContextData, + options: ICheckProcessedOptions, + ): Promise { + this.assertDeduplicator(); + return await this.deduplicator.removeProcessed(items, scope, contextData, options); + } + + async clearAllProcessedItems( + scope: DeduplicationScope, + contextData: ICheckProcessedContextData, + options: ICheckProcessedOptions, + ): Promise { + this.assertDeduplicator(); + return await this.deduplicator.clearAllProcessedItems(scope, contextData, options); + } + + async getProcessedDataCount( + scope: DeduplicationScope, + contextData: ICheckProcessedContextData, + options: ICheckProcessedOptions, + ): Promise { + this.assertDeduplicator(); + return await this.deduplicator.getProcessedDataCount(scope, contextData, options); + } +} diff --git a/packages/core/src/index.ts b/packages/core/src/index.ts index c6b8450a4f..ebe240b51e 100644 --- a/packages/core/src/index.ts +++ b/packages/core/src/index.ts @@ -14,6 +14,7 @@ export { InstanceSettings, InstanceType } from './InstanceSettings'; export * from './NodeExecuteFunctions'; export * from './WorkflowExecute'; export { NodeExecuteFunctions }; +export * from './data-deduplication-service'; export * from './errors'; export { 
ObjectStoreService } from './ObjectStore/ObjectStore.service.ee'; export { BinaryData } from './BinaryData/types'; diff --git a/packages/core/test/InstanceSettings.test.ts b/packages/core/test/InstanceSettings.test.ts index 64b6840f2f..7bc572b168 100644 --- a/packages/core/test/InstanceSettings.test.ts +++ b/packages/core/test/InstanceSettings.test.ts @@ -69,4 +69,19 @@ describe('InstanceSettings', () => { ); }); }); + + describe('constructor', () => { + it('should generate a `hostId`', () => { + const encryptionKey = 'test_key'; + process.env.N8N_ENCRYPTION_KEY = encryptionKey; + jest.spyOn(fs, 'existsSync').mockReturnValueOnce(true); + jest.spyOn(fs, 'readFileSync').mockReturnValueOnce(JSON.stringify({ encryptionKey })); + + const settings = new InstanceSettings(); + + const [instanceType, nanoid] = settings.hostId.split('-'); + expect(instanceType).toEqual('main'); + expect(nanoid).toHaveLength(16); // e.g. sDX6ZPc0bozv66zM + }); + }); }); diff --git a/packages/core/test/ScheduledTaskManager.test.ts b/packages/core/test/ScheduledTaskManager.test.ts index 3ff8837ca9..5166240856 100644 --- a/packages/core/test/ScheduledTaskManager.test.ts +++ b/packages/core/test/ScheduledTaskManager.test.ts @@ -56,8 +56,13 @@ describe('ScheduledTaskManager', () => { scheduledTaskManager.registerCron(workflow, everyMinute, onTick); scheduledTaskManager.registerCron(workflow, everyMinute, onTick); scheduledTaskManager.registerCron(workflow, everyMinute, onTick); + + expect(scheduledTaskManager.cronJobs.get(workflow.id)?.length).toBe(3); + scheduledTaskManager.deregisterCrons(workflow.id); + expect(scheduledTaskManager.cronJobs.get(workflow.id)?.length).toBe(0); + expect(onTick).not.toHaveBeenCalled(); jest.advanceTimersByTime(10 * 60 * 1000); // 10 minutes expect(onTick).not.toHaveBeenCalled(); diff --git a/packages/design-system/package.json b/packages/design-system/package.json index 42f2f75111..36fcf31528 100644 --- a/packages/design-system/package.json +++ 
b/packages/design-system/package.json @@ -1,6 +1,6 @@ { "name": "n8n-design-system", - "version": "1.52.1", + "version": "1.53.0", "main": "src/main.ts", "import": "src/main.ts", "scripts": { diff --git a/packages/design-system/src/components/AskAssistantChat/__tests__/AskAssistantChat.spec.ts b/packages/design-system/src/components/AskAssistantChat/__tests__/AskAssistantChat.spec.ts index 8977df997a..b4b7d281a5 100644 --- a/packages/design-system/src/components/AskAssistantChat/__tests__/AskAssistantChat.spec.ts +++ b/packages/design-system/src/components/AskAssistantChat/__tests__/AskAssistantChat.spec.ts @@ -4,21 +4,26 @@ import { n8nHtml } from 'n8n-design-system/directives'; import AskAssistantChat from '../AskAssistantChat.vue'; +const stubs = ['n8n-avatar', 'n8n-button', 'n8n-icon', 'n8n-icon-button']; + describe('AskAssistantChat', () => { it('renders default placeholder chat correctly', () => { const { container } = render(AskAssistantChat, { props: { user: { firstName: 'Kobi', lastName: 'Dog' }, }, + global: { stubs }, }); expect(container).toMatchSnapshot(); }); + it('renders chat with messages correctly', () => { const { container } = render(AskAssistantChat, { global: { directives: { n8nHtml, }, + stubs, }, props: { user: { firstName: 'Kobi', lastName: 'Dog' }, @@ -92,12 +97,14 @@ describe('AskAssistantChat', () => { }); expect(container).toMatchSnapshot(); }); + it('renders streaming chat correctly', () => { const { container } = render(AskAssistantChat, { global: { directives: { n8nHtml, }, + stubs, }, props: { user: { firstName: 'Kobi', lastName: 'Dog' }, @@ -116,12 +123,14 @@ describe('AskAssistantChat', () => { }); expect(container).toMatchSnapshot(); }); + it('renders end of session chat correctly', () => { const { container } = render(AskAssistantChat, { global: { directives: { n8nHtml, }, + stubs, }, props: { user: { firstName: 'Kobi', lastName: 'Dog' }, @@ -146,12 +155,14 @@ describe('AskAssistantChat', () => { }); 
expect(container).toMatchSnapshot(); }); + it('renders message with code snippet', () => { const { container } = render(AskAssistantChat, { global: { directives: { n8nHtml, }, + stubs, }, props: { user: { firstName: 'Kobi', lastName: 'Dog' }, diff --git a/packages/design-system/src/components/AskAssistantChat/__tests__/__snapshots__/AskAssistantChat.spec.ts.snap b/packages/design-system/src/components/AskAssistantChat/__tests__/__snapshots__/AskAssistantChat.spec.ts.snap index b3cfeff42a..891c10abf6 100644 --- a/packages/design-system/src/components/AskAssistantChat/__tests__/__snapshots__/AskAssistantChat.spec.ts.snap +++ b/packages/design-system/src/components/AskAssistantChat/__tests__/__snapshots__/AskAssistantChat.spec.ts.snap @@ -64,7 +64,7 @@ exports[`AskAssistantChat > renders chat with messages correctly 1`] = ` class="back" data-test-id="close-chat-button" > - @@ -400,16 +400,14 @@ exports[`AskAssistantChat > renders chat with messages correctly 1`] = `
- - Replace my code - + />
@@ -422,7 +420,7 @@ exports[`AskAssistantChat > renders chat with messages correctly 1`] = `
- - - Replace my code - + />
@@ -708,22 +704,18 @@ Testing more code
- - Give me another solution - + />
- - All good - + />
@@ -742,7 +734,7 @@ Testing more code rows="1" wrap="hard" /> - renders default placeholder chat correctly 1`] = ` class="back" data-test-id="close-chat-button" > - @@ -848,6 +840,7 @@ exports[`AskAssistantChat > renders default placeholder chat correctly 1`] = ` For specific tasks, you’ll see the