diff --git a/CHANGELOG.md b/CHANGELOG.md index 219c7b1726..ab14dc462e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,102 @@ +# [1.63.0](https://github.com/n8n-io/n8n/compare/n8n@1.62.1...n8n@1.63.0) (2024-10-09) + + +### Bug Fixes + +* **Convert to File Node:** Convert to ICS start date defaults to now ([#11114](https://github.com/n8n-io/n8n/issues/11114)) ([1146c4e](https://github.com/n8n-io/n8n/commit/1146c4e98d8c85c15ac67fa1c3bfb731234531e3)) +* **core:** Allow loading nodes from multiple custom directories ([#11130](https://github.com/n8n-io/n8n/issues/11130)) ([1b84b0e](https://github.com/n8n-io/n8n/commit/1b84b0e5e7485d9f99d61a8ae3df49efadca0745)) +* **core:** Always set `startedAt` when executions start running ([#11098](https://github.com/n8n-io/n8n/issues/11098)) ([722f4a8](https://github.com/n8n-io/n8n/commit/722f4a8b771058800b992a482ad5f644b650960d)) +* **core:** Fix AI nodes not working with new partial execution flow ([#11055](https://github.com/n8n-io/n8n/issues/11055)) ([0eee5df](https://github.com/n8n-io/n8n/commit/0eee5dfd597817819dbe0463a63f671fde53432f)) +* **core:** Print errors that happen before the execution starts on the worker instead of just on the main instance ([#11099](https://github.com/n8n-io/n8n/issues/11099)) ([1d14557](https://github.com/n8n-io/n8n/commit/1d145574611661ecd9ab1a39d815c0ea915b9a1c)) +* **core:** Separate error handlers for main and worker ([#11091](https://github.com/n8n-io/n8n/issues/11091)) ([bb59cc7](https://github.com/n8n-io/n8n/commit/bb59cc71acc9e494e54abc8402d58db39e5a664e)) +* **editor:** Shorten overflowing Node Label in InputLabels on hover and focus ([#11110](https://github.com/n8n-io/n8n/issues/11110)) ([87a0b68](https://github.com/n8n-io/n8n/commit/87a0b68f9009c1c776d937c6ca62096e88c95ed6)) +* **editor:** Add safety to prevent undefined errors ([#11104](https://github.com/n8n-io/n8n/issues/11104)) ([565b117](https://github.com/n8n-io/n8n/commit/565b117a52f8eac9202a1a62c43daf78b293dcf8)) +* **editor:** Fix design system form element sizing ([#11040](https://github.com/n8n-io/n8n/issues/11040)) ([67c3453](https://github.com/n8n-io/n8n/commit/67c3453885bc619fedc8338a6dd0d8d66dead931)) +* **editor:** Fix getInitials when Intl.Segmenter is not supported ([#11103](https://github.com/n8n-io/n8n/issues/11103)) ([7e8955b](https://github.com/n8n-io/n8n/commit/7e8955b322b1d2c84c0f479a5977484d8d5e3135)) +* **editor:** Fix schema view in AI tools ([#11089](https://github.com/n8n-io/n8n/issues/11089)) ([09cfdbd](https://github.com/n8n-io/n8n/commit/09cfdbd1817eba46c935308880fe9f95ded252b0)) +* **editor:** Respect tag querystring filter when listing workflows ([#11029](https://github.com/n8n-io/n8n/issues/11029)) ([59c5ff6](https://github.com/n8n-io/n8n/commit/59c5ff61354302562ba5a2340c66811afdd1523b)) +* **editor:** Show previous nodes autocomplete in AI tool nodes ([#11111](https://github.com/n8n-io/n8n/issues/11111)) ([8566b3a](https://github.com/n8n-io/n8n/commit/8566b3a99939f45ac263830eee30d0d4ade9305c)) +* **editor:** Update Usage page for Community+ edition ([#11074](https://github.com/n8n-io/n8n/issues/11074)) ([3974981](https://github.com/n8n-io/n8n/commit/3974981ea5c67f6f2bbb90a96b405d9d0cfa21af)) +* Fix transaction handling for 'revert' command ([#11145](https://github.com/n8n-io/n8n/issues/11145)) ([a782336](https://github.com/n8n-io/n8n/commit/a7823367f13c3dba0c339eaafaad0199bd524b13)) +* Forbid access to files outside source control work directory ([#11152](https://github.com/n8n-io/n8n/issues/11152)) 
([606eedb](https://github.com/n8n-io/n8n/commit/606eedbf1b302e153bd13b7cef80847711e3a9ee)) +* **Gitlab Node:** Author name and email not being set ([#11077](https://github.com/n8n-io/n8n/issues/11077)) ([fce1233](https://github.com/n8n-io/n8n/commit/fce1233b58624d502c9c68f4b32a4bb7d76f1814)) +* Incorrect error message on calling wrong webhook method ([#11093](https://github.com/n8n-io/n8n/issues/11093)) ([d974b01](https://github.com/n8n-io/n8n/commit/d974b015d030c608158ff0c3fa3b7f4cbb8eadd3)) +* **n8n Form Trigger Node:** When clicking on a multiple choice label, the wrong one is selected ([#11059](https://github.com/n8n-io/n8n/issues/11059)) ([948edd1](https://github.com/n8n-io/n8n/commit/948edd1a047cf3dbddb3b0e9ec5de4bac3e97b9f)) +* **NASA Node:** Astronomy-Picture-Of-The-Day fails when it's a YouTube video ([#11046](https://github.com/n8n-io/n8n/issues/11046)) ([c70969d](https://github.com/n8n-io/n8n/commit/c70969da2bcabeb33394073a69ccef208311461b)) +* **Postgres PGVector Store Node:** Fix filtering in retriever mode ([#11075](https://github.com/n8n-io/n8n/issues/11075)) ([dbd2ae1](https://github.com/n8n-io/n8n/commit/dbd2ae199506a24c2df4c983111a56f2adf63eee)) +* Show result of waiting execution on canvas after execution completes ([#10815](https://github.com/n8n-io/n8n/issues/10815)) ([90b4bfc](https://github.com/n8n-io/n8n/commit/90b4bfc472ef132d2280b175ae7410dfb8e549b2)) +* **Slack Node:** User ID not sent correctly to API when updating user profile ([#11153](https://github.com/n8n-io/n8n/issues/11153)) ([ed9e61c](https://github.com/n8n-io/n8n/commit/ed9e61c46055d8e636a70c9c175d7d4ba596dd48)) + + +### Features + +* **core:** Introduce scoped logging ([#11127](https://github.com/n8n-io/n8n/issues/11127)) ([c68782c](https://github.com/n8n-io/n8n/commit/c68782c633b7ef6253ea705c5a222d4536491fd5)) +* **editor:** Add navigation dropdown component ([#11047](https://github.com/n8n-io/n8n/issues/11047)) ([e081fd1](https://github.com/n8n-io/n8n/commit/e081fd1f0b5a0700017a8dc92f013f0abdbad319)) +* **editor:** Add route for create / edit / share credentials ([#11134](https://github.com/n8n-io/n8n/issues/11134)) ([5697de4](https://github.com/n8n-io/n8n/commit/5697de4429c5d94f25ce1bd14c84fb4266ea47a7)) +* **editor:** Community+ enrollment ([#10776](https://github.com/n8n-io/n8n/issues/10776)) ([92cf860](https://github.com/n8n-io/n8n/commit/92cf860f9f2994442facfddc758bc60f5cbec520)) +* Human in the loop ([#10675](https://github.com/n8n-io/n8n/issues/10675)) ([41228b4](https://github.com/n8n-io/n8n/commit/41228b472de11affc8cd0821284427c2c9e8b421)) +* **OpenAI Node:** Allow specifying thread ID for Assistant -> Message operation ([#11080](https://github.com/n8n-io/n8n/issues/11080)) ([6a2f9e7](https://github.com/n8n-io/n8n/commit/6a2f9e72959fb0e89006b69c31fbcee1ead1cde9)) +* Opt in to additional features on community for existing users ([#11166](https://github.com/n8n-io/n8n/issues/11166)) ([c2adfc8](https://github.com/n8n-io/n8n/commit/c2adfc85451c5103eaad068f882066fd36c4aebe)) + + +### Performance Improvements + +* **core:** Optimize worker healthchecks ([#11092](https://github.com/n8n-io/n8n/issues/11092)) ([19fb728](https://github.com/n8n-io/n8n/commit/19fb728da0839c57603e55da4e407715e6c5b081)) + + + +## [1.62.1](https://github.com/n8n-io/n8n/compare/n8n@1.61.0...n8n@1.62.1) (2024-10-02) + + +### Bug Fixes + +* **AI Agent Node:** Fix output parsing and empty tool input handling in AI Agent node ([#10970](https://github.com/n8n-io/n8n/issues/10970)) 
([3a65bdc](https://github.com/n8n-io/n8n/commit/3a65bdc1f522932d463b4da0e67d29076887d06c)) +* **API:** Fix workflow project transfer ([#10651](https://github.com/n8n-io/n8n/issues/10651)) ([5f89e3a](https://github.com/n8n-io/n8n/commit/5f89e3a01c1bbb3589ff0464fd5bc991426f55dc)) +* **AwsS3 Node:** Fix search only using first input parameters ([#10998](https://github.com/n8n-io/n8n/issues/10998)) ([846cfde](https://github.com/n8n-io/n8n/commit/846cfde8dcaf7bf80f0a4ca7d65fc2a7b61d0e23)) +* **Chat Trigger Node:** Fix Allowed Origins parameter ([#11011](https://github.com/n8n-io/n8n/issues/11011)) ([b5f4afe](https://github.com/n8n-io/n8n/commit/b5f4afe12ec77f527080a4b7f812e12f9f73f8df)) +* **core:** Fix ownerless project case in statistics service ([#11051](https://github.com/n8n-io/n8n/issues/11051)) ([bdaadf1](https://github.com/n8n-io/n8n/commit/bdaadf10e058e2c0b1141289189d6526c030a2ca)) +* **core:** Handle Redis disconnects gracefully ([#11007](https://github.com/n8n-io/n8n/issues/11007)) ([cd91648](https://github.com/n8n-io/n8n/commit/cd916480c2d2b55f2215c72309dc432340fc3f30)) +* **core:** Prevent backend from loading duplicate copies of nodes packages ([#10979](https://github.com/n8n-io/n8n/issues/10979)) ([4584f22](https://github.com/n8n-io/n8n/commit/4584f22a9b16883779d8555cda309fd8bd113f6c)) +* **core:** Upgrade @n8n/typeorm to address a rare mutex release issue ([#10993](https://github.com/n8n-io/n8n/issues/10993)) ([2af0fbf](https://github.com/n8n-io/n8n/commit/2af0fbf52f0b404697f5148f81ad0035c9ffb6b9)) +* **editor:** Allow resources to move between personal and team projects ([#10683](https://github.com/n8n-io/n8n/issues/10683)) ([136d491](https://github.com/n8n-io/n8n/commit/136d49132567558b7d27069c857c0e0bfee70ce2)) +* **editor:** Color scheme for markdown code blocks in dark mode ([#11008](https://github.com/n8n-io/n8n/issues/11008)) ([b20d2eb](https://github.com/n8n-io/n8n/commit/b20d2eb403f71fe1dc21c92df118adcebef51ffe)) +* **editor:** Fix filter execution by "Queued" ([#10987](https://github.com/n8n-io/n8n/issues/10987)) ([819d20f](https://github.com/n8n-io/n8n/commit/819d20fa2eee314b88a7ce1c4db632afac514704)) +* **editor:** Fix performance issue in credentials list ([#10988](https://github.com/n8n-io/n8n/issues/10988)) ([7073ec6](https://github.com/n8n-io/n8n/commit/7073ec6fe5384cc8c50dcb242212999a1fbc9041)) +* **editor:** Fix schema view pill highlighting ([#10936](https://github.com/n8n-io/n8n/issues/10936)) ([1b973dc](https://github.com/n8n-io/n8n/commit/1b973dcd8dbce598e6ada490fd48fad52f7b4f3a)) +* **editor:** Fix workflow executions list page redirection ([#10981](https://github.com/n8n-io/n8n/issues/10981)) ([fe7d060](https://github.com/n8n-io/n8n/commit/fe7d0605681dc963f5e5d1607f9d40c5173e0f9f)) +* **editor:** Format action names properly when action is not defined ([#11030](https://github.com/n8n-io/n8n/issues/11030)) ([9c43fb3](https://github.com/n8n-io/n8n/commit/9c43fb301d1ccb82e42f46833e19587289803cd3)) +* **Elasticsearch Node:** Fix issue with self-signed certificates not working ([#10954](https://github.com/n8n-io/n8n/issues/10954)) ([79622b5](https://github.com/n8n-io/n8n/commit/79622b5f267f2a4a53f3eb48e228939d6e3a9caa)) +* **Facebook Lead Ads Trigger Node:** Pagination fix in RLC ([#10956](https://github.com/n8n-io/n8n/issues/10956)) ([6322372](https://github.com/n8n-io/n8n/commit/632237261087ada0177b67922f9f48ca02ef1d9e)) +* **Github Document Loader Node:** Pass through apiUrl from credentials & fix log output 
([#11049](https://github.com/n8n-io/n8n/issues/11049)) ([a7af981](https://github.com/n8n-io/n8n/commit/a7af98183c47a5e215869c8269729b0fb2f318b5)) +* **Google Sheets Node:** Updating on row_number using automatic matching ([#10940](https://github.com/n8n-io/n8n/issues/10940)) ([ed91495](https://github.com/n8n-io/n8n/commit/ed91495ebc1e09b89533ffef4b775eaa0139f365)) +* **HTTP Request Tool Node:** Remove default user agent header ([#10971](https://github.com/n8n-io/n8n/issues/10971)) ([5a99e93](https://github.com/n8n-io/n8n/commit/5a99e93f8d2c66d7dbcef382478badd63bc4a0b5)) +* **Postgres Node:** Falsy query parameters ignored ([#10960](https://github.com/n8n-io/n8n/issues/10960)) ([4a63cff](https://github.com/n8n-io/n8n/commit/4a63cff5ec722c810e3ff2bd7b0bb1e32f7f403b)) +* **Respond to Webhook Node:** Node does not work with Wait node ([#10992](https://github.com/n8n-io/n8n/issues/10992)) ([2df5a5b](https://github.com/n8n-io/n8n/commit/2df5a5b649f8ba3b747782d6d5045820aa74955d)) +* **RSS Feed Trigger Node:** Fix regression on missing timestamps ([#10991](https://github.com/n8n-io/n8n/issues/10991)) ([d2bc076](https://github.com/n8n-io/n8n/commit/d2bc0760e2b5c977fcc683f0a0281f099a9c538d)) +* **Supabase Node:** Fix issue with delete not always working ([#10952](https://github.com/n8n-io/n8n/issues/10952)) ([1944b46](https://github.com/n8n-io/n8n/commit/1944b46fd472bb59552b5fbf7783168a622a2bd2)) +* **Text Classifier Node:** Default system prompt template ([#11018](https://github.com/n8n-io/n8n/issues/11018)) ([77fec19](https://github.com/n8n-io/n8n/commit/77fec195d92e0fe23c60552a72e8c030cf7e5e5c)) +* **Todoist Node:** Fix listSearch filter bug in Todoist Node ([#10989](https://github.com/n8n-io/n8n/issues/10989)) ([c4b3272](https://github.com/n8n-io/n8n/commit/c4b327248d7aa1352e8d6acec5627ff406aea3d4)) +* **Todoist Node:** Make Section Name optional in Move Task operation ([#10732](https://github.com/n8n-io/n8n/issues/10732)) ([799006a](https://github.com/n8n-io/n8n/commit/799006a3cce6abe210469c839ae392d0c1aec486)) + + +### Features + +* Add more context to support chat ([#11014](https://github.com/n8n-io/n8n/issues/11014)) ([8a30f92](https://github.com/n8n-io/n8n/commit/8a30f92156d6a4fe73113bd3cdfb751b8c9ce4b4)) +* Add Sysdig API credentials for SecOps ([#7033](https://github.com/n8n-io/n8n/issues/7033)) ([a8d1a1e](https://github.com/n8n-io/n8n/commit/a8d1a1ea854fb2c69643b0a5738440b389121ca3)) +* **core:** Filter executions by project ID in internal API ([#10976](https://github.com/n8n-io/n8n/issues/10976)) ([06d749f](https://github.com/n8n-io/n8n/commit/06d749ffa7ced503141d8b07e22c47d971eb1623)) +* **core:** Implement Dynamic Parameters within regular nodes used as AI Tools ([#10862](https://github.com/n8n-io/n8n/issues/10862)) ([ef5b7cf](https://github.com/n8n-io/n8n/commit/ef5b7cf9b77b653111eb5b1d9de8116c9f6b9f92)) +* **editor:** Do not show error for remote options when credentials aren't specified ([#10944](https://github.com/n8n-io/n8n/issues/10944)) ([9fc3699](https://github.com/n8n-io/n8n/commit/9fc3699beb0c150909889ed17740a5cd9e0461c3)) +* **editor:** Enable drag and drop in code editors (Code/SQL/HTML) ([#10888](https://github.com/n8n-io/n8n/issues/10888)) ([af9e227](https://github.com/n8n-io/n8n/commit/af9e227ad4848995b9d82c72f814dbf9d1de506f)) +* **editor:** Overhaul document title management ([#10999](https://github.com/n8n-io/n8n/issues/10999)) ([bb28956](https://github.com/n8n-io/n8n/commit/bb2895689fb006897bc244271aca6f0bfa1839b9)) +* **editor:** Remove execution annotation feature 
flag ([#11020](https://github.com/n8n-io/n8n/issues/11020)) ([e7199db](https://github.com/n8n-io/n8n/commit/e7199dbfccdbdf1c4273f916e3006ca610c230e9)) +* **editor:** Support node-creator actions for vector store nodes ([#11032](https://github.com/n8n-io/n8n/issues/11032)) ([72b70d9](https://github.com/n8n-io/n8n/commit/72b70d9d98daeba654baf6785ff1ae234c73c977)) +* **Google BigQuery Node:** Return numeric values as integers ([#10943](https://github.com/n8n-io/n8n/issues/10943)) ([d7c1d24](https://github.com/n8n-io/n8n/commit/d7c1d24f74648740b2f425640909037ba06c5030)) +* **Invoice Ninja Node:** Add more query params to getAll requests ([#9238](https://github.com/n8n-io/n8n/issues/9238)) ([50b7238](https://github.com/n8n-io/n8n/commit/50b723836e70bbe405594f690b73057f9c33fbe4)) +* **Iterable Node:** Add support for EDC and USDC selection ([#10908](https://github.com/n8n-io/n8n/issues/10908)) ([0ca9c07](https://github.com/n8n-io/n8n/commit/0ca9c076ca51d313392e45c3b013f2e83aaea843)) +* **Question and Answer Chain Node:** Customize question and answer system prompt ([#10385](https://github.com/n8n-io/n8n/issues/10385)) ([08a27b3](https://github.com/n8n-io/n8n/commit/08a27b3148aac2282f64339ddc33ac7c90835d84)) + + + # [1.61.0](https://github.com/n8n-io/n8n/compare/n8n@1.60.0...n8n@1.61.0) (2024-09-25) diff --git a/cypress/composables/ndv.ts b/cypress/composables/ndv.ts index c3fab73f8c..5b3690e6a6 100644 --- a/cypress/composables/ndv.ts +++ b/cypress/composables/ndv.ts @@ -59,7 +59,7 @@ export function setCredentialByName(name: string) { export function clickCreateNewCredential() { openCredentialSelect(); - getCreateNewCredentialOption().click(); + getCreateNewCredentialOption().click({ force: true }); } export function clickGetBackToCanvas() { diff --git a/cypress/composables/projects.ts b/cypress/composables/projects.ts index 84379088d1..da9c6fcc65 100644 --- a/cypress/composables/projects.ts +++ b/cypress/composables/projects.ts @@ -32,8 +32,6 @@ export const addProjectMember = (email: string, role?: string) => { } }; export const getResourceMoveModal = () => cy.getByTestId('project-move-resource-modal'); -export const getResourceMoveConfirmModal = () => - cy.getByTestId('project-move-resource-confirm-modal'); export const getProjectMoveSelect = () => cy.getByTestId('project-move-resource-modal-select'); export function createProject(name: string) { diff --git a/cypress/composables/workflow.ts b/cypress/composables/workflow.ts index 8d37d5f2ad..394a35af18 100644 --- a/cypress/composables/workflow.ts +++ b/cypress/composables/workflow.ts @@ -144,6 +144,12 @@ export function addToolNodeToParent(nodeName: string, parentNodeName: string) { export function addOutputParserNodeToParent(nodeName: string, parentNodeName: string) { addSupplementalNodeToParent(nodeName, 'ai_outputParser', parentNodeName); } +export function addVectorStoreNodeToParent(nodeName: string, parentNodeName: string) { + addSupplementalNodeToParent(nodeName, 'ai_vectorStore', parentNodeName); +} +export function addRetrieverNodeToParent(nodeName: string, parentNodeName: string) { + addSupplementalNodeToParent(nodeName, 'ai_retriever', parentNodeName); +} export function clickExecuteWorkflowButton() { getExecuteWorkflowButton().click(); diff --git a/cypress/e2e/1-workflows.cy.ts b/cypress/e2e/1-workflows.cy.ts index 6835346012..a6683bbee4 100644 --- a/cypress/e2e/1-workflows.cy.ts +++ b/cypress/e2e/1-workflows.cy.ts @@ -73,4 +73,28 @@ describe('Workflows', () => { WorkflowsPage.getters.newWorkflowButtonCard().should('be.visible'); 
}); + + it('should respect tag querystring filter when listing workflows', () => { + WorkflowsPage.getters.newWorkflowButtonCard().click(); + + cy.createFixtureWorkflow('Test_workflow_2.json', getUniqueWorkflowName('My New Workflow')); + + cy.visit(WorkflowsPage.url); + + WorkflowsPage.getters.createWorkflowButton().click(); + + cy.createFixtureWorkflow('Test_workflow_1.json', 'Empty State Card Workflow'); + + cy.visit(WorkflowsPage.url); + + WorkflowsPage.getters.workflowFilterButton().click(); + + WorkflowsPage.getters.workflowTagsDropdown().click(); + + WorkflowsPage.getters.workflowTagItem('some-tag-1').click(); + + cy.reload(); + + WorkflowsPage.getters.workflowCards().should('have.length', 1); + }); }); diff --git a/cypress/e2e/20-workflow-executions.cy.ts b/cypress/e2e/20-workflow-executions.cy.ts index 19256f3bf9..5788af171c 100644 --- a/cypress/e2e/20-workflow-executions.cy.ts +++ b/cypress/e2e/20-workflow-executions.cy.ts @@ -229,6 +229,35 @@ describe('Workflow Executions', () => { cy.getByTestId('executions-filter-reset-button').should('be.visible').click(); executionsTab.getters.executionListItems().eq(11).should('be.visible'); }); + + it('should redirect back to editor after seeing a couple of executions using the browser back button', () => { + createMockExecutions(); + cy.intercept('GET', '/rest/executions?filter=*').as('getExecutions'); + + executionsTab.actions.switchToExecutionsTab(); + + cy.wait(['@getExecutions']); + executionsTab.getters.workflowExecutionPreviewIframe().should('exist'); + + executionsTab.getters.executionListItems().eq(2).click(); + executionsTab.getters.workflowExecutionPreviewIframe().should('exist'); + executionsTab.getters.executionListItems().eq(4).click(); + executionsTab.getters.workflowExecutionPreviewIframe().should('exist'); + executionsTab.getters.executionListItems().eq(6).click(); + executionsTab.getters.workflowExecutionPreviewIframe().should('exist'); + + cy.go('back'); + executionsTab.getters.workflowExecutionPreviewIframe().should('exist'); + cy.go('back'); + executionsTab.getters.workflowExecutionPreviewIframe().should('exist'); + cy.go('back'); + executionsTab.getters.workflowExecutionPreviewIframe().should('exist'); + cy.go('back'); + + cy.url().should('not.include', '/executions'); + cy.url().should('include', '/workflow/'); + workflowPage.getters.nodeViewRoot().should('be.visible'); + }); }); describe('when new workflow is not saved', () => { diff --git a/cypress/e2e/26-resource-locator.cy.ts b/cypress/e2e/26-resource-locator.cy.ts index 6e431690ad..124e322c2b 100644 --- a/cypress/e2e/26-resource-locator.cy.ts +++ b/cypress/e2e/26-resource-locator.cy.ts @@ -65,7 +65,7 @@ describe('Resource Locator', () => { }); it('should show appropriate errors when search filter is required', () => { - workflowPage.actions.addNodeToCanvas('Github', true, true, 'On Pull Request'); + workflowPage.actions.addNodeToCanvas('Github', true, true, 'On pull request'); ndv.getters.resourceLocator('owner').should('be.visible'); ndv.getters.resourceLocatorInput('owner').click(); ndv.getters.resourceLocatorErrorMessage().should('contain', NO_CREDENTIALS_MESSAGE); diff --git a/cypress/e2e/39-projects.cy.ts b/cypress/e2e/39-projects.cy.ts index 59ed6bcb84..4e3bb583df 100644 --- a/cypress/e2e/39-projects.cy.ts +++ b/cypress/e2e/39-projects.cy.ts @@ -1,5 +1,11 @@ import * as projects from '../composables/projects'; -import { INSTANCE_MEMBERS, MANUAL_TRIGGER_NODE_NAME, NOTION_NODE_NAME } from '../constants'; +import { + INSTANCE_ADMIN, + INSTANCE_MEMBERS, + 
INSTANCE_OWNER, + MANUAL_TRIGGER_NODE_NAME, + NOTION_NODE_NAME, +} from '../constants'; import { WorkflowsPage, WorkflowPage, @@ -481,44 +487,15 @@ describe('Projects', { disableAutoLogin: true }, () => { projects .getResourceMoveModal() .should('be.visible') - .find('button:contains("Next")') + .find('button:contains("Move workflow")') .should('be.disabled'); projects.getProjectMoveSelect().click(); getVisibleSelect() .find('li') - .should('have.length', 2) - .first() - .should('contain.text', 'Project 1') - .click(); - projects.getResourceMoveModal().find('button:contains("Next")').click(); - - projects - .getResourceMoveConfirmModal() - .should('be.visible') - .find('button:contains("Confirm")') - .should('be.disabled'); - - projects - .getResourceMoveConfirmModal() - .find('input[type="checkbox"]') - .first() - .parents('label') - .click(); - projects - .getResourceMoveConfirmModal() - .find('button:contains("Confirm")') - .should('be.disabled'); - projects - .getResourceMoveConfirmModal() - .find('input[type="checkbox"]') - .last() - .parents('label') - .click(); - projects - .getResourceMoveConfirmModal() - .find('button:contains("Confirm")') - .should('not.be.disabled') + .should('have.length', 5) + .filter(':contains("Project 1")') .click(); + projects.getResourceMoveModal().find('button:contains("Move workflow")').click(); workflowsPage.getters .workflowCards() @@ -526,9 +503,77 @@ describe('Projects', { disableAutoLogin: true }, () => { .filter(':contains("Owned by me")') .should('not.exist'); - // Move the credential from Project 1 to Project 2 + // Move the workflow from Project 1 to Project 2 projects.getMenuItems().first().click(); workflowsPage.getters.workflowCards().should('have.length', 2); + workflowsPage.getters.workflowCardActions('Workflow in Home project').click(); + workflowsPage.getters.workflowMoveButton().click(); + + projects + .getResourceMoveModal() + .should('be.visible') + .find('button:contains("Move workflow")') + .should('be.disabled'); + projects.getProjectMoveSelect().click(); + getVisibleSelect() + .find('li') + .should('have.length', 5) + .filter(':contains("Project 2")') + .click(); + projects.getResourceMoveModal().find('button:contains("Move workflow")').click(); + + // Move the workflow from Project 2 to a member user + projects.getMenuItems().last().click(); + workflowsPage.getters.workflowCards().should('have.length', 2); + workflowsPage.getters.workflowCardActions('Workflow in Home project').click(); + workflowsPage.getters.workflowMoveButton().click(); + + projects + .getResourceMoveModal() + .should('be.visible') + .find('button:contains("Move workflow")') + .should('be.disabled'); + projects.getProjectMoveSelect().click(); + getVisibleSelect() + .find('li') + .should('have.length', 5) + .filter(`:contains("${INSTANCE_MEMBERS[0].email}")`) + .click(); + + projects.getResourceMoveModal().find('button:contains("Move workflow")').click(); + workflowsPage.getters.workflowCards().should('have.length', 1); + + // Move the workflow from member user back to Home + projects.getHomeButton().click(); + workflowsPage.getters + .workflowCards() + .should('have.length', 3) + .filter(':has(.n8n-badge:contains("Project"))') + .should('have.length', 2); + workflowsPage.getters.workflowCardActions('Workflow in Home project').click(); + workflowsPage.getters.workflowMoveButton().click(); + + projects + .getResourceMoveModal() + .should('be.visible') + .find('button:contains("Move workflow")') + .should('be.disabled'); + projects.getProjectMoveSelect().click(); 
+ getVisibleSelect() + .find('li') + .should('have.length', 5) + .filter(`:contains("${INSTANCE_OWNER.email}")`) + .click(); + + projects.getResourceMoveModal().find('button:contains("Move workflow")').click(); + workflowsPage.getters + .workflowCards() + .should('have.length', 3) + .filter(':contains("Owned by me")') + .should('have.length', 1); + + // Move the credential from Project 1 to Project 2 + projects.getMenuItems().first().click(); projects.getProjectTabCredentials().click(); credentialsPage.getters.credentialCards().should('have.length', 1); credentialsPage.getters.credentialCardActions('Credential in Project 1').click(); @@ -537,48 +582,162 @@ describe('Projects', { disableAutoLogin: true }, () => { projects .getResourceMoveModal() .should('be.visible') - .find('button:contains("Next")') + .find('button:contains("Move credential")') .should('be.disabled'); projects.getProjectMoveSelect().click(); getVisibleSelect() .find('li') - .should('have.length', 1) - .first() - .should('contain.text', 'Project 2') + .should('have.length', 5) + .filter(':contains("Project 2")') .click(); - projects.getResourceMoveModal().find('button:contains("Next")').click(); + projects.getResourceMoveModal().find('button:contains("Move credential")').click(); - projects - .getResourceMoveConfirmModal() - .should('be.visible') - .find('button:contains("Confirm")') - .should('be.disabled'); - - projects - .getResourceMoveConfirmModal() - .find('input[type="checkbox"]') - .first() - .parents('label') - .click(); - projects - .getResourceMoveConfirmModal() - .find('button:contains("Confirm")') - .should('be.disabled'); - projects - .getResourceMoveConfirmModal() - .find('input[type="checkbox"]') - .last() - .parents('label') - .click(); - projects - .getResourceMoveConfirmModal() - .find('button:contains("Confirm")') - .should('not.be.disabled') - .click(); credentialsPage.getters.credentialCards().should('not.have.length'); + + // Move the credential from Project 2 to admin user projects.getMenuItems().last().click(); projects.getProjectTabCredentials().click(); credentialsPage.getters.credentialCards().should('have.length', 2); + + credentialsPage.getters.credentialCardActions('Credential in Project 1').click(); + credentialsPage.getters.credentialMoveButton().click(); + + projects + .getResourceMoveModal() + .should('be.visible') + .find('button:contains("Move credential")') + .should('be.disabled'); + projects.getProjectMoveSelect().click(); + getVisibleSelect() + .find('li') + .should('have.length', 5) + .filter(`:contains("${INSTANCE_ADMIN.email}")`) + .click(); + projects.getResourceMoveModal().find('button:contains("Move credential")').click(); + credentialsPage.getters.credentialCards().should('have.length', 1); + + // Move the credential from admin user back to instance owner + projects.getHomeButton().click(); + projects.getProjectTabCredentials().click(); + credentialsPage.getters.credentialCards().should('have.length', 3); + + credentialsPage.getters.credentialCardActions('Credential in Project 1').click(); + credentialsPage.getters.credentialMoveButton().click(); + + projects + .getResourceMoveModal() + .should('be.visible') + .find('button:contains("Move credential")') + .should('be.disabled'); + projects.getProjectMoveSelect().click(); + getVisibleSelect() + .find('li') + .should('have.length', 5) + .filter(`:contains("${INSTANCE_OWNER.email}")`) + .click(); + projects.getResourceMoveModal().find('button:contains("Move credential")').click(); + + credentialsPage.getters + .credentialCards() 
+ .should('have.length', 3) + .filter(':contains("Owned by me")') + .should('have.length', 2); + + // Move the credential from admin user back to its original project (Project 1) + credentialsPage.getters.credentialCardActions('Credential in Project 1').click(); + credentialsPage.getters.credentialMoveButton().click(); + + projects + .getResourceMoveModal() + .should('be.visible') + .find('button:contains("Move credential")') + .should('be.disabled'); + projects.getProjectMoveSelect().click(); + getVisibleSelect() + .find('li') + .should('have.length', 5) + .filter(':contains("Project 1")') + .click(); + projects.getResourceMoveModal().find('button:contains("Move credential")').click(); + + projects.getMenuItems().first().click(); + projects.getProjectTabCredentials().click(); + credentialsPage.getters + .credentialCards() + .filter(':contains("Credential in Project 1")') + .should('have.length', 1); + }); + + it('should allow to change inaccessible credential when the workflow was moved to a team project', () => { + cy.signinAsOwner(); + cy.visit(workflowsPage.url); + + // Create a credential in the Home project + projects.getProjectTabCredentials().should('be.visible').click(); + credentialsPage.getters.emptyListCreateCredentialButton().click(); + projects.createCredential('Credential in Home project'); + + // Create a workflow in the Home project + projects.getHomeButton().click(); + workflowsPage.getters.workflowCards().should('not.have.length'); + workflowsPage.getters.newWorkflowButtonCard().click(); + workflowsPage.getters.workflowCards().should('not.have.length'); + + workflowsPage.getters.newWorkflowButtonCard().click(); + workflowPage.actions.addNodeToCanvas(MANUAL_TRIGGER_NODE_NAME); + workflowPage.actions.addNodeToCanvas(NOTION_NODE_NAME, true, true); + ndv.getters.backToCanvas().click(); + workflowPage.actions.saveWorkflowOnButtonClick(); + + // Create a project and add a user to it + projects.createProject('Project 1'); + projects.addProjectMember(INSTANCE_MEMBERS[0].email); + projects.getProjectSettingsSaveButton().click(); + + // Move the workflow from Home to Project 1 + projects.getHomeButton().click(); + workflowsPage.getters + .workflowCards() + .should('have.length', 1) + .filter(':contains("Owned by me")') + .should('exist'); + workflowsPage.getters.workflowCardActions('My workflow').click(); + workflowsPage.getters.workflowMoveButton().click(); + + projects + .getResourceMoveModal() + .should('be.visible') + .find('button:contains("Move workflow")') + .should('be.disabled'); + projects.getProjectMoveSelect().click(); + getVisibleSelect() + .find('li') + .should('have.length', 4) + .filter(':contains("Project 1")') + .click(); + projects.getResourceMoveModal().find('button:contains("Move workflow")').click(); + + workflowsPage.getters + .workflowCards() + .should('have.length', 1) + .filter(':contains("Owned by me")') + .should('not.exist'); + + //Log out with instance owner and log in with the member user + mainSidebar.actions.openUserMenu(); + cy.getByTestId('user-menu-item-logout').click(); + + cy.get('input[name="email"]').type(INSTANCE_MEMBERS[0].email); + cy.get('input[name="password"]').type(INSTANCE_MEMBERS[0].password); + cy.getByTestId('form-submit-button').click(); + + // Open the moved workflow + workflowsPage.getters.workflowCards().should('have.length', 1); + workflowsPage.getters.workflowCards().first().click(); + + // Check if the credential can be changed + workflowPage.getters.canvasNodeByName(NOTION_NODE_NAME).should('be.visible').dblclick(); + 
ndv.getters.credentialInput().find('input').should('be.enabled'); }); it('should handle viewer role', () => { diff --git a/cypress/e2e/4-node-creator.cy.ts b/cypress/e2e/4-node-creator.cy.ts index 9dfe128322..a2cd5968d1 100644 --- a/cypress/e2e/4-node-creator.cy.ts +++ b/cypress/e2e/4-node-creator.cy.ts @@ -1,3 +1,9 @@ +import { + addNodeToCanvas, + addRetrieverNodeToParent, + addVectorStoreNodeToParent, + getNodeCreatorItems, +} from '../composables/workflow'; import { IF_NODE_NAME } from '../constants'; import { NodeCreator } from '../pages/features/node-creator'; import { NDV } from '../pages/ndv'; @@ -504,4 +510,38 @@ describe('Node Creator', () => { nodeCreatorFeature.getters.searchBar().find('input').clear().type('gith'); nodeCreatorFeature.getters.nodeItemName().first().should('have.text', 'GitHub'); }); + + it('should show vector stores actions', () => { + const actions = [ + 'Get ranked documents from vector store', + 'Add documents to vector store', + 'Retrieve documents for AI processing', + ]; + + nodeCreatorFeature.actions.openNodeCreator(); + + nodeCreatorFeature.getters.searchBar().find('input').clear().type('Vector Store'); + + getNodeCreatorItems().then((items) => { + const vectorStores = items.map((_i, el) => el.innerText); + + // Loop over all vector stores and check if they have the three actions + vectorStores.each((_i, vectorStore) => { + nodeCreatorFeature.getters.getCreatorItem(vectorStore).click(); + actions.forEach((action) => { + nodeCreatorFeature.getters.getCreatorItem(action).should('be.visible'); + }); + cy.realPress('ArrowLeft'); + }); + }); + }); + + it('should add node directly for sub-connection', () => { + addNodeToCanvas('Question and Answer Chain', true); + addRetrieverNodeToParent('Vector Store Retriever', 'Question and Answer Chain'); + cy.realPress('Escape'); + addVectorStoreNodeToParent('In-Memory Vector Store', 'Vector Store Retriever'); + cy.realPress('Escape'); + WorkflowPage.getters.canvasNodes().should('have.length', 4); + }); }); diff --git a/cypress/e2e/45-ai-assistant.cy.ts b/cypress/e2e/45-ai-assistant.cy.ts index 6c69a97708..4fc1407bb9 100644 --- a/cypress/e2e/45-ai-assistant.cy.ts +++ b/cypress/e2e/45-ai-assistant.cy.ts @@ -78,11 +78,11 @@ describe('AI Assistant::enabled', () => { }); it('should start chat session from node error view', () => { - cy.intercept('POST', '/rest/ai-assistant/chat', { + cy.intercept('POST', '/rest/ai/chat', { statusCode: 200, - fixture: 'aiAssistant/simple_message_response.json', + fixture: 'aiAssistant/responses/simple_message_response.json', }).as('chatRequest'); - cy.createFixtureWorkflow('aiAssistant/test_workflow.json'); + cy.createFixtureWorkflow('aiAssistant/workflows/test_workflow.json'); wf.actions.openNode('Stop and Error'); ndv.getters.nodeExecuteButton().click(); aiAssistant.getters.nodeErrorViewAssistantButton().click(); @@ -96,11 +96,11 @@ describe('AI Assistant::enabled', () => { }); it('should render chat input correctly', () => { - cy.intercept('POST', '/rest/ai-assistant/chat', { + cy.intercept('POST', '/rest/ai/chat', { statusCode: 200, - fixture: 'aiAssistant/simple_message_response.json', + fixture: 'aiAssistant/responses/simple_message_response.json', }).as('chatRequest'); - cy.createFixtureWorkflow('aiAssistant/test_workflow.json'); + cy.createFixtureWorkflow('aiAssistant/workflows/test_workflow.json'); wf.actions.openNode('Stop and Error'); ndv.getters.nodeExecuteButton().click(); aiAssistant.getters.nodeErrorViewAssistantButton().click(); @@ -129,11 +129,11 @@ describe('AI 
Assistant::enabled', () => { }); it('should render and handle quick replies', () => { - cy.intercept('POST', '/rest/ai-assistant/chat', { + cy.intercept('POST', '/rest/ai/chat', { statusCode: 200, - fixture: 'aiAssistant/quick_reply_message_response.json', + fixture: 'aiAssistant/responses/quick_reply_message_response.json', }).as('chatRequest'); - cy.createFixtureWorkflow('aiAssistant/test_workflow.json'); + cy.createFixtureWorkflow('aiAssistant/workflows/test_workflow.json'); wf.actions.openNode('Stop and Error'); ndv.getters.nodeExecuteButton().click(); aiAssistant.getters.nodeErrorViewAssistantButton().click(); @@ -146,18 +146,24 @@ describe('AI Assistant::enabled', () => { }); it('should show quick replies when node is executed after new suggestion', () => { - cy.intercept('POST', '/rest/ai-assistant/chat', (req) => { + cy.intercept('POST', '/rest/ai/chat', (req) => { req.reply((res) => { if (['init-error-helper', 'message'].includes(req.body.payload.type)) { - res.send({ statusCode: 200, fixture: 'aiAssistant/simple_message_response.json' }); + res.send({ + statusCode: 200, + fixture: 'aiAssistant/responses/simple_message_response.json', + }); } else if (req.body.payload.type === 'event') { - res.send({ statusCode: 200, fixture: 'aiAssistant/node_execution_error_response.json' }); + res.send({ + statusCode: 200, + fixture: 'aiAssistant/responses/node_execution_error_response.json', + }); } else { res.send({ statusCode: 500 }); } }); }).as('chatRequest'); - cy.createFixtureWorkflow('aiAssistant/test_workflow.json'); + cy.createFixtureWorkflow('aiAssistant/workflows/test_workflow.json'); wf.actions.openNode('Edit Fields'); ndv.getters.nodeExecuteButton().click(); aiAssistant.getters.nodeErrorViewAssistantButton().click(); @@ -172,16 +178,15 @@ describe('AI Assistant::enabled', () => { aiAssistant.getters.quickReplies().should('not.exist'); ndv.getters.nodeExecuteButton().click(); // But after executing the node again, quick replies should be shown - aiAssistant.getters.chatMessagesAssistant().should('have.length', 4); aiAssistant.getters.quickReplies().should('have.length', 2); }); it('should warn before starting a new session', () => { - cy.intercept('POST', '/rest/ai-assistant/chat', { + cy.intercept('POST', '/rest/ai/chat', { statusCode: 200, - fixture: 'aiAssistant/simple_message_response.json', + fixture: 'aiAssistant/responses/simple_message_response.json', }).as('chatRequest'); - cy.createFixtureWorkflow('aiAssistant/test_workflow.json'); + cy.createFixtureWorkflow('aiAssistant/workflows/test_workflow.json'); wf.actions.openNode('Edit Fields'); ndv.getters.nodeExecuteButton().click(); aiAssistant.getters.nodeErrorViewAssistantButton().click({ force: true }); @@ -204,15 +209,15 @@ describe('AI Assistant::enabled', () => { }); it('should apply code diff to code node', () => { - cy.intercept('POST', '/rest/ai-assistant/chat', { + cy.intercept('POST', '/rest/ai/chat', { statusCode: 200, - fixture: 'aiAssistant/code_diff_suggestion_response.json', + fixture: 'aiAssistant/responses/code_diff_suggestion_response.json', }).as('chatRequest'); - cy.intercept('POST', '/rest/ai-assistant/chat/apply-suggestion', { + cy.intercept('POST', '/rest/ai/chat/apply-suggestion', { statusCode: 200, - fixture: 'aiAssistant/apply_code_diff_response.json', + fixture: 'aiAssistant/responses/apply_code_diff_response.json', }).as('applySuggestion'); - cy.createFixtureWorkflow('aiAssistant/test_workflow.json'); + cy.createFixtureWorkflow('aiAssistant/workflows/test_workflow.json'); 
wf.actions.openNode('Code'); ndv.getters.nodeExecuteButton().click(); aiAssistant.getters.nodeErrorViewAssistantButton().click({ force: true }); @@ -254,11 +259,11 @@ describe('AI Assistant::enabled', () => { }); it('should end chat session when `end_session` event is received', () => { - cy.intercept('POST', '/rest/ai-assistant/chat', { + cy.intercept('POST', '/rest/ai/chat', { statusCode: 200, - fixture: 'aiAssistant/end_session_response.json', + fixture: 'aiAssistant/responses/end_session_response.json', }).as('chatRequest'); - cy.createFixtureWorkflow('aiAssistant/test_workflow.json'); + cy.createFixtureWorkflow('aiAssistant/workflows/test_workflow.json'); wf.actions.openNode('Stop and Error'); ndv.getters.nodeExecuteButton().click(); aiAssistant.getters.nodeErrorViewAssistantButton().click(); @@ -268,12 +273,15 @@ describe('AI Assistant::enabled', () => { }); it('should reset session after it ended and sidebar is closed', () => { - cy.intercept('POST', '/rest/ai-assistant/chat', (req) => { + cy.intercept('POST', '/rest/ai/chat', (req) => { req.reply((res) => { if (['init-support-chat'].includes(req.body.payload.type)) { - res.send({ statusCode: 200, fixture: 'aiAssistant/simple_message_response.json' }); + res.send({ + statusCode: 200, + fixture: 'aiAssistant/responses/simple_message_response.json', + }); } else { - res.send({ statusCode: 200, fixture: 'aiAssistant/end_session_response.json' }); + res.send({ statusCode: 200, fixture: 'aiAssistant/responses/end_session_response.json' }); } }); }).as('chatRequest'); @@ -296,9 +304,9 @@ describe('AI Assistant::enabled', () => { }); it('Should not reset assistant session when workflow is saved', () => { - cy.intercept('POST', '/rest/ai-assistant/chat', { + cy.intercept('POST', '/rest/ai/chat', { statusCode: 200, - fixture: 'aiAssistant/simple_message_response.json', + fixture: 'aiAssistant/responses/simple_message_response.json', }).as('chatRequest'); wf.actions.addInitialNodeToCanvas(SCHEDULE_TRIGGER_NODE_NAME); aiAssistant.actions.openChat(); @@ -321,9 +329,9 @@ describe('AI Assistant Credential Help', () => { }); it('should start credential help from node credential', () => { - cy.intercept('POST', '/rest/ai-assistant/chat', { + cy.intercept('POST', '/rest/ai/chat', { statusCode: 200, - fixture: 'aiAssistant/simple_message_response.json', + fixture: 'aiAssistant/responses/simple_message_response.json', }).as('chatRequest'); wf.actions.addNodeToCanvas(SCHEDULE_TRIGGER_NODE_NAME); wf.actions.addNodeToCanvas(GMAIL_NODE_NAME); @@ -347,9 +355,9 @@ describe('AI Assistant Credential Help', () => { }); it('should start credential help from credential list', () => { - cy.intercept('POST', '/rest/ai-assistant/chat', { + cy.intercept('POST', '/rest/ai/chat', { statusCode: 200, - fixture: 'aiAssistant/simple_message_response.json', + fixture: 'aiAssistant/responses/simple_message_response.json', }).as('chatRequest'); cy.visit(credentialsPage.url); @@ -446,9 +454,9 @@ describe('General help', () => { }); it('assistant returns code snippet', () => { - cy.intercept('POST', '/rest/ai-assistant/chat', { + cy.intercept('POST', '/rest/ai/chat', { statusCode: 200, - fixture: 'aiAssistant/code_snippet_response.json', + fixture: 'aiAssistant/responses/code_snippet_response.json', }).as('chatRequest'); aiAssistant.getters.askAssistantFloatingButton().should('be.visible'); @@ -492,4 +500,65 @@ describe('General help', () => { ); aiAssistant.getters.codeSnippet().should('have.text', '{{$json.body.city}}'); }); + + it('should send current context to support 
chat', () => { + cy.createFixtureWorkflow('aiAssistant/workflows/simple_http_request_workflow.json'); + cy.intercept('POST', '/rest/ai/chat', { + statusCode: 200, + fixture: 'aiAssistant/responses/simple_message_response.json', + }).as('chatRequest'); + + aiAssistant.getters.askAssistantFloatingButton().click(); + aiAssistant.actions.sendMessage('What is wrong with this workflow?'); + + cy.wait('@chatRequest').then((interception) => { + const { body } = interception.request; + // Body should contain the current workflow context + expect(body.payload).to.have.property('context'); + expect(body.payload.context).to.have.property('currentView'); + expect(body.payload.context.currentView.name).to.equal('NodeViewExisting'); + expect(body.payload.context).to.have.property('currentWorkflow'); + }); + }); + + it('should not send workflow context if nothing changed', () => { + cy.createFixtureWorkflow('aiAssistant/workflows/simple_http_request_workflow.json'); + cy.intercept('POST', '/rest/ai/chat', { + statusCode: 200, + fixture: 'aiAssistant/responses/simple_message_response.json', + }).as('chatRequest'); + + aiAssistant.getters.askAssistantFloatingButton().click(); + aiAssistant.actions.sendMessage('What is wrong with this workflow?'); + cy.wait('@chatRequest'); + + // Send another message without changing workflow or executing any node + aiAssistant.actions.sendMessage('And now?'); + + cy.wait('@chatRequest').then((interception) => { + const { body } = interception.request; + // Workflow context should be empty + expect(body.payload).to.have.property('context'); + expect(body.payload.context).not.to.have.property('currentWorkflow'); + }); + + // Update http request node url + wf.actions.openNode('HTTP Request'); + ndv.actions.typeIntoParameterInput('url', 'https://example.com'); + ndv.actions.close(); + // Also execute the workflow + wf.actions.executeWorkflow(); + + // Send another message + aiAssistant.actions.sendMessage('What about now?'); + cy.wait('@chatRequest').then((interception) => { + const { body } = interception.request; + // Both workflow and execution context should be sent + expect(body.payload).to.have.property('context'); + expect(body.payload.context).to.have.property('currentWorkflow'); + expect(body.payload.context.currentWorkflow).not.to.be.empty; + expect(body.payload.context).to.have.property('executionData'); + expect(body.payload.context.executionData).not.to.be.empty; + }); + }); }); diff --git a/cypress/e2e/5-ndv.cy.ts b/cypress/e2e/5-ndv.cy.ts index 4608b5eefc..a591d62895 100644 --- a/cypress/e2e/5-ndv.cy.ts +++ b/cypress/e2e/5-ndv.cy.ts @@ -674,6 +674,23 @@ describe('NDV', () => { ndv.getters.parameterInput('operation').find('input').should('have.value', 'Delete'); }); + it('Should show a notice when remote options cannot be fetched because of missing credentials', () => { + cy.intercept('POST', '/rest/dynamic-node-parameters/options', { statusCode: 403 }).as( + 'parameterOptions', + ); + + workflowPage.actions.addInitialNodeToCanvas(NOTION_NODE_NAME, { + keepNdvOpen: true, + action: 'Update a database page', + }); + + ndv.actions.addItemToFixedCollection('propertiesUi'); + ndv.getters + .parameterInput('key') + .find('input') + .should('have.value', 'Set up credential to see options'); + }); + it('Should show error state when remote options cannot be fetched', () => { cy.intercept('POST', '/rest/dynamic-node-parameters/options', { statusCode: 500 }).as( 'parameterOptions', @@ -684,6 +701,11 @@ describe('NDV', () => { action: 'Update a database page', }); + 
clickCreateNewCredential(); + setCredentialValues({ + apiKey: 'sk_test_123', + }); + ndv.actions.addItemToFixedCollection('propertiesUi'); ndv.getters .parameterInput('key') diff --git a/cypress/e2e/6-code-node.cy.ts b/cypress/e2e/6-code-node.cy.ts index 5b422b4589..5a6182c25a 100644 --- a/cypress/e2e/6-code-node.cy.ts +++ b/cypress/e2e/6-code-node.cy.ts @@ -91,28 +91,12 @@ return [] }); describe('Ask AI', () => { - it('tab should display based on experiment', () => { - WorkflowPage.actions.visit(); - cy.window().then((win) => { - win.featureFlags.override('011_ask_AI', 'control'); - WorkflowPage.actions.addInitialNodeToCanvas('Manual'); - WorkflowPage.actions.addNodeToCanvas('Code'); - WorkflowPage.actions.openNode('Code'); - - cy.getByTestId('code-node-tab-ai').should('not.exist'); - - ndv.actions.close(); - win.featureFlags.override('011_ask_AI', undefined); - WorkflowPage.actions.openNode('Code'); - cy.getByTestId('code-node-tab-ai').should('not.exist'); - }); - }); - describe('Enabled', () => { beforeEach(() => { + cy.enableFeature('askAi'); WorkflowPage.actions.visit(); - cy.window().then((win) => { - win.featureFlags.override('011_ask_AI', 'gpt3'); + + cy.window().then(() => { WorkflowPage.actions.addInitialNodeToCanvas('Manual'); WorkflowPage.actions.addNodeToCanvas('Code', true, true); }); @@ -157,7 +141,7 @@ return [] cy.getByTestId('ask-ai-prompt-input').type(prompt); - cy.intercept('POST', '/rest/ask-ai', { + cy.intercept('POST', '/rest/ai/ask-ai', { statusCode: 200, body: { data: { @@ -169,9 +153,7 @@ return [] cy.getByTestId('ask-ai-cta').click(); const askAiReq = cy.wait('@ask-ai'); - askAiReq - .its('request.body') - .should('have.keys', ['question', 'model', 'context', 'n8nVersion']); + askAiReq.its('request.body').should('have.keys', ['question', 'context', 'forNode']); askAiReq.its('context').should('have.keys', ['schema', 'ndvPushRef', 'pushRef']); @@ -195,7 +177,7 @@ return [] ]; handledCodes.forEach(({ code, message }) => { - cy.intercept('POST', '/rest/ask-ai', { + cy.intercept('POST', '/rest/ai/ask-ai', { statusCode: code, status: code, }).as('ask-ai'); diff --git a/cypress/fixtures/aiAssistant/apply_code_diff_response.json b/cypress/fixtures/aiAssistant/responses/apply_code_diff_response.json similarity index 100% rename from cypress/fixtures/aiAssistant/apply_code_diff_response.json rename to cypress/fixtures/aiAssistant/responses/apply_code_diff_response.json diff --git a/cypress/fixtures/aiAssistant/code_diff_suggestion_response.json b/cypress/fixtures/aiAssistant/responses/code_diff_suggestion_response.json similarity index 100% rename from cypress/fixtures/aiAssistant/code_diff_suggestion_response.json rename to cypress/fixtures/aiAssistant/responses/code_diff_suggestion_response.json diff --git a/cypress/fixtures/aiAssistant/code_snippet_response.json b/cypress/fixtures/aiAssistant/responses/code_snippet_response.json similarity index 100% rename from cypress/fixtures/aiAssistant/code_snippet_response.json rename to cypress/fixtures/aiAssistant/responses/code_snippet_response.json diff --git a/cypress/fixtures/aiAssistant/end_session_response.json b/cypress/fixtures/aiAssistant/responses/end_session_response.json similarity index 100% rename from cypress/fixtures/aiAssistant/end_session_response.json rename to cypress/fixtures/aiAssistant/responses/end_session_response.json diff --git a/cypress/fixtures/aiAssistant/node_execution_error_response.json b/cypress/fixtures/aiAssistant/responses/node_execution_error_response.json similarity index 100% rename from 
cypress/fixtures/aiAssistant/node_execution_error_response.json rename to cypress/fixtures/aiAssistant/responses/node_execution_error_response.json diff --git a/cypress/fixtures/aiAssistant/quick_reply_message_response.json b/cypress/fixtures/aiAssistant/responses/quick_reply_message_response.json similarity index 100% rename from cypress/fixtures/aiAssistant/quick_reply_message_response.json rename to cypress/fixtures/aiAssistant/responses/quick_reply_message_response.json diff --git a/cypress/fixtures/aiAssistant/simple_message_response.json b/cypress/fixtures/aiAssistant/responses/simple_message_response.json similarity index 100% rename from cypress/fixtures/aiAssistant/simple_message_response.json rename to cypress/fixtures/aiAssistant/responses/simple_message_response.json diff --git a/cypress/fixtures/aiAssistant/workflows/simple_http_request_workflow.json b/cypress/fixtures/aiAssistant/workflows/simple_http_request_workflow.json new file mode 100644 index 0000000000..28a0ee5359 --- /dev/null +++ b/cypress/fixtures/aiAssistant/workflows/simple_http_request_workflow.json @@ -0,0 +1,35 @@ +{ + "nodes": [ + { + "parameters": {}, + "id": "298d3dc9-5e99-4b3f-919e-05fdcdfbe2d0", + "name": "When clicking ‘Test workflow’", + "type": "n8n-nodes-base.manualTrigger", + "typeVersion": 1, + "position": [360, 220] + }, + { + "parameters": { + "options": {} + }, + "id": "65c32346-e939-4ec7-88a9-1f9184e2258d", + "name": "HTTP Request", + "type": "n8n-nodes-base.httpRequest", + "typeVersion": 4.2, + "position": [580, 220] + } + ], + "connections": { + "When clicking ‘Test workflow’": { + "main": [ + [ + { + "node": "HTTP Request", + "type": "main", + "index": 0 + } + ] + ] + } + } +} diff --git a/cypress/fixtures/aiAssistant/test_workflow.json b/cypress/fixtures/aiAssistant/workflows/test_workflow.json similarity index 100% rename from cypress/fixtures/aiAssistant/test_workflow.json rename to cypress/fixtures/aiAssistant/workflows/test_workflow.json diff --git a/cypress/pages/ndv.ts b/cypress/pages/ndv.ts index b775deec6d..cae1fb47b0 100644 --- a/cypress/pages/ndv.ts +++ b/cypress/pages/ndv.ts @@ -156,7 +156,7 @@ export class NDV extends BasePage { this.getters.nodeExecuteButton().first().click(); }, close: () => { - this.getters.backToCanvas().click(); + this.getters.backToCanvas().click({ force: true }); }, openInlineExpressionEditor: () => { cy.contains('Expression').invoke('show').click(); diff --git a/lefthook.yml b/lefthook.yml index cc39a32495..aa17417824 100644 --- a/lefthook.yml +++ b/lefthook.yml @@ -8,7 +8,7 @@ pre-commit: - merge - rebase prettier_check: - glob: 'packages/**/*.{vue,yml,md}' + glob: 'packages/**/*.{vue,yml,md,css,scss}' run: ./node_modules/.bin/prettier --write --ignore-unknown --no-error-on-unmatched-pattern {staged_files} stage_fixed: true skip: diff --git a/n8n.code-workspace b/n8n.code-workspace deleted file mode 100644 index 8f4183e8f0..0000000000 --- a/n8n.code-workspace +++ /dev/null @@ -1,7 +0,0 @@ -{ - "folders": [ - { - "path": "." 
- } - ] -} diff --git a/package.json b/package.json index 8253d86dfd..ee888f53dd 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "n8n-monorepo", - "version": "1.61.0", + "version": "1.63.0", "private": true, "engines": { "node": ">=20.15", @@ -15,7 +15,7 @@ "build:frontend": "turbo run build:frontend", "build:nodes": "turbo run build:nodes", "typecheck": "turbo typecheck", - "dev": "turbo run dev --parallel --env-mode=loose --filter=!n8n-design-system --filter=!@n8n/chat", + "dev": "turbo run dev --parallel --env-mode=loose --filter=!n8n-design-system --filter=!@n8n/chat --filter=!@n8n/task-runner", "dev:ai": "turbo run dev --parallel --env-mode=loose --filter=@n8n/nodes-langchain --filter=n8n --filter=n8n-core", "clean": "turbo run clean --parallel", "reset": "node scripts/ensure-zx.mjs && zx scripts/reset.mjs", @@ -59,7 +59,7 @@ "ts-jest": "^29.1.1", "tsc-alias": "^1.8.7", "tsc-watch": "^6.0.4", - "turbo": "2.0.6", + "turbo": "2.1.2", "typescript": "*", "zx": "^8.1.4" }, @@ -69,14 +69,15 @@ ], "overrides": { "@types/node": "^18.16.16", - "chokidar": "3.5.2", - "esbuild": "^0.20.2", + "chokidar": "^4.0.1", + "esbuild": "^0.24.0", "formidable": "3.5.1", "pug": "^3.0.3", "semver": "^7.5.4", "tslib": "^2.6.2", "tsconfig-paths": "^4.2.0", "typescript": "^5.6.2", + "vue-tsc": "^2.1.6", "ws": ">=8.17.1" }, "patchedDependencies": { diff --git a/packages/@n8n/api-types/package.json b/packages/@n8n/api-types/package.json index ec2bf1bd32..e2614bcf68 100644 --- a/packages/@n8n/api-types/package.json +++ b/packages/@n8n/api-types/package.json @@ -1,6 +1,6 @@ { "name": "@n8n/api-types", - "version": "0.3.0", + "version": "0.4.0", "scripts": { "clean": "rimraf dist .turbo", "dev": "pnpm watch", diff --git a/packages/@n8n/api-types/src/dto/index.ts b/packages/@n8n/api-types/src/dto/index.ts index 68c1972a46..41a55f050a 100644 --- a/packages/@n8n/api-types/src/dto/index.ts +++ b/packages/@n8n/api-types/src/dto/index.ts @@ -2,3 +2,4 @@ export { PasswordUpdateRequestDto } from './user/password-update-request.dto'; export { RoleChangeRequestDto } from './user/role-change-request.dto'; export { SettingsUpdateRequestDto } from './user/settings-update-request.dto'; export { UserUpdateRequestDto } from './user/user-update-request.dto'; +export { CommunityRegisteredRequestDto } from './license/community-registered-request.dto'; diff --git a/packages/@n8n/api-types/src/dto/license/__tests__/community-registered-request.dto.test.ts b/packages/@n8n/api-types/src/dto/license/__tests__/community-registered-request.dto.test.ts new file mode 100644 index 0000000000..84e583e63b --- /dev/null +++ b/packages/@n8n/api-types/src/dto/license/__tests__/community-registered-request.dto.test.ts @@ -0,0 +1,27 @@ +import { CommunityRegisteredRequestDto } from '../community-registered-request.dto'; + +describe('CommunityRegisteredRequestDto', () => { + it('should fail validation for missing email', () => { + const invalidRequest = {}; + + const result = CommunityRegisteredRequestDto.safeParse(invalidRequest); + + expect(result.success).toBe(false); + expect(result.error?.issues[0]).toEqual( + expect.objectContaining({ message: 'Required', path: ['email'] }), + ); + }); + + it('should fail validation for an invalid email', () => { + const invalidRequest = { + email: 'invalid-email', + }; + + const result = CommunityRegisteredRequestDto.safeParse(invalidRequest); + + expect(result.success).toBe(false); + expect(result.error?.issues[0]).toEqual( + expect.objectContaining({ message: 'Invalid email', path: 
['email'] }), + ); + }); +}); diff --git a/packages/@n8n/api-types/src/dto/license/community-registered-request.dto.ts b/packages/@n8n/api-types/src/dto/license/community-registered-request.dto.ts new file mode 100644 index 0000000000..9763787767 --- /dev/null +++ b/packages/@n8n/api-types/src/dto/license/community-registered-request.dto.ts @@ -0,0 +1,4 @@ +import { z } from 'zod'; +import { Z } from 'zod-class'; + +export class CommunityRegisteredRequestDto extends Z.class({ email: z.string().email() }) {} diff --git a/packages/@n8n/api-types/src/frontend-settings.ts b/packages/@n8n/api-types/src/frontend-settings.ts index d50408bc3c..5084344aeb 100644 --- a/packages/@n8n/api-types/src/frontend-settings.ts +++ b/packages/@n8n/api-types/src/frontend-settings.ts @@ -33,6 +33,7 @@ export interface FrontendSettings { endpointFormWaiting: string; endpointWebhook: string; endpointWebhookTest: string; + endpointWebhookWaiting: string; saveDataErrorExecution: WorkflowSettings.SaveDataExecution; saveDataSuccessExecution: WorkflowSettings.SaveDataExecution; saveManualExecutions: boolean; @@ -106,6 +107,9 @@ export interface FrontendSettings { aiAssistant: { enabled: boolean; }; + askAi: { + enabled: boolean; + }; deployment: { type: string; }; @@ -153,9 +157,6 @@ export interface FrontendSettings { banners: { dismissed: string[]; }; - ai: { - enabled: boolean; - }; workflowHistory: { pruneTime: number; licensePruneTime: number; diff --git a/packages/@n8n/api-types/src/scaling.ts b/packages/@n8n/api-types/src/scaling.ts index 8e15f06804..f0c3627e84 100644 --- a/packages/@n8n/api-types/src/scaling.ts +++ b/packages/@n8n/api-types/src/scaling.ts @@ -11,7 +11,7 @@ export type RunningJobSummary = { }; export type WorkerStatus = { - workerId: string; + senderId: string; runningJobsSummary: RunningJobSummary[]; freeMem: number; totalMem: number; diff --git a/packages/@n8n/benchmark/package.json b/packages/@n8n/benchmark/package.json index f0b11ddeb2..98edd1dabd 100644 --- a/packages/@n8n/benchmark/package.json +++ b/packages/@n8n/benchmark/package.json @@ -1,6 +1,6 @@ { "name": "@n8n/n8n-benchmark", - "version": "1.5.0", + "version": "1.7.0", "description": "Cli for running benchmark tests for n8n", "main": "dist/index", "scripts": { diff --git a/packages/@n8n/benchmark/scenarios/js-code-node-once-for-each/js-code-node-once-for-each.manifest.json b/packages/@n8n/benchmark/scenarios/js-code-node-once-for-each/js-code-node-once-for-each.manifest.json deleted file mode 100644 index 1d768f706e..0000000000 --- a/packages/@n8n/benchmark/scenarios/js-code-node-once-for-each/js-code-node-once-for-each.manifest.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "$schema": "../scenario.schema.json", - "name": "CodeNodeJsOnceForEach", - "description": "A JS Code Node that runs once for each item and adds, modifies and removes properties. 
The data of 5 items is generated using DebugHelper Node, and returned with RespondToWebhook Node.", - "scenarioData": { "workflowFiles": ["js-code-node-once-for-each.json"] }, - "scriptPath": "js-code-node-once-for-each.script.js" -} diff --git a/packages/@n8n/benchmark/scenarios/js-code-node-once-for-each/js-code-node-once-for-each.json b/packages/@n8n/benchmark/scenarios/js-code-node/js-code-node.json similarity index 58% rename from packages/@n8n/benchmark/scenarios/js-code-node-once-for-each/js-code-node-once-for-each.json rename to packages/@n8n/benchmark/scenarios/js-code-node/js-code-node.json index 7b89ffde96..d6f30ac5ea 100644 --- a/packages/@n8n/benchmark/scenarios/js-code-node-once-for-each/js-code-node-once-for-each.json +++ b/packages/@n8n/benchmark/scenarios/js-code-node/js-code-node.json @@ -1,9 +1,31 @@ { "createdAt": "2024-08-06T12:19:51.268Z", "updatedAt": "2024-08-06T12:20:45.000Z", - "name": "JS Code Node Once For Each", + "name": "JS Code Node", "active": true, "nodes": [ + { + "parameters": { + "respondWith": "allIncomingItems", + "options": {} + }, + "type": "n8n-nodes-base.respondToWebhook", + "typeVersion": 1.1, + "position": [1280, 460], + "id": "0067e317-09b8-478a-8c50-e19b4c9e294c", + "name": "Respond to Webhook" + }, + { + "parameters": { + "mode": "runOnceForEachItem", + "jsCode": "// Add new field\n$input.item.json.age = 10 + Math.floor(Math.random() * 30);\n// Mutate existing field\n$input.item.json.password = $input.item.json.password.split('').map(() => '*').join(\"\")\n// Remove field\ndelete $input.item.json.lastname\n// New object field\nconst emailParts = $input.item.json.email.split(\"@\")\n$input.item.json.emailData = {\n user: emailParts[0],\n domain: emailParts[1]\n}\n\nreturn $input.item;" + }, + "type": "n8n-nodes-base.code", + "typeVersion": 2, + "position": [1040, 460], + "id": "56d751c0-0d30-43c3-89fa-bebf3a9d436f", + "name": "OnceForEachItemJSCode" + }, { "parameters": { "httpMethod": "POST", @@ -13,68 +35,23 @@ }, "type": "n8n-nodes-base.webhook", "typeVersion": 2, - "position": [0, 0], - "id": "849350b3-4212-4416-a462-1cf331157d37", + "position": [580, 460], + "id": "417d749d-156c-4ffe-86ea-336f702dc5da", "name": "Webhook", "webhookId": "34ca1895-ccf4-4a4a-8bb8-a042f5edb567" }, { "parameters": { - "respondWith": "allIncomingItems", - "options": {} - }, - "type": "n8n-nodes-base.respondToWebhook", - "typeVersion": 1.1, - "position": [660, 0], - "id": "f0660aa1-8a65-490f-b5cd-f8d134070c13", - "name": "Respond to Webhook" - }, - { - "parameters": { - "category": "randomData", - "randomDataCount": 5 - }, - "type": "n8n-nodes-base.debugHelper", - "typeVersion": 1, - "position": [220, 0], - "id": "50f1efe8-bd2d-4061-9f51-b38c0e3daeb2", - "name": "DebugHelper" - }, - { - "parameters": { - "mode": "runOnceForEachItem", - "jsCode": "// Add new field\n$input.item.json.age = 10 + Math.floor(Math.random() * 30);\n// Mutate existing field\n$input.item.json.password = $input.item.json.password.split('').map(() => '*').join(\"\")\n// Remove field\ndelete $input.item.json.lastname\n// New object field\nconst emailParts = $input.item.json.email.split(\"@\")\n$input.item.json.emailData = {\n user: emailParts[0],\n domain: emailParts[1]\n}\n\nreturn $input.item;" + "jsCode": "const digits = '0123456789';\nconst uppercaseLetters = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ';\nconst lowercaseLetters = uppercaseLetters.toLowerCase();\nconst alphabet = [digits, uppercaseLetters, lowercaseLetters].join('').split('')\n\nconst randomInt = (min, max) => Math.floor(Math.random() * 
(max - min + 1)) + min;\nconst randomItem = (arr) => arr.at(randomInt(0, arr.length - 1))\nconst randomString = (len) => Array.from({ length: len }).map(() => randomItem(alphabet)).join('')\n\nconst randomUid = () => [8,4,4,4,8].map(len => randomString(len)).join(\"-\")\nconst randomEmail = () => `${randomString(8)}@${randomString(10)}.com`\n\nconst randomPerson = () => ({\n uid: randomUid(),\n email: randomEmail(),\n firstname: randomString(5),\n lastname: randomString(12),\n password: randomString(10)\n})\n\nreturn Array.from({ length: 100 }).map(() => ({\n json: randomPerson()\n}))" }, + "id": "c30db155-73ca-48b9-8860-c3fe7a0926fb", + "name": "Code", "type": "n8n-nodes-base.code", "typeVersion": 2, - "position": [440, 0], - "id": "f9f2f865-e228-403d-8e47-72308359e207", - "name": "OnceForEachItemJSCode" + "position": [820, 460] } ], "connections": { - "Webhook": { - "main": [ - [ - { - "node": "DebugHelper", - "type": "main", - "index": 0 - } - ] - ] - }, - "DebugHelper": { - "main": [ - [ - { - "node": "OnceForEachItemJSCode", - "type": "main", - "index": 0 - } - ] - ] - }, "OnceForEachItemJSCode": { "main": [ [ @@ -85,6 +62,28 @@ } ] ] + }, + "Webhook": { + "main": [ + [ + { + "node": "Code", + "type": "main", + "index": 0 + } + ] + ] + }, + "Code": { + "main": [ + [ + { + "node": "OnceForEachItemJSCode", + "type": "main", + "index": 0 + } + ] + ] } }, "settings": { "executionOrder": "v1" }, diff --git a/packages/@n8n/benchmark/scenarios/js-code-node/js-code-node.manifest.json b/packages/@n8n/benchmark/scenarios/js-code-node/js-code-node.manifest.json new file mode 100644 index 0000000000..8b0165baf7 --- /dev/null +++ b/packages/@n8n/benchmark/scenarios/js-code-node/js-code-node.manifest.json @@ -0,0 +1,7 @@ +{ + "$schema": "../scenario.schema.json", + "name": "CodeNodeJs", + "description": "A JS Code Node that first generates 100 items and then runs once for each item and adds, modifies and removes properties. The data returned with RespondToWebhook Node.", + "scenarioData": { "workflowFiles": ["js-code-node.json"] }, + "scriptPath": "js-code-node.script.js" +} diff --git a/packages/@n8n/benchmark/scenarios/js-code-node-once-for-each/js-code-node-once-for-each.script.js b/packages/@n8n/benchmark/scenarios/js-code-node/js-code-node.script.js similarity index 88% rename from packages/@n8n/benchmark/scenarios/js-code-node-once-for-each/js-code-node-once-for-each.script.js rename to packages/@n8n/benchmark/scenarios/js-code-node/js-code-node.script.js index 11e8e87ac3..74cef4f441 100644 --- a/packages/@n8n/benchmark/scenarios/js-code-node-once-for-each/js-code-node-once-for-each.script.js +++ b/packages/@n8n/benchmark/scenarios/js-code-node/js-code-node.script.js @@ -12,7 +12,7 @@ export default function () { try { const body = JSON.parse(r.body); - return Array.isArray(body) ? body.length === 5 : false; + return Array.isArray(body) ? 
body.length === 100 : false; } catch (error) { console.error('Error parsing response body: ', error); return false; diff --git a/packages/@n8n/benchmark/scripts/n8n-setups/postgres/docker-compose.yml b/packages/@n8n/benchmark/scripts/n8n-setups/postgres/docker-compose.yml index 2ca26c79b3..3cc08227c1 100644 --- a/packages/@n8n/benchmark/scripts/n8n-setups/postgres/docker-compose.yml +++ b/packages/@n8n/benchmark/scripts/n8n-setups/postgres/docker-compose.yml @@ -7,7 +7,7 @@ services: - ${MOCK_API_DATA_PATH}/mappings:/home/wiremock/mappings postgres: - image: postgres:16 + image: postgres:16.4 restart: always user: root:root environment: diff --git a/packages/@n8n/benchmark/scripts/n8n-setups/scaling-multi-main/docker-compose.yml b/packages/@n8n/benchmark/scripts/n8n-setups/scaling-multi-main/docker-compose.yml index 3298a47556..ca3ad9c23d 100644 --- a/packages/@n8n/benchmark/scripts/n8n-setups/scaling-multi-main/docker-compose.yml +++ b/packages/@n8n/benchmark/scripts/n8n-setups/scaling-multi-main/docker-compose.yml @@ -7,7 +7,7 @@ services: - ${MOCK_API_DATA_PATH}/mappings:/home/wiremock/mappings redis: - image: redis:6-alpine + image: redis:6.2.14-alpine restart: always ports: - 6379:6379 @@ -17,7 +17,7 @@ services: timeout: 3s postgres: - image: postgres:16 + image: postgres:16.4 restart: always environment: - POSTGRES_DB=n8n diff --git a/packages/@n8n/benchmark/scripts/n8n-setups/scaling-single-main/docker-compose.yml b/packages/@n8n/benchmark/scripts/n8n-setups/scaling-single-main/docker-compose.yml index 27590459d2..fe9e3a26c0 100644 --- a/packages/@n8n/benchmark/scripts/n8n-setups/scaling-single-main/docker-compose.yml +++ b/packages/@n8n/benchmark/scripts/n8n-setups/scaling-single-main/docker-compose.yml @@ -7,7 +7,7 @@ services: - ${MOCK_API_DATA_PATH}/mappings:/home/wiremock/mappings redis: - image: redis:6-alpine + image: redis:6.2.14-alpine ports: - 6379:6379 healthcheck: @@ -16,7 +16,7 @@ services: timeout: 3s postgres: - image: postgres:16 + image: postgres:16.4 user: root:root restart: always environment: diff --git a/packages/@n8n/benchmark/scripts/run-for-n8n-setup.mjs b/packages/@n8n/benchmark/scripts/run-for-n8n-setup.mjs index d3389f1e7b..8c8ea5f6dd 100755 --- a/packages/@n8n/benchmark/scripts/run-for-n8n-setup.mjs +++ b/packages/@n8n/benchmark/scripts/run-for-n8n-setup.mjs @@ -105,9 +105,8 @@ async function main() { console.error(error.message); console.error(''); await printContainerStatus(dockerComposeClient); - console.error(''); - await dumpLogs(dockerComposeClient); } finally { + await dumpLogs(dockerComposeClient); await dockerComposeClient.$('down'); } } @@ -118,7 +117,7 @@ async function printContainerStatus(dockerComposeClient) { } async function dumpLogs(dockerComposeClient) { - console.error('Container logs:'); + console.info('Container logs:'); await dockerComposeClient.$('logs'); } diff --git a/packages/@n8n/chat/README.md b/packages/@n8n/chat/README.md index 0ed53a5774..6299c80d12 100644 --- a/packages/@n8n/chat/README.md +++ b/packages/@n8n/chat/README.md @@ -184,6 +184,16 @@ createChat({ - **Type**: `string[]` - **Description**: The initial messages to be displayed in the Chat window. +### `allowFileUploads` +- **Type**: `Ref | boolean` +- **Default**: `false` +- **Description**: Whether to allow file uploads in the chat. If set to `true`, users will be able to upload files through the chat interface. 
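Moving `dumpLogs` out of the `catch` and into the `finally` block in `run-for-n8n-setup.mjs` above means container logs are now printed for successful benchmark runs as well, not only after a failure, and always before teardown. A condensed sketch of the resulting control flow (the client type is a stand-in for the zx-based Docker Compose client):

```ts
type ComposeClient = { $: (cmd: string) => Promise<void> };

async function runBenchmark(client: ComposeClient) {
  try {
    await client.$('up -d');
    // ... execute the scenario ...
  } catch (error) {
    console.error((error as Error).message);
  } finally {
    // Runs on success and failure alike, before the stack is torn down
    console.info('Container logs:');
    await client.$('logs');
    await client.$('down');
  }
}
```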
+ +### `allowedFilesMimeTypes` +- **Type**: `Ref | string` +- **Default**: `''` +- **Description**: A comma-separated list of allowed MIME types for file uploads. Only applicable if `allowFileUploads` is set to `true`. If left empty, all file types are allowed. For example: `'image/*,application/pdf'`. + ## Customization The Chat window is entirely customizable using CSS variables. diff --git a/packages/@n8n/chat/package.json b/packages/@n8n/chat/package.json index 0848422a05..24d6cf6f1c 100644 --- a/packages/@n8n/chat/package.json +++ b/packages/@n8n/chat/package.json @@ -1,6 +1,6 @@ { "name": "@n8n/chat", - "version": "0.26.0", + "version": "0.28.0", "scripts": { "dev": "pnpm run storybook", "build": "pnpm build:vite && pnpm build:bundle", @@ -50,7 +50,7 @@ "unplugin-icons": "^0.19.0", "vite": "catalog:frontend", "vitest": "catalog:frontend", - "vite-plugin-dts": "^3.9.1", + "vite-plugin-dts": "^4.2.3", "vue-tsc": "catalog:frontend" }, "files": [ diff --git a/packages/@n8n/chat/src/css/markdown.scss b/packages/@n8n/chat/src/css/markdown.scss index 6d219bbe02..070e6d6a5f 100644 --- a/packages/@n8n/chat/src/css/markdown.scss +++ b/packages/@n8n/chat/src/css/markdown.scss @@ -1,4 +1,20 @@ -@import 'highlight.js/styles/github.css'; +@use 'sass:meta'; + +@include meta.load-css('highlight.js/styles/github.css'); + +@mixin hljs-dark-theme { + @include meta.load-css('highlight.js/styles/github-dark-dimmed.css'); +} + +body { + &[data-theme='dark'] { + @include hljs-dark-theme; + } + + @media (prefers-color-scheme: dark) { + @include hljs-dark-theme; + } +} // https://github.com/pxlrbt/markdown-css .chat-message-markdown { @@ -561,7 +577,6 @@ kbd, /* different style for kbd? */ code { - background: #eee; padding: 0.1em 0.25em; border-radius: 0.2rem; -webkit-box-decoration-break: clone; diff --git a/packages/@n8n/config/package.json b/packages/@n8n/config/package.json index e1ac104f04..10c8cbcf5b 100644 --- a/packages/@n8n/config/package.json +++ b/packages/@n8n/config/package.json @@ -1,6 +1,6 @@ { "name": "@n8n/config", - "version": "1.11.0", + "version": "1.13.0", "scripts": { "clean": "rimraf dist .turbo", "dev": "pnpm watch", diff --git a/packages/@n8n/config/src/configs/logging.config.ts b/packages/@n8n/config/src/configs/logging.config.ts new file mode 100644 index 0000000000..c29bb6d5a8 --- /dev/null +++ b/packages/@n8n/config/src/configs/logging.config.ts @@ -0,0 +1,73 @@ +import { Config, Env, Nested } from '../decorators'; +import { StringArray } from '../utils'; + +/** + * Scopes (areas of functionality) to filter logs by. + * + * `executions` -> execution lifecycle + * `license` -> license SDK + * `scaling` -> scaling mode + */ +export const LOG_SCOPES = ['executions', 'license', 'scaling'] as const; + +export type LogScope = (typeof LOG_SCOPES)[number]; + +@Config +class FileLoggingConfig { + /** + * Max number of log files to keep, or max number of days to keep logs for. + * Once the limit is reached, the oldest log files will be rotated out. + * If using days, append a `d` suffix. Only for `file` log output. + * + * @example `N8N_LOG_FILE_COUNT_MAX=7` will keep at most 7 files. + * @example `N8N_LOG_FILE_COUNT_MAX=7d` will keep at most 7 days worth of files. + */ + @Env('N8N_LOG_FILE_COUNT_MAX') + fileCountMax: number = 100; + + /** Max size (in MiB) for each log file. Only for `file` log output. */ + @Env('N8N_LOG_FILE_SIZE_MAX') + fileSizeMax: number = 16; + + /** Location of the log files inside `~/.n8n`. Only for `file` log output. 
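The two chat options documented above are passed straight to `createChat`. A minimal sketch, assuming a placeholder webhook URL:

```ts
import { createChat } from '@n8n/chat';

createChat({
  // Placeholder: point this at your own Chat Trigger webhook
  webhookUrl: 'https://example.com/webhook/chat',
  // Show the file-upload control in the chat input
  allowFileUploads: true,
  // Restrict uploads to images and PDFs; leave '' to allow any type
  allowedFilesMimeTypes: 'image/*,application/pdf',
});
```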
*/ + @Env('N8N_LOG_FILE_LOCATION') + location: string = 'logs/n8n.log'; +} + +@Config +export class LoggingConfig { + /** + * Minimum level of logs to output. Logs with this or higher level will be output; + * logs with lower levels will not. Exception: `silent` disables all logging. + * + * @example `N8N_LOG_LEVEL=info` will output `error`, `warn` and `info` logs, but not `debug`. + */ + @Env('N8N_LOG_LEVEL') + level: 'error' | 'warn' | 'info' | 'debug' | 'silent' = 'info'; + + /** + * Where to output logs to. Options are: `console` or `file` or both in a comma-separated list. + * + * @example `N8N_LOG_OUTPUT=console,file` will output to both console and file. + */ + @Env('N8N_LOG_OUTPUT') + outputs: StringArray<'console' | 'file'> = ['console']; + + @Nested + file: FileLoggingConfig; + + /** + * Scopes to filter logs by. Nothing is filtered by default. + * + * Currently supported log scopes: + * - `executions` + * - `license` + * - `scaling` + * + * @example + * `N8N_LOG_SCOPES=license` + * `N8N_LOG_SCOPES=license,executions` + */ + @Env('N8N_LOG_SCOPES') + scopes: StringArray<LogScope> = []; +} diff --git a/packages/@n8n/config/src/configs/runners.config.ts b/packages/@n8n/config/src/configs/runners.config.ts new file mode 100644 index 0000000000..e7335e8827 --- /dev/null +++ b/packages/@n8n/config/src/configs/runners.config.ts @@ -0,0 +1,22 @@ +import { Config, Env } from '../decorators'; + +@Config +export class TaskRunnersConfig { + // Defaults to true for now + @Env('N8N_RUNNERS_DISABLED') + disabled: boolean = true; + + @Env('N8N_RUNNERS_PATH') + path: string = '/runners'; + + @Env('N8N_RUNNERS_AUTH_TOKEN') + authToken: string = ''; + + /** Port the task runners server should listen on */ + @Env('N8N_RUNNERS_SERVER_PORT') + port: number = 5679; + + /** IP address the task runners server should listen on */ + @Env('N8N_RUNNERS_SERVER_LISTEN_ADDRESS') + listen_address: string = '127.0.0.1'; +} diff --git a/packages/@n8n/config/src/configs/scaling-mode.config.ts b/packages/@n8n/config/src/configs/scaling-mode.config.ts index 750de77b07..05ee6b4841 100644 --- a/packages/@n8n/config/src/configs/scaling-mode.config.ts +++ b/packages/@n8n/config/src/configs/scaling-mode.config.ts @@ -2,13 +2,21 @@ import { Config, Env, Nested } from '../decorators'; @Config class HealthConfig { - /** Whether to enable the worker health check endpoint `/healthz`. */ + /** + * Whether to enable the worker health check endpoints: + * - `/healthz` (worker alive) + * - `/healthz/readiness` (worker connected to migrated database and connected to Redis) + */ @Env('QUEUE_HEALTH_CHECK_ACTIVE') active: boolean = false; - /** Port for worker to respond to health checks requests on, if enabled. */ + /** Port for worker server to listen on. */ @Env('QUEUE_HEALTH_CHECK_PORT') port: number = 5678; + + /** IP address for worker server to listen on. 
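Everything in the new `LoggingConfig` above is driven by environment variables and read back through `GlobalConfig`, the same way `config.test.ts` below asserts the defaults. A sketch of a typical setup (import paths illustrative; `Container` comes from the DI library n8n uses):

```ts
import { Container } from 'typedi';
import { GlobalConfig } from '@n8n/config';

process.env.N8N_LOG_LEVEL = 'debug';
process.env.N8N_LOG_OUTPUT = 'console,file'; // StringArray splits on commas
process.env.N8N_LOG_SCOPES = 'scaling,executions'; // emit only these scopes

const config = Container.get(GlobalConfig);
console.log(config.logging.level); // 'debug'
console.log(config.logging.outputs); // ['console', 'file']
console.log(config.logging.scopes); // ['scaling', 'executions']
```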
*/ + @Env('N8N_WORKER_SERVER_ADDRESS') + address: string = '0.0.0.0'; +} } @Config diff --git a/packages/@n8n/config/src/index.ts b/packages/@n8n/config/src/index.ts index 5098093db4..9044ffa0fa 100644 --- a/packages/@n8n/config/src/index.ts +++ b/packages/@n8n/config/src/index.ts @@ -5,8 +5,11 @@ import { EndpointsConfig } from './configs/endpoints.config'; import { EventBusConfig } from './configs/event-bus.config'; import { ExternalSecretsConfig } from './configs/external-secrets.config'; import { ExternalStorageConfig } from './configs/external-storage.config'; +import { LoggingConfig } from './configs/logging.config'; import { NodesConfig } from './configs/nodes.config'; import { PublicApiConfig } from './configs/public-api.config'; +import { TaskRunnersConfig } from './configs/runners.config'; +export { TaskRunnersConfig } from './configs/runners.config'; import { ScalingModeConfig } from './configs/scaling-mode.config'; import { SentryConfig } from './configs/sentry.config'; import { TemplatesConfig } from './configs/templates.config'; @@ -15,6 +18,9 @@ import { VersionNotificationsConfig } from './configs/version-notifications.conf import { WorkflowsConfig } from './configs/workflows.config'; import { Config, Env, Nested } from './decorators'; +export { LOG_SCOPES } from './configs/logging.config'; +export type { LogScope } from './configs/logging.config'; + @Config export class GlobalConfig { @Nested @@ -81,4 +87,10 @@ export class GlobalConfig { @Nested queue: ScalingModeConfig; + + @Nested + logging: LoggingConfig; + + @Nested + taskRunners: TaskRunnersConfig; } diff --git a/packages/@n8n/config/src/utils.ts b/packages/@n8n/config/src/utils.ts new file mode 100644 index 0000000000..c90fcb8266 --- /dev/null +++ b/packages/@n8n/config/src/utils.ts @@ -0,0 +1,7 @@ +export class StringArray<T extends string> extends Array<T> { + constructor(str: string) { + super(); + const parsed = str.split(',') as StringArray<T>; + return parsed.every((i) => typeof i === 'string') ? 
parsed : []; + } +} diff --git a/packages/@n8n/config/test/config.test.ts b/packages/@n8n/config/test/config.test.ts index 11fd97a5db..301022ca3e 100644 --- a/packages/@n8n/config/test/config.test.ts +++ b/packages/@n8n/config/test/config.test.ts @@ -198,6 +198,7 @@ describe('GlobalConfig', () => { health: { active: false, port: 5678, + address: '0.0.0.0', }, bull: { redis: { @@ -221,16 +222,32 @@ describe('GlobalConfig', () => { }, }, }, + taskRunners: { + disabled: true, + path: '/runners', + authToken: '', + listen_address: '127.0.0.1', + port: 5679, + }, sentry: { backendDsn: '', frontendDsn: '', }, + logging: { + level: 'info', + outputs: ['console'], + file: { + fileCountMax: 100, + fileSizeMax: 16, + location: 'logs/n8n.log', + }, + scopes: [], + }, }; it('should use all default values when no env variables are defined', () => { process.env = {}; const config = Container.get(GlobalConfig); - expect(deepCopy(config)).toEqual(defaultConfig); expect(mockFs.readFileSync).not.toHaveBeenCalled(); }); diff --git a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ToolsAgent/execute.ts b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ToolsAgent/execute.ts index a6dc4a63f2..90952bac41 100644 --- a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ToolsAgent/execute.ts +++ b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ToolsAgent/execute.ts @@ -1,27 +1,28 @@ -import { BINARY_ENCODING, NodeConnectionType, NodeOperationError } from 'n8n-workflow'; -import type { IExecuteFunctions, INodeExecutionData } from 'n8n-workflow'; - -import type { AgentAction, AgentFinish } from 'langchain/agents'; -import { AgentExecutor, createToolCallingAgent } from 'langchain/agents'; import type { BaseChatMemory } from '@langchain/community/memory/chat_memory'; +import { HumanMessage } from '@langchain/core/messages'; +import type { BaseMessage } from '@langchain/core/messages'; +import type { BaseOutputParser, StructuredOutputParser } from '@langchain/core/output_parsers'; import type { BaseMessagePromptTemplateLike } from '@langchain/core/prompts'; import { ChatPromptTemplate } from '@langchain/core/prompts'; -import { omit } from 'lodash'; +import { RunnableSequence } from '@langchain/core/runnables'; import type { Tool } from '@langchain/core/tools'; import { DynamicStructuredTool } from '@langchain/core/tools'; +import type { AgentAction, AgentFinish } from 'langchain/agents'; +import { AgentExecutor, createToolCallingAgent } from 'langchain/agents'; +import { OutputFixingParser } from 'langchain/output_parsers'; +import { omit } from 'lodash'; +import { BINARY_ENCODING, jsonParse, NodeConnectionType, NodeOperationError } from 'n8n-workflow'; +import type { IExecuteFunctions, INodeExecutionData } from 'n8n-workflow'; import type { ZodObject } from 'zod'; import { z } from 'zod'; -import type { BaseOutputParser, StructuredOutputParser } from '@langchain/core/output_parsers'; -import { OutputFixingParser } from 'langchain/output_parsers'; -import { HumanMessage } from '@langchain/core/messages'; -import { RunnableSequence } from '@langchain/core/runnables'; + +import { SYSTEM_MESSAGE } from './prompt'; import { isChatInstance, getPromptInputByType, getOptionalOutputParsers, getConnectedTools, } from '../../../../../utils/helpers'; -import { SYSTEM_MESSAGE } from './prompt'; function getOutputParserSchema(outputParser: BaseOutputParser): ZodObject { const parserType = outputParser.lc_namespace[outputParser.lc_namespace.length - 1]; @@ -74,6 +75,39 @@ async function 
extractBinaryMessages(ctx: IExecuteFunctions) { content: [...binaryMessages], }); } +/** + * Fixes empty content messages in agent steps. + * + * This function is necessary when using RunnableSequence.from in LangChain. + * If a tool doesn't have any arguments, LangChain returns input: '' (empty string). + * This can throw an error for some providers (like Anthropic) which expect the input to always be an object. + * This function replaces empty string inputs with empty objects to prevent such errors. + * + * @param steps - The agent steps to fix + * @returns The fixed agent steps + */ +function fixEmptyContentMessage(steps: AgentFinish | AgentAction[]) { + if (!Array.isArray(steps)) return steps; + + steps.forEach((step) => { + if ('messageLog' in step && step.messageLog !== undefined) { + if (Array.isArray(step.messageLog)) { + step.messageLog.forEach((message: BaseMessage) => { + if ('content' in message && Array.isArray(message.content)) { + // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access + (message.content as Array<{ input?: string | object }>).forEach((content) => { + if (content.input === '') { + content.input = {}; + } + }); + } + }); + } + } + }); + + return steps; +} export async function toolsAgentExecute(this: IExecuteFunctions): Promise { this.logger.debug('Executing Tools Agent'); @@ -156,6 +190,14 @@ export async function toolsAgentExecute(this: IExecuteFunctions): Promise) { + return { + returnValues: memory ? { output: JSON.stringify(output) } : output, + log: 'Final response formatted', + }; + } async function agentStepsParser( steps: AgentFinish | AgentAction[], ): Promise { @@ -168,24 +210,18 @@ export async function toolsAgentExecute(this: IExecuteFunctions): Promise; - return { - returnValues, - log: 'Final response formatted', - }; + return handleParsedStepOutput(returnValues); } } - // If the steps are an AgentFinish and the outputParser is defined it must mean that the LLM didn't use `format_final_response` tool so we will parse the output manually + + // If the steps are an AgentFinish and the outputParser is defined it must mean that the LLM didn't use `format_final_response` tool so we will try to parse the output manually if (outputParser && typeof steps === 'object' && (steps as AgentFinish).returnValues) { const finalResponse = (steps as AgentFinish).returnValues; const returnValues = (await outputParser.parse(finalResponse as unknown as string)) as Record< string, unknown >; - - return { - returnValues, - log: 'Final response formatted', - }; + return handleParsedStepOutput(returnValues); } return handleAgentFinishOutput(steps); } @@ -233,7 +269,7 @@ export async function toolsAgentExecute(this: IExecuteFunctions): Promise }>( + response.output as string, + ); + response.output = parsedOutput?.output ?? 
parsedOutput; + } + returnData.push({ json: omit( response, diff --git a/packages/@n8n/nodes-langchain/nodes/chains/ChainRetrievalQA/ChainRetrievalQa.node.ts b/packages/@n8n/nodes-langchain/nodes/chains/ChainRetrievalQA/ChainRetrievalQa.node.ts index 8249cbd90c..dfbdb3e9d1 100644 --- a/packages/@n8n/nodes-langchain/nodes/chains/ChainRetrievalQA/ChainRetrievalQa.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/chains/ChainRetrievalQA/ChainRetrievalQa.node.ts @@ -10,10 +10,21 @@ import { import { RetrievalQAChain } from 'langchain/chains'; import type { BaseLanguageModel } from '@langchain/core/language_models/base'; import type { BaseRetriever } from '@langchain/core/retrievers'; +import { + ChatPromptTemplate, + SystemMessagePromptTemplate, + HumanMessagePromptTemplate, + PromptTemplate, +} from '@langchain/core/prompts'; import { getTemplateNoticeField } from '../../../utils/sharedFields'; -import { getPromptInputByType } from '../../../utils/helpers'; +import { getPromptInputByType, isChatInstance } from '../../../utils/helpers'; import { getTracingConfig } from '../../../utils/tracing'; +const SYSTEM_PROMPT_TEMPLATE = `Use the following pieces of context to answer the users question. +If you don't know the answer, just say that you don't know, don't try to make up an answer. +---------------- +{context}`; + export class ChainRetrievalQa implements INodeType { description: INodeTypeDescription = { displayName: 'Question and Answer Chain', @@ -137,6 +148,26 @@ export class ChainRetrievalQa implements INodeType { }, }, }, + { + displayName: 'Options', + name: 'options', + type: 'collection', + default: {}, + placeholder: 'Add Option', + options: [ + { + displayName: 'System Prompt Template', + name: 'systemPromptTemplate', + type: 'string', + default: SYSTEM_PROMPT_TEMPLATE, + description: + 'Template string used for the system prompt. This should include the variable `{context}` for the provided context. 
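The `fixEmptyContentMessage` helper above is easiest to see with a concrete before/after. The following is a simplified re-implementation over narrowed types, not the node's actual code, showing the one transformation it performs:

```ts
type ToolContent = { input?: string | object };
type Step = { messageLog?: Array<{ content: ToolContent[] | string }> };

function fixEmptyInputs(steps: Step[]): Step[] {
  for (const step of steps) {
    for (const message of step.messageLog ?? []) {
      if (Array.isArray(message.content)) {
        for (const content of message.content) {
          // Providers like Anthropic reject '' and expect an object here
          if (content.input === '') content.input = {};
        }
      }
    }
  }
  return steps;
}

const fixed = fixEmptyInputs([
  { messageLog: [{ content: [{ input: '' }, { input: { city: 'Berlin' } }] }] },
]);
console.log(JSON.stringify(fixed));
// [{"messageLog":[{"content":[{"input":{}},{"input":{"city":"Berlin"}}]}]}]
```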
For text completion models, you should also include the variable `{question}` for the user’s query.', + typeOptions: { + rows: 6, + }, + }, + ], + }, ], }; @@ -154,7 +185,6 @@ export class ChainRetrievalQa implements INodeType { )) as BaseRetriever; const items = this.getInputData(); - const chain = RetrievalQAChain.fromLLM(model, retriever); const returnData: INodeExecutionData[] = []; @@ -178,6 +208,35 @@ export class ChainRetrievalQa implements INodeType { throw new NodeOperationError(this.getNode(), 'The ‘query‘ parameter is empty.'); } + const options = this.getNodeParameter('options', itemIndex, {}) as { + systemPromptTemplate?: string; + }; + + const chainParameters = {} as { + prompt?: PromptTemplate | ChatPromptTemplate; + }; + + if (options.systemPromptTemplate !== undefined) { + if (isChatInstance(model)) { + const messages = [ + SystemMessagePromptTemplate.fromTemplate(options.systemPromptTemplate), + HumanMessagePromptTemplate.fromTemplate('{question}'), + ]; + const chatPromptTemplate = ChatPromptTemplate.fromMessages(messages); + + chainParameters.prompt = chatPromptTemplate; + } else { + const completionPromptTemplate = new PromptTemplate({ + template: options.systemPromptTemplate, + inputVariables: ['context', 'question'], + }); + + chainParameters.prompt = completionPromptTemplate; + } + } + + const chain = RetrievalQAChain.fromLLM(model, retriever, chainParameters); + const response = await chain.withConfig(getTracingConfig(this)).invoke({ query }); returnData.push({ json: { response } }); } catch (error) { diff --git a/packages/@n8n/nodes-langchain/nodes/chains/InformationExtractor/InformationExtractor.node.ts b/packages/@n8n/nodes-langchain/nodes/chains/InformationExtractor/InformationExtractor.node.ts index 0399fc2d5a..7ccfddc5e4 100644 --- a/packages/@n8n/nodes-langchain/nodes/chains/InformationExtractor/InformationExtractor.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/chains/InformationExtractor/InformationExtractor.node.ts @@ -262,7 +262,7 @@ export class InformationExtractor implements INodeType { } const zodSchemaSandbox = getSandboxWithZod(this, jsonSchema, 0); - const zodSchema = (await zodSchemaSandbox.runCode()) as z.ZodSchema; + const zodSchema = await zodSchemaSandbox.runCode>(); parser = OutputFixingParser.fromLLM(llm, StructuredOutputParser.fromZodSchema(zodSchema)); } diff --git a/packages/@n8n/nodes-langchain/nodes/chains/TextClassifier/TextClassifier.node.ts b/packages/@n8n/nodes-langchain/nodes/chains/TextClassifier/TextClassifier.node.ts index 3c0740664c..7afc317c37 100644 --- a/packages/@n8n/nodes-langchain/nodes/chains/TextClassifier/TextClassifier.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/chains/TextClassifier/TextClassifier.node.ts @@ -1,3 +1,8 @@ +import type { BaseLanguageModel } from '@langchain/core/language_models/base'; +import { HumanMessage } from '@langchain/core/messages'; +import { SystemMessagePromptTemplate, ChatPromptTemplate } from '@langchain/core/prompts'; +import { OutputFixingParser, StructuredOutputParser } from 'langchain/output_parsers'; +import { NodeOperationError, NodeConnectionType } from 'n8n-workflow'; import type { IDataObject, IExecuteFunctions, @@ -6,14 +11,8 @@ import type { INodeType, INodeTypeDescription, } from 'n8n-workflow'; - -import { NodeConnectionType } from 'n8n-workflow'; - -import type { BaseLanguageModel } from '@langchain/core/language_models/base'; -import { HumanMessage } from '@langchain/core/messages'; -import { SystemMessagePromptTemplate, ChatPromptTemplate } from 
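The new `systemPromptTemplate` option above branches on the model kind: chat models get a system/human message pair, while text-completion models get a single prompt template that must carry both variables itself. Here is the same branching in isolation (model detection reduced to a boolean):

```ts
import {
  ChatPromptTemplate,
  HumanMessagePromptTemplate,
  PromptTemplate,
  SystemMessagePromptTemplate,
} from '@langchain/core/prompts';

function buildPrompt(systemPromptTemplate: string, isChatModel: boolean) {
  if (isChatModel) {
    // {question} is filled in per item at invoke time
    return ChatPromptTemplate.fromMessages([
      SystemMessagePromptTemplate.fromTemplate(systemPromptTemplate),
      HumanMessagePromptTemplate.fromTemplate('{question}'),
    ]);
  }
  // Completion models need {context} and {question} in one template
  return new PromptTemplate({
    template: systemPromptTemplate,
    inputVariables: ['context', 'question'],
  });
}
```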
'@langchain/core/prompts'; -import { OutputFixingParser, StructuredOutputParser } from 'langchain/output_parsers'; import { z } from 'zod'; + import { getTracingConfig } from '../../../utils/tracing'; const SYSTEM_PROMPT_TEMPLATE = @@ -172,11 +171,15 @@ export class TextClassifier implements INodeType { 0, )) as BaseLanguageModel; - const categories = this.getNodeParameter('categories.categories', 0) as Array<{ + const categories = this.getNodeParameter('categories.categories', 0, []) as Array<{ category: string; description: string; }>; + if (categories.length === 0) { + throw new NodeOperationError(this.getNode(), 'At least one category must be defined'); + } + const options = this.getNodeParameter('options', 0, {}) as { multiClass: boolean; fallback?: string; @@ -229,6 +232,7 @@ export class TextClassifier implements INodeType { const systemPromptTemplateOpt = this.getNodeParameter( 'options.systemPromptTemplate', itemIdx, + SYSTEM_PROMPT_TEMPLATE, ) as string; const systemPromptTemplate = SystemMessagePromptTemplate.fromTemplate( `${systemPromptTemplateOpt ?? SYSTEM_PROMPT_TEMPLATE} diff --git a/packages/@n8n/nodes-langchain/nodes/code/Code.node.ts b/packages/@n8n/nodes-langchain/nodes/code/Code.node.ts index 085dec98c2..abe9b01530 100644 --- a/packages/@n8n/nodes-langchain/nodes/code/Code.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/code/Code.node.ts @@ -107,7 +107,7 @@ function getSandbox( } // eslint-disable-next-line @typescript-eslint/unbound-method - const sandbox = new JavaScriptSandbox(context, code, itemIndex, this.helpers, { + const sandbox = new JavaScriptSandbox(context, code, this.helpers, { resolver: vmResolver, }); @@ -368,7 +368,7 @@ export class Code implements INodeType { } const sandbox = getSandbox.call(this, code.supplyData.code, { itemIndex }); - const response = (await sandbox.runCode()) as Tool; + const response = await sandbox.runCode(); return { response: logWrapper(response, this), diff --git a/packages/@n8n/nodes-langchain/nodes/document_loaders/DocumentGithubLoader/DocumentGithubLoader.node.ts b/packages/@n8n/nodes-langchain/nodes/document_loaders/DocumentGithubLoader/DocumentGithubLoader.node.ts index 7123ebbc38..916f0e7159 100644 --- a/packages/@n8n/nodes-langchain/nodes/document_loaders/DocumentGithubLoader/DocumentGithubLoader.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/document_loaders/DocumentGithubLoader/DocumentGithubLoader.node.ts @@ -109,17 +109,22 @@ export class DocumentGithubLoader implements INodeType { 0, )) as CharacterTextSplitter | undefined; + const { index } = this.addInputData(NodeConnectionType.AiDocument, [ + [{ json: { repository, branch, ignorePaths, recursive } }], + ]); const docs = new GithubRepoLoader(repository, { branch, ignorePaths: (ignorePaths ?? '').split(',').map((p) => p.trim()), recursive, accessToken: (credentials.accessToken as string) || '', + apiUrl: credentials.server as string, }); const loadedDocs = textSplitter ? 
await textSplitter.splitDocuments(await docs.load()) : await docs.load(); + this.addOutputData(NodeConnectionType.AiDocument, index, [[{ json: { loadedDocs } }]]); return { response: logWrapper(loadedDocs, this), }; diff --git a/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserStructured/OutputParserStructured.node.ts b/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserStructured/OutputParserStructured.node.ts index 027ce04e6e..6ce6bff76b 100644 --- a/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserStructured/OutputParserStructured.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserStructured/OutputParserStructured.node.ts @@ -48,7 +48,7 @@ export class N8nStructuredOutputParser extends Structure sandboxedSchema: JavaScriptSandbox, nodeVersion: number, ): Promise>> { - const zodSchema = (await sandboxedSchema.runCode()) as z.ZodSchema; + const zodSchema = await sandboxedSchema.runCode>(); let returnSchema: z.ZodSchema; if (nodeVersion === 1) { diff --git a/packages/@n8n/nodes-langchain/nodes/tools/ToolCode/ToolCode.node.ts b/packages/@n8n/nodes-langchain/nodes/tools/ToolCode/ToolCode.node.ts index df68fb0c6a..7980f5fa9d 100644 --- a/packages/@n8n/nodes-langchain/nodes/tools/ToolCode/ToolCode.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/tools/ToolCode/ToolCode.node.ts @@ -199,9 +199,9 @@ export class ToolCode implements INodeType { let sandbox: Sandbox; if (language === 'javaScript') { - sandbox = new JavaScriptSandbox(context, code, index, this.helpers); + sandbox = new JavaScriptSandbox(context, code, this.helpers); } else { - sandbox = new PythonSandbox(context, code, index, this.helpers); + sandbox = new PythonSandbox(context, code, this.helpers); } sandbox.on( @@ -216,7 +216,7 @@ export class ToolCode implements INodeType { const runFunction = async (query: string | IDataObject): Promise => { const sandbox = getSandbox(query, itemIndex); - return await (sandbox.runCode() as Promise); + return await sandbox.runCode(); }; const toolHandler = async (query: string | IDataObject): Promise => { @@ -274,7 +274,7 @@ export class ToolCode implements INodeType { : jsonParse(inputSchema); const zodSchemaSandbox = getSandboxWithZod(this, jsonSchema, 0); - const zodSchema = (await zodSchemaSandbox.runCode()) as DynamicZodObject; + const zodSchema = await zodSchemaSandbox.runCode(); tool = new DynamicStructuredTool({ schema: zodSchema, diff --git a/packages/@n8n/nodes-langchain/nodes/tools/ToolHttpRequest/ToolHttpRequest.node.ts b/packages/@n8n/nodes-langchain/nodes/tools/ToolHttpRequest/ToolHttpRequest.node.ts index 421e85e1b5..32f6be42e7 100644 --- a/packages/@n8n/nodes-langchain/nodes/tools/ToolHttpRequest/ToolHttpRequest.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/tools/ToolHttpRequest/ToolHttpRequest.node.ts @@ -275,7 +275,11 @@ export class ToolHttpRequest implements INodeType { method: this.getNodeParameter('method', itemIndex, 'GET') as IHttpRequestMethods, url: this.getNodeParameter('url', itemIndex) as string, qs: {}, - headers: {}, + headers: { + // FIXME: This is a workaround to prevent the node from sending a default User-Agent (`n8n`) when the header is not set. 
+ // Needs to be replaced with a proper fix after NODE-1777 is resolved + 'User-Agent': undefined, + }, body: {}, }; diff --git a/packages/@n8n/nodes-langchain/nodes/tools/ToolWorkflow/ToolWorkflow.node.ts b/packages/@n8n/nodes-langchain/nodes/tools/ToolWorkflow/ToolWorkflow.node.ts index 0b00e17ac4..352a727d11 100644 --- a/packages/@n8n/nodes-langchain/nodes/tools/ToolWorkflow/ToolWorkflow.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/tools/ToolWorkflow/ToolWorkflow.node.ts @@ -530,7 +530,7 @@ export class ToolWorkflow implements INodeType { : jsonParse(inputSchema); const zodSchemaSandbox = getSandboxWithZod(this, jsonSchema, 0); - const zodSchema = (await zodSchemaSandbox.runCode()) as DynamicZodObject; + const zodSchema = await zodSchemaSandbox.runCode(); tool = new DynamicStructuredTool({ schema: zodSchema, diff --git a/packages/@n8n/nodes-langchain/nodes/trigger/ChatTrigger/ChatTrigger.node.ts b/packages/@n8n/nodes-langchain/nodes/trigger/ChatTrigger/ChatTrigger.node.ts index 5c53a69006..489b4fe28b 100644 --- a/packages/@n8n/nodes-langchain/nodes/trigger/ChatTrigger/ChatTrigger.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/trigger/ChatTrigger/ChatTrigger.node.ts @@ -1,4 +1,6 @@ -import { Node, NodeConnectionType } from 'n8n-workflow'; +import type { BaseChatMemory } from '@langchain/community/memory/chat_memory'; +import { pick } from 'lodash'; +import { Node, NodeConnectionType, commonCORSParameters } from 'n8n-workflow'; import type { IDataObject, IWebhookFunctions, @@ -10,10 +12,8 @@ import type { INodeProperties, } from 'n8n-workflow'; -import { pick } from 'lodash'; -import type { BaseChatMemory } from '@langchain/community/memory/chat_memory'; -import { createPage } from './templates'; import { validateAuth } from './GenericFunctions'; +import { createPage } from './templates'; import type { LoadPreviousSessionChatOption } from './types'; const CHAT_TRIGGER_PATH_IDENTIFIER = 'chat'; @@ -56,7 +56,6 @@ export class ChatTrigger extends Node { ], }, }, - supportsCORS: true, maxNodes: 1, inputs: `={{ (() => { if (!['hostedChat', 'webhook'].includes($parameter.mode)) { @@ -241,6 +240,15 @@ export class ChatTrigger extends Node { placeholder: 'Add Field', default: {}, options: [ + // CORS parameters are only valid for when chat is used in hosted or webhook mode + ...commonCORSParameters.map((p) => ({ + ...p, + displayOptions: { + show: { + '/mode': ['hostedChat', 'webhook'], + }, + }, + })), { ...allowFileUploadsOption, displayOptions: { diff --git a/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStorePGVector/VectorStorePGVector.node.ts b/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStorePGVector/VectorStorePGVector.node.ts index 7e14eb4887..0c9a148bec 100644 --- a/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStorePGVector/VectorStorePGVector.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStorePGVector/VectorStorePGVector.node.ts @@ -1,14 +1,16 @@ -import { type INodeProperties } from 'n8n-workflow'; import { PGVectorStore, type DistanceStrategy, type PGVectorStoreArgs, } from '@langchain/community/vectorstores/pgvector'; -import { configurePostgres } from 'n8n-nodes-base/dist/nodes/Postgres/v2/transport'; +import type { EmbeddingsInterface } from '@langchain/core/embeddings'; import type { PostgresNodeCredentials } from 'n8n-nodes-base/dist/nodes/Postgres/v2/helpers/interfaces'; +import { configurePostgres } from 'n8n-nodes-base/dist/nodes/Postgres/v2/transport'; +import type { INodeProperties } from 'n8n-workflow'; import 
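The Chat Trigger change above replaces the blanket `supportsCORS: true` with `commonCORSParameters`, spread into the options collection and scoped so the fields only appear in hosted-chat and webhook modes. The spread-and-override pattern generalizes to any shared parameter set; a small sketch:

```ts
import type { INodeProperties } from 'n8n-workflow';

// Scope shared parameters to specific values of a top-level 'mode' parameter.
// The leading '/' in '/mode' resolves the path from the node root, since the
// parameters live inside an options collection.
function scopeToModes(params: INodeProperties[], modes: string[]): INodeProperties[] {
  return params.map((p) => ({
    ...p,
    displayOptions: { show: { '/mode': modes } },
  }));
}
```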
type pg from 'pg'; -import { createVectorStoreNode } from '../shared/createVectorStoreNode'; + import { metadataFilterField } from '../../../utils/sharedFields'; +import { createVectorStoreNode } from '../shared/createVectorStoreNode'; type CollectionOptions = { useCollection?: boolean; @@ -177,13 +179,46 @@ const retrieveFields: INodeProperties[] = [ }, ]; +/** + * Extended PGVectorStore class to handle custom filtering. + * This wrapper is necessary because when used as a retriever, + * similaritySearchVectorWithScore should use this.filter instead of + * expecting it from the parameter + */ +class ExtendedPGVectorStore extends PGVectorStore { + static async initialize( + embeddings: EmbeddingsInterface, + args: PGVectorStoreArgs & { dimensions?: number }, + ): Promise { + const { dimensions, ...rest } = args; + const postgresqlVectorStore = new this(embeddings, rest); + + await postgresqlVectorStore._initializeClient(); + await postgresqlVectorStore.ensureTableInDatabase(dimensions); + if (postgresqlVectorStore.collectionTableName) { + await postgresqlVectorStore.ensureCollectionTableInDatabase(); + } + + return postgresqlVectorStore; + } + + async similaritySearchVectorWithScore( + query: number[], + k: number, + filter?: PGVectorStore['FilterType'], + ) { + const mergedFilter = { ...this.filter, ...filter }; + return await super.similaritySearchVectorWithScore(query, k, mergedFilter); + } +} + export const VectorStorePGVector = createVectorStoreNode({ meta: { description: 'Work with your data in Postgresql with the PGVector extension', icon: 'file:postgres.svg', displayName: 'Postgres PGVector Store', docsUrl: - 'https://docs.n8n.io/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.vectorstoresupabase/', + 'https://docs.n8n.io/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.vectorstorepgvector/', name: 'vectorStorePGVector', credentials: [ { @@ -236,7 +271,7 @@ export const VectorStorePGVector = createVectorStoreNode({ 'cosine', ) as DistanceStrategy; - return await PGVectorStore.initialize(embeddings, config); + return await ExtendedPGVectorStore.initialize(embeddings, config); }, async populateVectorStore(context, embeddings, documents, itemIndex) { // NOTE: if you are to create the HNSW index before use, you need to consider moving the distanceStrategy field to diff --git a/packages/@n8n/nodes-langchain/nodes/vector_store/shared/createVectorStoreNode.ts b/packages/@n8n/nodes-langchain/nodes/vector_store/shared/createVectorStoreNode.ts index 10ea879bdd..d487969073 100644 --- a/packages/@n8n/nodes-langchain/nodes/vector_store/shared/createVectorStoreNode.ts +++ b/packages/@n8n/nodes-langchain/nodes/vector_store/shared/createVectorStoreNode.ts @@ -88,25 +88,25 @@ function getOperationModeOptions(args: VectorStoreNodeConstructorArgs): INodePro name: 'Get Many', value: 'load', description: 'Get many ranked documents from vector store for query', - action: 'Get many ranked documents from vector store for query', + action: 'Get ranked documents from vector store', }, { name: 'Insert Documents', value: 'insert', description: 'Insert documents into vector store', - action: 'Insert documents into vector store', + action: 'Add documents to vector store', }, { name: 'Retrieve Documents (For Agent/Chain)', value: 'retrieve', description: 'Retrieve documents from vector store to be used with AI nodes', - action: 'Retrieve documents from vector store to be used with AI nodes', + action: 'Retrieve documents for AI processing', }, { name: 'Update Documents', value: 
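Why the `ExtendedPGVectorStore` subclass above exists: in retriever mode, LangChain invokes `similaritySearchVectorWithScore` without the metadata filter configured on the node, so the override folds `this.filter` back in. The merge semantics in isolation (a per-call key wins because it spreads last):

```ts
import type { PGVectorStore } from '@langchain/community/vectorstores/pgvector';

function mergeFilters(
  storeFilter: PGVectorStore['FilterType'] | undefined,
  callFilter: PGVectorStore['FilterType'] | undefined,
) {
  return { ...storeFilter, ...callFilter };
}

console.log(mergeFilters({ source: 'docs' }, undefined)); // { source: 'docs' }
console.log(mergeFilters({ source: 'docs' }, { source: 'faq' })); // { source: 'faq' }
```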
'update', description: 'Update documents in vector store by ID', - action: 'Update documents in vector store by ID', + action: 'Update vector store documents', }, ]; diff --git a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/assistant/message.operation.ts b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/assistant/message.operation.ts index 3ec2e46eea..cf770e4057 100644 --- a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/assistant/message.operation.ts +++ b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/assistant/message.operation.ts @@ -1,30 +1,27 @@ +import type { BaseMessage } from '@langchain/core/messages'; import { AgentExecutor } from 'langchain/agents'; - -import { OpenAIAssistantRunnable } from 'langchain/experimental/openai_assistant'; import type { OpenAIToolType } from 'langchain/dist/experimental/openai_assistant/schema'; -import { OpenAI as OpenAIClient } from 'openai'; - -import { - ApplicationError, - NodeConnectionType, - NodeOperationError, - updateDisplayOptions, -} from 'n8n-workflow'; +import { OpenAIAssistantRunnable } from 'langchain/experimental/openai_assistant'; +import type { BufferWindowMemory } from 'langchain/memory'; +import omit from 'lodash/omit'; import type { IDataObject, IExecuteFunctions, INodeExecutionData, INodeProperties, } from 'n8n-workflow'; - -import type { BufferWindowMemory } from 'langchain/memory'; -import omit from 'lodash/omit'; -import type { BaseMessage } from '@langchain/core/messages'; -import { formatToOpenAIAssistantTool } from '../../helpers/utils'; -import { assistantRLC } from '../descriptions'; +import { + ApplicationError, + NodeConnectionType, + NodeOperationError, + updateDisplayOptions, +} from 'n8n-workflow'; +import { OpenAI as OpenAIClient } from 'openai'; import { getConnectedTools } from '../../../../../utils/helpers'; import { getTracingConfig } from '../../../../../utils/tracing'; +import { formatToOpenAIAssistantTool } from '../../helpers/utils'; +import { assistantRLC } from '../descriptions'; const properties: INodeProperties[] = [ assistantRLC, @@ -63,6 +60,46 @@ const properties: INodeProperties[] = [ }, }, }, + { + displayName: 'Memory', + name: 'memory', + type: 'options', + options: [ + { + // eslint-disable-next-line n8n-nodes-base/node-param-display-name-miscased + name: 'Use memory connector', + value: 'connector', + description: 'Connect one of the supported memory nodes', + }, + { + // eslint-disable-next-line n8n-nodes-base/node-param-display-name-miscased + name: 'Use thread ID', + value: 'threadId', + description: 'Specify the ID of the thread to continue', + }, + ], + displayOptions: { + show: { + '@version': [{ _cnd: { gte: 1.6 } }], + }, + }, + default: 'connector', + }, + { + displayName: 'Thread ID', + name: 'threadId', + type: 'string', + default: '', + placeholder: '', + description: 'The ID of the thread to continue, a new thread will be created if not specified', + hint: 'If the thread ID is empty or undefined a new thread will be created and included in the response', + displayOptions: { + show: { + '@version': [{ _cnd: { gte: 1.6 } }], + memory: ['threadId'], + }, + }, + }, { displayName: 'Connect your own custom n8n tools to this node on the canvas', name: 'noticeTools', @@ -201,9 +238,19 @@ export async function execute(this: IExecuteFunctions, i: number): Promise= 1.6 && this.getNodeParameter('memory', i) === 'connector'; + const memory = + useMemoryConnector || nodeVersion < 1.6 + ? 
((await this.getInputConnectionData(NodeConnectionType.AiMemory, 0)) as + | BufferWindowMemory + | undefined) + : undefined; + + const threadId = + nodeVersion >= 1.6 && !useMemoryConnector + ? (this.getNodeParameter('threadId', i) as string) + : undefined; const chainValues: IDataObject = { content: input, @@ -231,6 +278,8 @@ export async function execute(this: IExecuteFunctions, i: number): Promise { return `${capitalize(operation)} ${capitalize(resource)}`; }; -const configureNodeInputs = (resource: string, operation: string, hideTools: string) => { +const configureNodeInputs = ( + resource: string, + operation: string, + hideTools: string, + memory: string | undefined, +) => { if (resource === 'assistant' && operation === 'message') { - return [ + const inputs: INodeInputConfiguration[] = [ { type: NodeConnectionType.Main }, - { type: NodeConnectionType.AiMemory, displayName: 'Memory', maxConnections: 1 }, { type: NodeConnectionType.AiTool, displayName: 'Tools' }, ]; + if (memory !== 'threadId') { + inputs.push({ type: NodeConnectionType.AiMemory, displayName: 'Memory', maxConnections: 1 }); + } + return inputs; } if (resource === 'text' && operation === 'message') { if (hideTools === 'hide') { @@ -69,7 +77,7 @@ export const versionDescription: INodeTypeDescription = { name: 'openAi', icon: { light: 'file:openAi.svg', dark: 'file:openAi.dark.svg' }, group: ['transform'], - version: [1, 1.1, 1.2, 1.3, 1.4, 1.5], + version: [1, 1.1, 1.2, 1.3, 1.4, 1.5, 1.6], subtitle: `={{(${prettifyOperation})($parameter.resource, $parameter.operation)}}`, description: 'Message an assistant or GPT, analyze images, generate audio, etc.', defaults: { @@ -89,7 +97,7 @@ export const versionDescription: INodeTypeDescription = { ], }, }, - inputs: `={{(${configureNodeInputs})($parameter.resource, $parameter.operation, $parameter.hideTools)}}`, + inputs: `={{(${configureNodeInputs})($parameter.resource, $parameter.operation, $parameter.hideTools, $parameter.memory ?? 
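The version-gated memory handling above reduces to a small decision: nodes before v1.6 always use the memory connector, while v1.6+ can instead continue an existing assistant thread by ID, creating a new thread when the ID is empty. A condensed sketch of that decision (n8n context accessors stubbed out):

```ts
type MemoryMode = 'connector' | 'threadId';

function resolveMemory(nodeVersion: number, mode: MemoryMode, threadIdParam: string) {
  const useMemoryConnector = nodeVersion >= 1.6 && mode === 'connector';
  return {
    // Pre-1.6 nodes have no choice: the memory connector input is used
    useConnector: useMemoryConnector || nodeVersion < 1.6,
    // An empty thread ID means the OpenAI API creates a new thread
    threadId: nodeVersion >= 1.6 && !useMemoryConnector ? threadIdParam || undefined : undefined,
  };
}

console.log(resolveMemory(1.5, 'threadId', 'thread_abc')); // { useConnector: true, threadId: undefined }
console.log(resolveMemory(1.6, 'threadId', '')); // { useConnector: false, threadId: undefined }
```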
undefined)}}`, outputs: [NodeConnectionType.Main], credentials: [ { diff --git a/packages/@n8n/nodes-langchain/package.json b/packages/@n8n/nodes-langchain/package.json index ec7001286d..4df337ed52 100644 --- a/packages/@n8n/nodes-langchain/package.json +++ b/packages/@n8n/nodes-langchain/package.json @@ -1,6 +1,6 @@ { "name": "@n8n/n8n-nodes-langchain", - "version": "1.61.0", + "version": "1.63.0", "description": "", "main": "index.js", "scripts": { @@ -124,18 +124,20 @@ "@types/cheerio": "^0.22.15", "@types/html-to-text": "^9.0.1", "@types/json-schema": "^7.0.15", + "@types/pg": "^8.11.6", "@types/temp": "^0.9.1", "n8n-core": "workspace:*" }, "dependencies": { - "@getzep/zep-cloud": "1.0.11", + "@aws-sdk/client-sso-oidc": "3.666.0", + "@getzep/zep-cloud": "1.0.12", "@getzep/zep-js": "0.9.0", "@google-ai/generativelanguage": "2.6.0", "@google-cloud/resource-manager": "5.3.0", "@google/generative-ai": "0.19.0", "@huggingface/inference": "2.8.0", "@langchain/anthropic": "0.3.1", - "@langchain/aws": "^0.1.0", + "@langchain/aws": "0.1.0", "@langchain/cohere": "0.3.0", "@langchain/community": "0.3.2", "@langchain/core": "catalog:", @@ -149,23 +151,22 @@ "@langchain/qdrant": "0.1.0", "@langchain/redis": "0.1.0", "@langchain/textsplitters": "0.1.0", - "@mozilla/readability": "^0.5.0", - "@n8n/typeorm": "0.3.20-10", + "@mozilla/readability": "0.5.0", + "@n8n/typeorm": "0.3.20-12", "@n8n/vm2": "3.9.25", "@pinecone-database/pinecone": "3.0.3", "@qdrant/js-client-rest": "1.11.0", "@supabase/supabase-js": "2.45.4", - "@types/pg": "^8.11.6", - "@xata.io/client": "0.30.0", + "@xata.io/client": "0.28.4", "basic-auth": "catalog:", - "cheerio": "1.0.0-rc.12", + "cheerio": "1.0.0", "cohere-ai": "7.13.2", "d3-dsv": "2.0.0", "epub2": "3.0.2", "form-data": "catalog:", "generate-schema": "2.6.0", "html-to-text": "9.0.5", - "jsdom": "^23.0.1", + "jsdom": "23.0.1", "json-schema-to-zod": "2.1.0", "langchain": "0.3.2", "lodash": "catalog:", diff --git a/packages/@n8n/nodes-langchain/utils/schemaParsing.ts b/packages/@n8n/nodes-langchain/utils/schemaParsing.ts index 8d5f61153d..0591483e2c 100644 --- a/packages/@n8n/nodes-langchain/utils/schemaParsing.ts +++ b/packages/@n8n/nodes-langchain/utils/schemaParsing.ts @@ -57,7 +57,6 @@ export function getSandboxWithZod(ctx: IExecuteFunctions, schema: JSONSchema7, i const itemSchema = new Function('z', 'return (' + zodSchema + ')')(z) return itemSchema `, - itemIndex, ctx.helpers, { resolver: vmResolver }, ); diff --git a/packages/@n8n/storybook/package.json b/packages/@n8n/storybook/package.json index 7a08f9ce51..7b2230a36b 100644 --- a/packages/@n8n/storybook/package.json +++ b/packages/@n8n/storybook/package.json @@ -4,18 +4,18 @@ "version": "0.0.1", "devDependencies": { "@chromatic-com/storybook": "^2.0.2", - "@storybook/addon-a11y": "^8.3.1", - "@storybook/addon-actions": "^8.3.1", - "@storybook/addon-docs": "^8.3.1", - "@storybook/addon-essentials": "^8.3.1", - "@storybook/addon-interactions": "^8.3.1", - "@storybook/addon-links": "^8.3.1", - "@storybook/addon-themes": "^8.3.1", - "@storybook/blocks": "^8.3.1", - "@storybook/test": "^8.3.1", - "@storybook/vue3": "^8.3.1", - "@storybook/vue3-vite": "^8.3.1", + "@storybook/addon-a11y": "^8.3.5", + "@storybook/addon-actions": "^8.3.5", + "@storybook/addon-docs": "^8.3.5", + "@storybook/addon-essentials": "^8.3.5", + "@storybook/addon-interactions": "^8.3.5", + "@storybook/addon-links": "^8.3.5", + "@storybook/addon-themes": "^8.3.5", + "@storybook/blocks": "^8.3.5", + "@storybook/test": "^8.3.5", + "@storybook/vue3": 
"^8.3.5", + "@storybook/vue3-vite": "^8.3.5", "chromatic": "^11.10.2", - "storybook": "^8.3.1" + "storybook": "^8.3.5" } } diff --git a/packages/@n8n/task-runner/.eslintrc.js b/packages/@n8n/task-runner/.eslintrc.js new file mode 100644 index 0000000000..dd79f2157e --- /dev/null +++ b/packages/@n8n/task-runner/.eslintrc.js @@ -0,0 +1,19 @@ +const sharedOptions = require('@n8n_io/eslint-config/shared'); + +/** + * @type {import('@types/eslint').ESLint.ConfigData} + */ +module.exports = { + extends: ['@n8n_io/eslint-config/node'], + + ...sharedOptions(__dirname), + + ignorePatterns: ['jest.config.js'], + + rules: { + 'unicorn/filename-case': ['error', { case: 'kebabCase' }], + '@typescript-eslint/no-duplicate-imports': 'off', + + complexity: 'error', + }, +}; diff --git a/packages/@n8n/task-runner/jest.config.js b/packages/@n8n/task-runner/jest.config.js new file mode 100644 index 0000000000..5c3abe1ef7 --- /dev/null +++ b/packages/@n8n/task-runner/jest.config.js @@ -0,0 +1,5 @@ +/** @type {import('jest').Config} */ +module.exports = { + ...require('../../../jest.config'), + testTimeout: 10_000, +}; diff --git a/packages/@n8n/task-runner/package.json b/packages/@n8n/task-runner/package.json new file mode 100644 index 0000000000..a82b97975d --- /dev/null +++ b/packages/@n8n/task-runner/package.json @@ -0,0 +1,33 @@ +{ + "name": "@n8n/task-runner", + "version": "1.1.0", + "scripts": { + "clean": "rimraf dist .turbo", + "start": "node dist/start.js", + "dev": "pnpm build && pnpm start", + "typecheck": "tsc --noEmit", + "build": "tsc -p ./tsconfig.build.json && tsc-alias -p tsconfig.build.json", + "format": "biome format --write src", + "format:check": "biome ci src", + "test": "jest", + "test:watch": "jest --watch", + "lint": "eslint . --quiet", + "lintfix": "eslint . 
--fix", + "watch": "concurrently \"tsc -w -p tsconfig.build.json\" \"tsc-alias -w -p tsconfig.build.json\"" + }, + "main": "dist/start.js", + "module": "src/start.ts", + "types": "dist/start.d.ts", + "files": [ + "dist/**/*" + ], + "dependencies": { + "n8n-workflow": "workspace:*", + "n8n-core": "workspace:*", + "nanoid": "^3.3.6", + "ws": "^8.18.0" + }, + "devDependencies": { + "luxon": "catalog:" + } +} diff --git a/packages/@n8n/task-runner/src/authenticator.ts b/packages/@n8n/task-runner/src/authenticator.ts new file mode 100644 index 0000000000..7edb4cadf6 --- /dev/null +++ b/packages/@n8n/task-runner/src/authenticator.ts @@ -0,0 +1,47 @@ +import { ApplicationError } from 'n8n-workflow'; +import * as a from 'node:assert/strict'; + +export type AuthOpts = { + n8nUri: string; + authToken: string; +}; + +/** + * Requests a one-time token that can be used to establish a task runner connection + */ +export async function authenticate(opts: AuthOpts) { + try { + const authEndpoint = `http://${opts.n8nUri}/runners/auth`; + const response = await fetch(authEndpoint, { + method: 'POST', + headers: { + // eslint-disable-next-line @typescript-eslint/naming-convention + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + token: opts.authToken, + }), + }); + + if (!response.ok) { + throw new ApplicationError( + `Invalid response status ${response.status}: ${await response.text()}`, + ); + } + + const { data } = (await response.json()) as { data: { token: string } }; + const grantToken = data.token; + a.ok(grantToken); + + return grantToken; + } catch (e) { + console.error(e); + const error = e as Error; + throw new ApplicationError( + `Could not connect to n8n message broker ${opts.n8nUri}: ${error.message}`, + { + cause: error, + }, + ); + } +} diff --git a/packages/@n8n/task-runner/src/index.ts b/packages/@n8n/task-runner/src/index.ts new file mode 100644 index 0000000000..59e6f6d288 --- /dev/null +++ b/packages/@n8n/task-runner/src/index.ts @@ -0,0 +1,2 @@ +export * from './task-runner'; +export * from './runner-types'; diff --git a/packages/@n8n/task-runner/src/js-task-runner/__tests__/js-task-runner.test.ts b/packages/@n8n/task-runner/src/js-task-runner/__tests__/js-task-runner.test.ts new file mode 100644 index 0000000000..499105f39d --- /dev/null +++ b/packages/@n8n/task-runner/src/js-task-runner/__tests__/js-task-runner.test.ts @@ -0,0 +1,762 @@ +import { DateTime } from 'luxon'; +import type { CodeExecutionMode, IDataObject } from 'n8n-workflow'; +import fs from 'node:fs'; +import { builtinModules } from 'node:module'; + +import { ValidationError } from '@/js-task-runner/errors/validation-error'; +import type { JsTaskRunnerOpts } from '@/js-task-runner/js-task-runner'; +import { + JsTaskRunner, + type AllCodeTaskData, + type JSExecSettings, +} from '@/js-task-runner/js-task-runner'; +import type { Task } from '@/task-runner'; + +import { newAllCodeTaskData, newTaskWithSettings, withPairedItem, wrapIntoJson } from './test-data'; +import { ExecutionError } from '../errors/execution-error'; + +jest.mock('ws'); + +describe('JsTaskRunner', () => { + const createRunnerWithOpts = (opts: Partial = {}) => + new JsTaskRunner({ + wsUrl: 'ws://localhost', + grantToken: 'grantToken', + maxConcurrency: 1, + ...opts, + }); + + const defaultTaskRunner = createRunnerWithOpts(); + + const execTaskWithParams = async ({ + task, + taskData, + runner = defaultTaskRunner, + }: { + task: Task; + taskData: AllCodeTaskData; + runner?: JsTaskRunner; + }) => { + jest.spyOn(runner, 
'requestData').mockResolvedValue(taskData); + return await runner.executeTask(task); + }; + + afterEach(() => { + jest.restoreAllMocks(); + }); + + const executeForAllItems = async ({ + code, + inputItems, + settings, + runner, + }: { + code: string; + inputItems: IDataObject[]; + settings?: Partial<JSExecSettings>; + runner?: JsTaskRunner; + }) => { + return await execTaskWithParams({ + task: newTaskWithSettings({ + code, + nodeMode: 'runOnceForAllItems', + ...settings, + }), + taskData: newAllCodeTaskData(inputItems.map(wrapIntoJson)), + runner, + }); + }; + + const executeForEachItem = async ({ + code, + inputItems, + settings, + runner, + }: { + code: string; + inputItems: IDataObject[]; + settings?: Partial<JSExecSettings>; + runner?: JsTaskRunner; + }) => { + return await execTaskWithParams({ + task: newTaskWithSettings({ + code, + nodeMode: 'runOnceForEachItem', + ...settings, + }), + taskData: newAllCodeTaskData(inputItems.map(wrapIntoJson)), + runner, + }); + }; + + describe('console', () => { + test.each<[CodeExecutionMode]>([['runOnceForAllItems'], ['runOnceForEachItem']])( + 'should make an rpc call for console log in %s mode', + async (nodeMode) => { + jest.spyOn(defaultTaskRunner, 'makeRpcCall').mockResolvedValue(undefined); + const task = newTaskWithSettings({ + code: "console.log('Hello', 'world!'); return {}", + nodeMode, + }); + + await execTaskWithParams({ + task, + taskData: newAllCodeTaskData([wrapIntoJson({})]), + }); + + expect(defaultTaskRunner.makeRpcCall).toHaveBeenCalledWith(task.taskId, 'logNodeOutput', [ + 'Hello world!', + ]); + }, + ); + }); + + describe('built-in methods and variables available in the context', () => { + const inputItems = [{ a: 1 }]; + + const testExpressionForAllItems = async ( + expression: string, + expected: IDataObject | string | number | boolean, + ) => { + const needsWrapping = typeof expected !== 'object'; + const outcome = await executeForAllItems({ + code: needsWrapping ? `return { val: ${expression} }` : `return ${expression}`, + inputItems, + }); + + expect(outcome.result).toEqual([wrapIntoJson(needsWrapping ? { val: expected } : expected)]); + }; + + const testExpressionForEachItem = async ( + expression: string, + expected: IDataObject | string | number | boolean, + ) => { + const needsWrapping = typeof expected !== 'object'; + const outcome = await executeForEachItem({ + code: needsWrapping ? `return { val: ${expression} }` : `return ${expression}`, + inputItems, + }); + + expect(outcome.result).toEqual([ + withPairedItem(0, wrapIntoJson(needsWrapping ?
{ val: expected } : expected)), + ]); + }; + + const testGroups = { + // https://docs.n8n.io/code/builtin/current-node-input/ + 'current node input': [ + ['$input.first()', inputItems[0]], + ['$input.last()', inputItems[inputItems.length - 1]], + ['$input.params', { manualTriggerParam: 'empty' }], + ], + // https://docs.n8n.io/code/builtin/output-other-nodes/ + 'output of other nodes': [ + ['$("Trigger").first()', inputItems[0]], + ['$("Trigger").last()', inputItems[inputItems.length - 1]], + ['$("Trigger").params', { manualTriggerParam: 'empty' }], + ], + // https://docs.n8n.io/code/builtin/date-time/ + 'date and time': [ + ['$now', expect.any(DateTime)], + ['$today', expect.any(DateTime)], + ['{dt: DateTime}', { dt: expect.any(Function) }], + ], + // https://docs.n8n.io/code/builtin/jmespath/ + JMESPath: [['{ val: $jmespath([{ f: 1 },{ f: 2 }], "[*].f") }', { val: [1, 2] }]], + // https://docs.n8n.io/code/builtin/n8n-metadata/ + 'n8n metadata': [ + [ + '$execution', + { + id: 'exec-id', + mode: 'test', + resumeFormUrl: 'http://formWaitingBaseUrl/exec-id', + resumeUrl: 'http://webhookWaitingBaseUrl/exec-id', + customData: { + get: expect.any(Function), + getAll: expect.any(Function), + set: expect.any(Function), + setAll: expect.any(Function), + }, + }, + ], + ['$("Trigger").isExecuted', true], + ['$nodeVersion', 2], + ['$prevNode.name', 'Trigger'], + ['$prevNode.outputIndex', 0], + ['$runIndex', 0], + ['{ wf: $workflow }', { wf: { active: true, id: '1', name: 'Test Workflow' } }], + ['$vars', { var: 'value' }], + ], + }; + + for (const [groupName, tests] of Object.entries(testGroups)) { + describe(`${groupName} runOnceForAllItems`, () => { + test.each(tests)( + 'should have the %s available in the context', + async (expression, expected) => { + await testExpressionForAllItems(expression, expected); + }, + ); + }); + + describe(`${groupName} runOnceForEachItem`, () => { + test.each(tests)( + 'should have the %s available in the context', + async (expression, expected) => { + await testExpressionForEachItem(expression, expected); + }, + ); + }); + } + + describe('$env', () => { + it('should have the env available in context when access has not been blocked', async () => { + const outcome = await execTaskWithParams({ + task: newTaskWithSettings({ + code: 'return { val: $env.VAR1 }', + nodeMode: 'runOnceForAllItems', + }), + taskData: newAllCodeTaskData(inputItems.map(wrapIntoJson), { + envProviderState: { + isEnvAccessBlocked: false, + isProcessAvailable: true, + env: { VAR1: 'value' }, + }, + }), + }); + + expect(outcome.result).toEqual([wrapIntoJson({ val: 'value' })]); + }); + + it('should not be possible to access env if it has been blocked', async () => { + await expect( + execTaskWithParams({ + task: newTaskWithSettings({ + code: 'return { val: $env.VAR1 }', + nodeMode: 'runOnceForAllItems', + }), + taskData: newAllCodeTaskData(inputItems.map(wrapIntoJson), { + envProviderState: { + isEnvAccessBlocked: true, + isProcessAvailable: true, + env: { VAR1: 'value' }, + }, + }), + }), + ).rejects.toThrow('access to env vars denied'); + }); + + it('should not be possible to iterate $env', async () => { + const outcome = await execTaskWithParams({ + task: newTaskWithSettings({ + code: 'return Object.values($env).concat(Object.keys($env))', + nodeMode: 'runOnceForAllItems', + }), + taskData: newAllCodeTaskData(inputItems.map(wrapIntoJson), { + envProviderState: { + isEnvAccessBlocked: false, + isProcessAvailable: true, + env: { VAR1: '1', VAR2: '2', VAR3: '3' }, + }, + }), + });
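+ // Editorial note: the empty result asserted below is expected because `$env`
+ // is exposed through a Proxy that hides its keys from enumeration. A minimal
+ // sketch of that technique (an illustrative assumption, not the actual
+ // n8n-workflow implementation):
+ //
+ //   const $env = new Proxy({ VAR1: '1' } as Record<string, string>, {
+ //     ownKeys: () => [],
+ //     getOwnPropertyDescriptor: () => undefined,
+ //   });
+ //   Object.values($env).concat(Object.keys($env)); // -> []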
expect(outcome.result).toEqual([]); + }); + + it("should not expose task runner's env variables even if no env state is received", async () => { + process.env.N8N_RUNNERS_N8N_URI = 'http://127.0.0.1:5679'; + const outcome = await execTaskWithParams({ + task: newTaskWithSettings({ + code: 'return { val: $env.N8N_RUNNERS_N8N_URI }', + nodeMode: 'runOnceForAllItems', + }), + taskData: newAllCodeTaskData(inputItems.map(wrapIntoJson), { + envProviderState: undefined, + }), + }); + + expect(outcome.result).toEqual([wrapIntoJson({ val: undefined })]); + }); + }); + }); + + describe('runOnceForAllItems', () => { + describe('continue on fail', () => { + it('should return an item with the error if continueOnFail is true', async () => { + const outcome = await executeForAllItems({ + code: 'throw new Error("Error message")', + inputItems: [{ a: 1 }], + settings: { continueOnFail: true }, + }); + + expect(outcome).toEqual({ + result: [wrapIntoJson({ error: 'Error message [line 1]' })], + customData: undefined, + }); + }); + + it('should throw an error if continueOnFail is false', async () => { + await expect( + executeForAllItems({ + code: 'throw new Error("Error message")', + inputItems: [{ a: 1 }], + settings: { continueOnFail: false }, + }), + ).rejects.toThrow('Error message'); + }); + }); + + describe('invalid output', () => { + test.each([['undefined'], ['42'], ['"a string"']])( + 'should throw a ValidationError if the code output is %s', + async (output) => { + await expect( + executeForAllItems({ + code: `return ${output}`, + inputItems: [{ a: 1 }], + }), + ).rejects.toThrow(ValidationError); + }, + ); + + it('should throw a ValidationError if some items are wrapped in json and some are not', async () => { + await expect( + executeForAllItems({ + code: 'return [{b: 1}, {json: {b: 2}}]', + inputItems: [{ a: 1 }], + }), + ).rejects.toThrow(ValidationError); + }); + }); + + it('should return static items', async () => { + const outcome = await executeForAllItems({ + code: 'return [{json: {b: 1}}]', + inputItems: [{ a: 1 }], + }); + + expect(outcome).toEqual({ + result: [wrapIntoJson({ b: 1 })], + customData: undefined, + }); + }); + + it('maps null into an empty array', async () => { + const outcome = await executeForAllItems({ + code: 'return null', + inputItems: [{ a: 1 }], + }); + + expect(outcome).toEqual({ + result: [], + customData: undefined, + }); + }); + + it("should wrap items into json if they aren't", async () => { + const outcome = await executeForAllItems({ + code: 'return [{b: 1}]', + inputItems: [{ a: 1 }], + }); + + expect(outcome).toEqual({ + result: [wrapIntoJson({ b: 1 })], + customData: undefined, + }); + }); + + it('should wrap single item into an array and json', async () => { + const outcome = await executeForAllItems({ + code: 'return {b: 1}', + inputItems: [{ a: 1 }], + }); + + expect(outcome).toEqual({ + result: [wrapIntoJson({ b: 1 })], + customData: undefined, + }); + }); + + test.each([['items'], ['$input.all()'], ["$('Trigger').all()"]])( + 'should have all input items in the context as %s', + async (expression) => { + const outcome = await executeForAllItems({ + code: `return ${expression}`, + inputItems: [{ a: 1 }, { a: 2 }], + }); + + expect(outcome).toEqual({ + result: [wrapIntoJson({ a: 1 }), wrapIntoJson({ a: 2 })], + customData: undefined, + }); + }, + ); + }); + + describe('runForEachItem', () => { + describe('continue on fail', () => { + it('should return an item with the error if continueOnFail is true', async () => { + const outcome = await 
executeForEachItem({ + code: 'throw new Error("Error message")', + inputItems: [{ a: 1 }, { a: 2 }], + settings: { continueOnFail: true }, + }); + + expect(outcome).toEqual({ + result: [ + withPairedItem(0, wrapIntoJson({ error: 'Error message [line 1]' })), + withPairedItem(1, wrapIntoJson({ error: 'Error message [line 1]' })), + ], + customData: undefined, + }); + }); + + it('should throw an error if continueOnFail is false', async () => { + await expect( + executeForEachItem({ + code: 'throw new Error("Error message")', + inputItems: [{ a: 1 }], + settings: { continueOnFail: false }, + }), + ).rejects.toThrow('Error message'); + }); + }); + + describe('invalid output', () => { + test.each([['undefined'], ['42'], ['"a string"'], ['[]'], ['[1,2,3]']])( + 'should throw a ValidationError if the code output is %s', + async (output) => { + await expect( + executeForEachItem({ + code: `return ${output}`, + inputItems: [{ a: 1 }], + }), + ).rejects.toThrow(ValidationError); + }, + ); + }); + + it('should return static items', async () => { + const outcome = await executeForEachItem({ + code: 'return {json: {b: 1}}', + inputItems: [{ a: 1 }], + }); + + expect(outcome).toEqual({ + result: [withPairedItem(0, wrapIntoJson({ b: 1 }))], + customData: undefined, + }); + }); + + it('should filter out null values', async () => { + const outcome = await executeForEachItem({ + code: 'return item.json.a === 1 ? item : null', + inputItems: [{ a: 1 }, { a: 2 }, { a: 3 }], + }); + + expect(outcome).toEqual({ + result: [withPairedItem(0, wrapIntoJson({ a: 1 }))], + customData: undefined, + }); + }); + + test.each([['item'], ['$input.item'], ['{ json: $json }']])( + 'should have the current input item in the context as %s', + async (expression) => { + const outcome = await executeForEachItem({ + code: `return ${expression}`, + inputItems: [{ a: 1 }, { a: 2 }], + }); + + expect(outcome).toEqual({ + result: [ + withPairedItem(0, wrapIntoJson({ a: 1 })), + withPairedItem(1, wrapIntoJson({ a: 2 })), + ], + customData: undefined, + }); + }, + ); + }); + + describe('require', () => { + const inputItems = [{ a: 1 }]; + const packageJson = JSON.parse(fs.readFileSync('package.json', 'utf8')); + + describe('blocked by default', () => { + const testCases = [...builtinModules, ...Object.keys(packageJson.dependencies)]; + + test.each(testCases)( + 'should throw an error when requiring %s in runOnceForAllItems mode', + async (module) => { + await expect( + executeForAllItems({ + code: `return require('${module}')`, + inputItems, + }), + ).rejects.toThrow(`Cannot find module '${module}'`); + }, + ); + + test.each(testCases)( + 'should throw an error when requiring %s in runOnceForEachItem mode', + async (module) => { + await expect( + executeForEachItem({ + code: `return require('${module}')`, + inputItems, + }), + ).rejects.toThrow(`Cannot find module '${module}'`); + }, + ); + }); + + describe('all built-ins allowed with *', () => { + const testCases = builtinModules; + const runner = createRunnerWithOpts({ + allowedBuiltInModules: '*', + }); + + test.each(testCases)( + 'should be able to require %s in runOnceForAllItems mode', + async (module) => { + await expect( + executeForAllItems({ + code: `return { val: require('${module}') }`, + inputItems, + runner, + }), + ).resolves.toBeDefined(); + }, + ); + + test.each(testCases)( + 'should be able to require %s in runOnceForEachItem mode', + async (module) => { + await expect( + executeForEachItem({ + code: `return { val: require('${module}') }`, + inputItems, + runner, + }), 
+ ).resolves.toBeDefined(); + }, + ); + }); + + describe('all external modules allowed with *', () => { + const testCases = Object.keys(packageJson.dependencies); + const runner = createRunnerWithOpts({ + allowedExternalModules: '*', + }); + + test.each(testCases)( + 'should be able to require %s in runOnceForAllItems mode', + async (module) => { + await expect( + executeForAllItems({ + code: `return { val: require('${module}') }`, + inputItems, + runner, + }), + ).resolves.toBeDefined(); + }, + ); + + test.each(testCases)( + 'should be able to require %s in runOnceForEachItem mode', + async (module) => { + await expect( + executeForEachItem({ + code: `return { val: require('${module}') }`, + inputItems, + runner, + }), + ).resolves.toBeDefined(); + }, + ); + }); + + describe('specifically allowed built-in modules', () => { + const runner = createRunnerWithOpts({ + allowedBuiltInModules: 'crypto,path', + }); + + const allowedCases = [ + ['crypto', 'require("crypto").randomBytes(16).toString("hex")', expect.any(String)], + ['path', 'require("path").normalize("/root/./dir")', '/root/dir'], + ]; + + const blockedCases = [['http'], ['process']]; + + test.each(allowedCases)( + 'should allow requiring %s in runOnceForAllItems mode', + async (_moduleName, expression, expected) => { + const outcome = await executeForAllItems({ + code: `return { val: ${expression} }`, + inputItems, + runner, + }); + + expect(outcome.result).toEqual([wrapIntoJson({ val: expected })]); + }, + ); + + test.each(allowedCases)( + 'should allow requiring %s in runOnceForEachItem mode', + async (_moduleName, expression, expected) => { + const outcome = await executeForEachItem({ + code: `return { val: ${expression} }`, + inputItems, + runner, + }); + + expect(outcome.result).toEqual([withPairedItem(0, wrapIntoJson({ val: expected }))]); + }, + ); + + test.each(blockedCases)( + 'should throw when trying to require %s in runOnceForAllItems mode', + async (moduleName) => { + await expect( + executeForAllItems({ + code: `require("${moduleName}")`, + inputItems, + runner, + }), + ).rejects.toThrow(`Cannot find module '${moduleName}'`); + }, + ); + + test.each(blockedCases)( + 'should throw when trying to require %s in runOnceForEachItem mode', + async (moduleName) => { + await expect( + executeForEachItem({ + code: `require("${moduleName}")`, + inputItems, + runner, + }), + ).rejects.toThrow(`Cannot find module '${moduleName}'`); + }, + ); + }); + + describe('specifically allowed external modules', () => { + const runner = createRunnerWithOpts({ + allowedExternalModules: 'nanoid', + }); + + const allowedCases = [['nanoid', 'require("nanoid").nanoid()', expect.any(String)]]; + + const blockedCases = [['n8n-core']]; + + test.each(allowedCases)( + 'should allow requiring %s in runOnceForAllItems mode', + async (_moduleName, expression, expected) => { + const outcome = await executeForAllItems({ + code: `return { val: ${expression} }`, + inputItems, + runner, + }); + + expect(outcome.result).toEqual([wrapIntoJson({ val: expected })]); + }, + ); + + test.each(allowedCases)( + 'should allow requiring %s in runOnceForEachItem mode', + async (_moduleName, expression, expected) => { + const outcome = await executeForEachItem({ + code: `return { val: ${expression} }`, + inputItems, + runner, + }); + + expect(outcome.result).toEqual([withPairedItem(0, wrapIntoJson({ val: expected }))]); + }, + ); + + test.each(blockedCases)( + 'should throw when trying to require %s in runOnceForAllItems mode', + async (moduleName) => { + await expect( + 
executeForAllItems({ + code: `require("${moduleName}")`, + inputItems, + runner, + }), + ).rejects.toThrow(`Cannot find module '${moduleName}'`); + }, + ); + + test.each(blockedCases)( + 'should throw when trying to require %s in runOnceForEachItem mode', + async (moduleName) => { + await expect( + executeForEachItem({ + code: `require("${moduleName}")`, + inputItems, + runner, + }), + ).rejects.toThrow(`Cannot find module '${moduleName}'`); + }, + ); + }); + }); + + describe('errors', () => { + test.each<[CodeExecutionMode]>([['runOnceForAllItems'], ['runOnceForEachItem']])( + 'should throw an ExecutionError if the code is invalid in %s mode', + async (nodeMode) => { + await expect( + execTaskWithParams({ + task: newTaskWithSettings({ + code: 'unknown', + nodeMode, + }), + taskData: newAllCodeTaskData([wrapIntoJson({ a: 1 })]), + }), + ).rejects.toThrow(ExecutionError); + }, + ); + + it('serializes an error correctly', async () => { + const runner = createRunnerWithOpts({}); + const taskId = '1'; + const task = newTaskWithSettings({ + code: 'unknown; return []', + nodeMode: 'runOnceForAllItems', + continueOnFail: false, + mode: 'manual', + workflowMode: 'manual', + }); + runner.runningTasks.set(taskId, task); + + const sendSpy = jest.spyOn(runner.ws, 'send').mockImplementation(() => {}); + jest.spyOn(runner, 'sendOffers').mockImplementation(() => {}); + jest + .spyOn(runner, 'requestData') + .mockResolvedValue(newAllCodeTaskData([wrapIntoJson({ a: 1 })])); + + await runner.receivedSettings(taskId, task.settings); + + expect(sendSpy).toHaveBeenCalledWith( + JSON.stringify({ + type: 'runner:taskerror', + taskId, + error: { + message: 'unknown is not defined [line 1]', + description: 'ReferenceError', + lineNumber: 1, + }, + }), + ); + }, 1000); + }); +}); diff --git a/packages/@n8n/task-runner/src/js-task-runner/__tests__/test-data.ts b/packages/@n8n/task-runner/src/js-task-runner/__tests__/test-data.ts new file mode 100644 index 0000000000..b157094619 --- /dev/null +++ b/packages/@n8n/task-runner/src/js-task-runner/__tests__/test-data.ts @@ -0,0 +1,168 @@ +import type { IDataObject, INode, INodeExecutionData, ITaskData } from 'n8n-workflow'; +import { NodeConnectionType } from 'n8n-workflow'; +import { nanoid } from 'nanoid'; + +import type { AllCodeTaskData, JSExecSettings } from '@/js-task-runner/js-task-runner'; +import type { Task } from '@/task-runner'; + +/** + * Creates a new task with the given settings + */ +export const newTaskWithSettings = ( + settings: Partial<JSExecSettings> & Pick<JSExecSettings, 'code' | 'nodeMode'>, +): Task<JSExecSettings> => ({ + taskId: '1', + settings: { + workflowMode: 'manual', + continueOnFail: false, + mode: 'manual', + ...settings, + }, + active: true, + cancelled: false, +}); + +/** + * Creates a new node with the given options + */ +export const newNode = (opts: Partial<INode> = {}): INode => ({ + id: nanoid(), + name: 'Test Node' + nanoid(), + parameters: {}, + position: [0, 0], + type: 'n8n-nodes-base.code', + typeVersion: 1, + ...opts, +}); + +/** + * Creates a new task data with the given options + */ +export const newTaskData = (opts: Partial<ITaskData> & Pick<ITaskData, 'source'>): ITaskData => ({ + startTime: Date.now(), + executionTime: 0, + executionStatus: 'success', + ...opts, +}); + +/** + * Creates a new all code task data with the given options + */ +export const newAllCodeTaskData = ( + codeNodeInputData: INodeExecutionData[], + opts: Partial<AllCodeTaskData> = {}, +): AllCodeTaskData => { + const codeNode = newNode({ + name: 'JsCode', + parameters: { + mode: 'runOnceForEachItem', + language: 'javaScript', + jsCode:
'return item', + }, + type: 'n8n-nodes-base.code', + typeVersion: 2, + }); + const manualTriggerNode = newNode({ + name: 'Trigger', + type: 'n8n-nodes-base.manualTrigger', + parameters: { + manualTriggerParam: 'empty', + }, + }); + + return { + workflow: { + id: '1', + name: 'Test Workflow', + active: true, + connections: { + [manualTriggerNode.name]: { + main: [[{ node: codeNode.name, type: NodeConnectionType.Main, index: 0 }]], + }, + }, + nodes: [manualTriggerNode, codeNode], + }, + inputData: { + main: [codeNodeInputData], + }, + connectionInputData: codeNodeInputData, + node: codeNode, + runExecutionData: { + startData: {}, + resultData: { + runData: { + [manualTriggerNode.name]: [ + newTaskData({ + source: [], + data: { + main: [codeNodeInputData], + }, + }), + ], + }, + pinData: {}, + lastNodeExecuted: manualTriggerNode.name, + }, + executionData: { + contextData: {}, + nodeExecutionStack: [], + metadata: {}, + waitingExecution: {}, + waitingExecutionSource: {}, + }, + }, + runIndex: 0, + itemIndex: 0, + activeNodeName: codeNode.name, + contextNodeName: codeNode.name, + defaultReturnRunIndex: -1, + siblingParameters: {}, + mode: 'manual', + selfData: {}, + envProviderState: { + env: {}, + isEnvAccessBlocked: true, + isProcessAvailable: true, + }, + additionalData: { + executionId: 'exec-id', + instanceBaseUrl: '', + restartExecutionId: '', + restApiUrl: '', + formWaitingBaseUrl: 'http://formWaitingBaseUrl', + webhookBaseUrl: 'http://webhookBaseUrl', + webhookTestBaseUrl: 'http://webhookTestBaseUrl', + webhookWaitingBaseUrl: 'http://webhookWaitingBaseUrl', + variables: { + var: 'value', + }, + }, + executeData: { + node: codeNode, + data: { + main: [codeNodeInputData], + }, + source: { + main: [{ previousNode: manualTriggerNode.name }], + }, + }, + ...opts, + }; +}; + +/** + * Wraps the given value into an INodeExecutionData object's json property + */ +export const wrapIntoJson = (json: IDataObject): INodeExecutionData => ({ + json, +}); + +/** + * Adds the given index as the pairedItem property to the given INodeExecutionData object + */ +export const withPairedItem = (index: number, data: INodeExecutionData): INodeExecutionData => ({ + ...data, + pairedItem: { + item: index, + }, +}); diff --git a/packages/@n8n/task-runner/src/js-task-runner/errors/__tests__/execution-error.test.ts b/packages/@n8n/task-runner/src/js-task-runner/errors/__tests__/execution-error.test.ts new file mode 100644 index 0000000000..3777940021 --- /dev/null +++ b/packages/@n8n/task-runner/src/js-task-runner/errors/__tests__/execution-error.test.ts @@ -0,0 +1,53 @@ +import { ExecutionError } from '../execution-error'; + +describe('ExecutionError', () => { + const defaultStack = `TypeError: a.unknown is not a function + at VmCodeWrapper (evalmachine.<anonymous>:2:3) + at evalmachine.<anonymous>:7:2 + at Script.runInContext (node:vm:148:12) + at Script.runInNewContext (node:vm:153:17) + at runInNewContext (node:vm:309:38) + at JsTaskRunner.runForAllItems (/n8n/packages/@n8n/task-runner/dist/js-task-runner/js-task-runner.js:90:65) + at JsTaskRunner.executeTask (/n8n/packages/@n8n/task-runner/dist/js-task-runner/js-task-runner.js:71:26) + at process.processTicksAndRejections (node:internal/process/task_queues:95:5) + at async JsTaskRunner.receivedSettings (/n8n/packages/@n8n/task-runner/dist/task-runner.js:190:26)`; + + it('should parse error details from stack trace without itemIndex', () => { + const error = new Error('a.unknown is not a function'); + error.stack = defaultStack; + + const executionError = new ExecutionError(error);
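+ // Editorial note: the assertions below exercise ExecutionError's stack
+ // parsing — the first stack row containing 'Error:' supplies the description
+ // ('TypeError') and the message, while the
+ // 'at VmCodeWrapper (evalmachine.<anonymous>:2:3)' row supplies the line
+ // number, which is why the message gains the '[line 2]' suffix.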
+ expect(executionError.message).toBe('a.unknown is not a function [line 2]'); + expect(executionError.lineNumber).toBe(2); + expect(executionError.description).toBe('TypeError'); + expect(executionError.context).toBeUndefined(); + }); + + it('should parse error details from stack trace with itemIndex', () => { + const error = new Error('a.unknown is not a function'); + error.stack = defaultStack; + + const executionError = new ExecutionError(error, 1); + expect(executionError.message).toBe('a.unknown is not a function [line 2, for item 1]'); + expect(executionError.lineNumber).toBe(2); + expect(executionError.description).toBe('TypeError'); + expect(executionError.context).toEqual({ itemIndex: 1 }); + }); + + it('should serialize correctly', () => { + const error = new Error('a.unknown is not a function'); + error.stack = defaultStack; + + const executionError = new ExecutionError(error, 1); + + expect(JSON.stringify(executionError)).toBe( + JSON.stringify({ + message: 'a.unknown is not a function [line 2, for item 1]', + description: 'TypeError', + itemIndex: 1, + context: { itemIndex: 1 }, + lineNumber: 2, + }), + ); + }); +}); diff --git a/packages/@n8n/task-runner/src/js-task-runner/errors/error-like.ts b/packages/@n8n/task-runner/src/js-task-runner/errors/error-like.ts new file mode 100644 index 0000000000..1eaf744e89 --- /dev/null +++ b/packages/@n8n/task-runner/src/js-task-runner/errors/error-like.ts @@ -0,0 +1,12 @@ +export interface ErrorLike { + message: string; + stack?: string; +} + +export function isErrorLike(value: unknown): value is ErrorLike { + if (typeof value !== 'object' || value === null) return false; + + const errorLike = value as ErrorLike; + + return typeof errorLike.message === 'string'; +} diff --git a/packages/@n8n/task-runner/src/js-task-runner/errors/execution-error.ts b/packages/@n8n/task-runner/src/js-task-runner/errors/execution-error.ts new file mode 100644 index 0000000000..63a2dd5e0b --- /dev/null +++ b/packages/@n8n/task-runner/src/js-task-runner/errors/execution-error.ts @@ -0,0 +1,94 @@ +import type { ErrorLike } from './error-like'; +import { SerializableError } from './serializable-error'; + +const VM_WRAPPER_FN_NAME = 'VmCodeWrapper'; + +export class ExecutionError extends SerializableError { + description: string | null = null; + + itemIndex: number | undefined = undefined; + + context: { itemIndex: number } | undefined = undefined; + + stack = ''; + + lineNumber: number | undefined = undefined; + + constructor(error: ErrorLike, itemIndex?: number) { + super(error.message); + this.itemIndex = itemIndex; + + if (this.itemIndex !== undefined) { + this.context = { itemIndex: this.itemIndex }; + } + + this.stack = error.stack ?? ''; + + this.populateFromStack(); + } + + /** + * Populate error `message` and `description` from error `stack`. 
+ */ + private populateFromStack() { + const stackRows = this.stack.split('\n'); + + if (stackRows.length === 0) { + this.message = 'Unknown error'; + return; + } + + const messageRow = stackRows.find((line) => line.includes('Error:')); + const lineNumberRow = stackRows.find((line) => line.includes(`at ${VM_WRAPPER_FN_NAME} `)); + const lineNumberDisplay = this.toLineNumberDisplay(lineNumberRow); + + if (!messageRow) { + this.message = `Unknown error ${lineNumberDisplay}`; + return; + } + + const [errorDetails, errorType] = this.toErrorDetailsAndType(messageRow); + + if (errorType) this.description = errorType; + + if (!errorDetails) { + this.message = `Unknown error ${lineNumberDisplay}`; + return; + } + + this.message = `${errorDetails} ${lineNumberDisplay}`; + } + + private toLineNumberDisplay(lineNumberRow?: string) { + if (!lineNumberRow) return ''; + + // TODO: This doesn't work if there is a function definition in the code + // and the error is thrown from that function. + + const regex = new RegExp( + `at ${VM_WRAPPER_FN_NAME} \\(evalmachine\\.:(?\\d+):`, + ); + const errorLineNumberMatch = lineNumberRow.match(regex); + if (!errorLineNumberMatch?.groups?.lineNumber) return null; + + const lineNumber = errorLineNumberMatch.groups.lineNumber; + if (!lineNumber) return ''; + + this.lineNumber = Number(lineNumber); + + return this.itemIndex === undefined + ? `[line ${lineNumber}]` + : `[line ${lineNumber}, for item ${this.itemIndex}]`; + } + + private toErrorDetailsAndType(messageRow?: string) { + if (!messageRow) return [null, null]; + + const [errorDetails, errorType] = messageRow + .split(':') + .reverse() + .map((i) => i.trim()); + + return [errorDetails, errorType === 'Error' ? null : errorType]; + } +} diff --git a/packages/@n8n/task-runner/src/js-task-runner/errors/serializable-error.ts b/packages/@n8n/task-runner/src/js-task-runner/errors/serializable-error.ts new file mode 100644 index 0000000000..cd0e568de0 --- /dev/null +++ b/packages/@n8n/task-runner/src/js-task-runner/errors/serializable-error.ts @@ -0,0 +1,21 @@ +/** + * Error that has its message property serialized as well. Used to transport + * errors over the wire. 
+ */ +export abstract class SerializableError extends Error { + constructor(message: string) { + super(message); + + // So it is serialized as well + this.makeMessageEnumerable(); + } + + private makeMessageEnumerable() { + Object.defineProperty(this, 'message', { + value: this.message, + enumerable: true, // This makes the message property enumerable + writable: true, + configurable: true, + }); + } +} diff --git a/packages/@n8n/task-runner/src/js-task-runner/errors/validation-error.ts b/packages/@n8n/task-runner/src/js-task-runner/errors/validation-error.ts new file mode 100644 index 0000000000..bf66136ccf --- /dev/null +++ b/packages/@n8n/task-runner/src/js-task-runner/errors/validation-error.ts @@ -0,0 +1,44 @@ +import { SerializableError } from './serializable-error'; + +export class ValidationError extends SerializableError { + description = ''; + + itemIndex: number | undefined = undefined; + + context: { itemIndex: number } | undefined = undefined; + + lineNumber: number | undefined = undefined; + + constructor({ + message, + description, + itemIndex, + lineNumber, + }: { + message: string; + description: string; + itemIndex?: number; + lineNumber?: number; + }) { + super(message); + + this.lineNumber = lineNumber; + this.itemIndex = itemIndex; + + if (this.lineNumber !== undefined && this.itemIndex !== undefined) { + this.message = `${message} [line ${lineNumber}, for item ${itemIndex}]`; + } else if (this.lineNumber !== undefined) { + this.message = `${message} [line ${lineNumber}]`; + } else if (this.itemIndex !== undefined) { + this.message = `${message} [item ${itemIndex}]`; + } else { + this.message = message; + } + + this.description = description; + + if (this.itemIndex !== undefined) { + this.context = { itemIndex: this.itemIndex }; + } + } +} diff --git a/packages/@n8n/task-runner/src/js-task-runner/js-task-runner.ts b/packages/@n8n/task-runner/src/js-task-runner/js-task-runner.ts new file mode 100644 index 0000000000..5bf2e06f26 --- /dev/null +++ b/packages/@n8n/task-runner/src/js-task-runner/js-task-runner.ts @@ -0,0 +1,324 @@ +import { getAdditionalKeys } from 'n8n-core'; +import { + WorkflowDataProxy, + // type IWorkflowDataProxyAdditionalKeys, + Workflow, +} from 'n8n-workflow'; +import type { + CodeExecutionMode, + INode, + INodeType, + ITaskDataConnections, + IWorkflowExecuteAdditionalData, + WorkflowParameters, + IDataObject, + IExecuteData, + INodeExecutionData, + INodeParameters, + IRunExecutionData, + WorkflowExecuteMode, + EnvProviderState, +} from 'n8n-workflow'; +import * as a from 'node:assert'; +import { runInNewContext, type Context } from 'node:vm'; + +import type { TaskResultData } from '@/runner-types'; +import { type Task, TaskRunner } from '@/task-runner'; + +import { isErrorLike } from './errors/error-like'; +import { ExecutionError } from './errors/execution-error'; +import type { RequireResolver } from './require-resolver'; +import { createRequireResolver } from './require-resolver'; +import { validateRunForAllItemsOutput, validateRunForEachItemOutput } from './result-validation'; + +export interface JSExecSettings { + code: string; + nodeMode: CodeExecutionMode; + workflowMode: WorkflowExecuteMode; + continueOnFail: boolean; + + // For workflow data proxy + mode: WorkflowExecuteMode; +} + +export interface PartialAdditionalData { + executionId?: string; + restartExecutionId?: string; + restApiUrl: string; + instanceBaseUrl: string; + formWaitingBaseUrl: string; + webhookBaseUrl: string; + webhookWaitingBaseUrl: string; + webhookTestBaseUrl: 
string; + currentNodeParameters?: INodeParameters; + executionTimeoutTimestamp?: number; + userId?: string; + variables: IDataObject; +} + +export interface AllCodeTaskData { + workflow: Omit<WorkflowParameters, 'nodeTypes'>; + inputData: ITaskDataConnections; + node: INode; + + runExecutionData: IRunExecutionData; + runIndex: number; + itemIndex: number; + activeNodeName: string; + connectionInputData: INodeExecutionData[]; + siblingParameters: INodeParameters; + mode: WorkflowExecuteMode; + envProviderState?: EnvProviderState; + executeData?: IExecuteData; + defaultReturnRunIndex: number; + selfData: IDataObject; + contextNodeName: string; + additionalData: PartialAdditionalData; +} + +export interface JsTaskRunnerOpts { + wsUrl: string; + grantToken: string; + maxConcurrency: number; + name?: string; + /** + * List of built-in nodejs modules that are allowed to be required in the + * execution sandbox. Asterisk (*) can be used to allow all. + */ + allowedBuiltInModules?: string; + /** + * List of npm modules that are allowed to be required in the execution + * sandbox. Asterisk (*) can be used to allow all. + */ + allowedExternalModules?: string; +} + +type CustomConsole = { + log: (...args: unknown[]) => void; +}; + +export class JsTaskRunner extends TaskRunner { + private readonly requireResolver: RequireResolver; + + constructor({ + grantToken, + maxConcurrency, + wsUrl, + name = 'JS Task Runner', + allowedBuiltInModules, + allowedExternalModules, + }: JsTaskRunnerOpts) { + super('javascript', wsUrl, grantToken, maxConcurrency, name); + + const parseModuleAllowList = (moduleList: string) => + moduleList === '*' ? null : new Set(moduleList.split(',').map((x) => x.trim())); + + this.requireResolver = createRequireResolver({ + allowedBuiltInModules: parseModuleAllowList(allowedBuiltInModules ?? ''), + allowedExternalModules: parseModuleAllowList(allowedExternalModules ?? ''), + }); + } + + async executeTask(task: Task<JSExecSettings>): Promise<TaskResultData> { + const allData = await this.requestData<AllCodeTaskData>(task.taskId, 'all'); + + const settings = task.settings; + a.ok(settings, 'JS Code not sent to runner'); + + const workflowParams = allData.workflow; + const workflow = new Workflow({ + ...workflowParams, + nodeTypes: { + getByNameAndVersion() { + return undefined as unknown as INodeType; + }, + getByName() { + return undefined as unknown as INodeType; + }, + getKnownTypes() { + return {}; + }, + }, + }); + + const customConsole = { + // Send log output back to the main process. It will take care of forwarding + // it to the UI or printing to console. + log: (...args: unknown[]) => { + const logOutput = args + .map((arg) => (typeof arg === 'object' && arg !== null ? JSON.stringify(arg) : arg)) + .join(' '); + void this.makeRpcCall(task.taskId, 'logNodeOutput', [logOutput]); + }, + }; + + const result = + settings.nodeMode === 'runOnceForAllItems' + ?
await this.runForAllItems(task.taskId, settings, allData, workflow, customConsole) + : await this.runForEachItem(task.taskId, settings, allData, workflow, customConsole); + + return { + result, + customData: allData.runExecutionData.resultData.metadata, + }; + } + + /** + * Executes the requested code for all items in a single run + */ + private async runForAllItems( + taskId: string, + settings: JSExecSettings, + allData: AllCodeTaskData, + workflow: Workflow, + customConsole: CustomConsole, + ): Promise<INodeExecutionData[]> { + const dataProxy = this.createDataProxy(allData, workflow, allData.itemIndex); + const inputItems = allData.connectionInputData; + + const context: Context = { + require: this.requireResolver, + module: {}, + console: customConsole, + + items: inputItems, + ...dataProxy, + ...this.buildRpcCallObject(taskId), + }; + + try { + const result = (await runInNewContext( + `module.exports = async function VmCodeWrapper() {${settings.code}\n}()`, + context, + )) as TaskResultData['result']; + + if (result === null) { + return []; + } + + return validateRunForAllItemsOutput(result); + } catch (e) { + // Errors thrown by the VM are not instances of Error, so map them to an ExecutionError + const error = this.toExecutionErrorIfNeeded(e); + + if (settings.continueOnFail) { + return [{ json: { error: error.message } }]; + } + + throw error; + } + } + + /** + * Executes the requested code for each item in the input data + */ + private async runForEachItem( + taskId: string, + settings: JSExecSettings, + allData: AllCodeTaskData, + workflow: Workflow, + customConsole: CustomConsole, + ): Promise<INodeExecutionData[]> { + const inputItems = allData.connectionInputData; + const returnData: INodeExecutionData[] = []; + + for (let index = 0; index < inputItems.length; index++) { + const item = inputItems[index]; + const dataProxy = this.createDataProxy(allData, workflow, index); + const context: Context = { + require: this.requireResolver, + module: {}, + console: customConsole, + item, + + ...dataProxy, + ...this.buildRpcCallObject(taskId), + }; + + try { + let result = (await runInNewContext( + `module.exports = async function VmCodeWrapper() {${settings.code}\n}()`, + context, + )) as INodeExecutionData | undefined; + + // Filter out null values + if (result === null) { + continue; + } + + result = validateRunForEachItemOutput(result, index); + if (result) { + returnData.push( + result.binary + ?
{ + json: result.json, + pairedItem: { item: index }, + binary: result.binary, + } + : { + json: result.json, + pairedItem: { item: index }, + }, + ); + } + } catch (e) { + // Errors thrown by the VM are not instances of Error, so map them to an ExecutionError + const error = this.toExecutionErrorIfNeeded(e); + + if (!settings.continueOnFail) { + throw error; + } + + returnData.push({ + json: { error: error.message }, + pairedItem: { + item: index, + }, + }); + } + } + + return returnData; + } + + private createDataProxy(allData: AllCodeTaskData, workflow: Workflow, itemIndex: number) { + return new WorkflowDataProxy( + workflow, + allData.runExecutionData, + allData.runIndex, + itemIndex, + allData.activeNodeName, + allData.connectionInputData, + allData.siblingParameters, + allData.mode, + getAdditionalKeys( + allData.additionalData as IWorkflowExecuteAdditionalData, + allData.mode, + allData.runExecutionData, + ), + allData.executeData, + allData.defaultReturnRunIndex, + allData.selfData, + allData.contextNodeName, + // Make sure that even if we don't receive the envProviderState for + // whatever reason, we don't expose the task runner's env to the code + allData.envProviderState ?? { + env: {}, + isEnvAccessBlocked: false, + isProcessAvailable: true, + }, + ).getDataProxy(); + } + + private toExecutionErrorIfNeeded(error: unknown): Error { + if (error instanceof Error) { + return error; + } + + if (isErrorLike(error)) { + return new ExecutionError(error); + } + + return new ExecutionError({ message: JSON.stringify(error) }); + } +} diff --git a/packages/@n8n/task-runner/src/js-task-runner/obj-utils.ts b/packages/@n8n/task-runner/src/js-task-runner/obj-utils.ts new file mode 100644 index 0000000000..1e49e475d2 --- /dev/null +++ b/packages/@n8n/task-runner/src/js-task-runner/obj-utils.ts @@ -0,0 +1,5 @@ +export function isObject(maybe: unknown): maybe is { [key: string]: unknown } { + return ( + typeof maybe === 'object' && maybe !== null && !Array.isArray(maybe) && !(maybe instanceof Date) + ); +} diff --git a/packages/@n8n/task-runner/src/js-task-runner/require-resolver.ts b/packages/@n8n/task-runner/src/js-task-runner/require-resolver.ts new file mode 100644 index 0000000000..ffa00c0441 --- /dev/null +++ b/packages/@n8n/task-runner/src/js-task-runner/require-resolver.ts @@ -0,0 +1,43 @@ +import { ApplicationError } from 'n8n-workflow'; +import { isBuiltin } from 'node:module'; + +import { ExecutionError } from './errors/execution-error'; + +export type RequireResolverOpts = { + /** + * List of built-in nodejs modules that are allowed to be required in the + * execution sandbox. `null` means all are allowed. + */ + allowedBuiltInModules: Set<string> | null; + + /** + * List of external modules that are allowed to be required in the + * execution sandbox. `null` means all are allowed. + */ + allowedExternalModules: Set<string> | null; +}; + +export type RequireResolver = (request: string) => unknown; + +export function createRequireResolver({ + allowedBuiltInModules, + allowedExternalModules, +}: RequireResolverOpts) { + return (request: string) => { + const checkIsAllowed = (allowList: Set<string> | null, moduleName: string) => { + return allowList ? allowList.has(moduleName) : true; + }; + + const isAllowed = isBuiltin(request) ?
checkIsAllowed(allowedBuiltInModules, request) + : checkIsAllowed(allowedExternalModules, request); + + if (!isAllowed) { + const error = new ApplicationError(`Cannot find module '${request}'`); + throw new ExecutionError(error); + } + + // eslint-disable-next-line @typescript-eslint/no-var-requires + return require(request) as unknown; + }; +} diff --git a/packages/@n8n/task-runner/src/js-task-runner/result-validation.ts b/packages/@n8n/task-runner/src/js-task-runner/result-validation.ts new file mode 100644 index 0000000000..b7d0ffc5fc --- /dev/null +++ b/packages/@n8n/task-runner/src/js-task-runner/result-validation.ts @@ -0,0 +1,116 @@ +import { normalizeItems } from 'n8n-core'; +import type { INodeExecutionData } from 'n8n-workflow'; + +import { ValidationError } from './errors/validation-error'; +import { isObject } from './obj-utils'; + +export const REQUIRED_N8N_ITEM_KEYS = new Set(['json', 'binary', 'pairedItem', 'error']); + +function validateTopLevelKeys(item: INodeExecutionData, itemIndex: number) { + for (const key in item) { + if (Object.prototype.hasOwnProperty.call(item, key)) { + if (REQUIRED_N8N_ITEM_KEYS.has(key)) return; + + throw new ValidationError({ + message: `Unknown top-level item key: ${key}`, + description: 'Access the properties of an item under `.json`, e.g. `item.json`', + itemIndex, + }); + } + } +} + +function validateItem({ json, binary }: INodeExecutionData, itemIndex: number) { + if (json === undefined || !isObject(json)) { + throw new ValidationError({ + message: "A 'json' property isn't an object", + description: "In the returned data, every key named 'json' must point to an object.", + itemIndex, + }); + } + + if (binary !== undefined && !isObject(binary)) { + throw new ValidationError({ + message: "A 'binary' property isn't an object", + description: "In the returned data, every key named 'binary' must point to an object.", + itemIndex, + }); + } +} + +/** + * Validates the output of a code node in 'Run for All Items' mode. + */ +export function validateRunForAllItemsOutput( + executionResult: INodeExecutionData | INodeExecutionData[] | undefined, +) { + if (typeof executionResult !== 'object') { + throw new ValidationError({ + message: "Code doesn't return items properly", + description: 'Please return an array of objects, one for each item you would like to output.', + }); + } + + if (Array.isArray(executionResult)) { + /** + * If at least one top-level key is an n8n item key (`json`, `binary`, etc.), + * then require all item keys to be an n8n item key. + * + * If no top-level key is an n8n key, then skip this check, allowing non-n8n + * item keys to be wrapped in `json` when normalizing items below. + */ + const mustHaveTopLevelN8nKey = executionResult.some((item) => + Object.keys(item).find((key) => REQUIRED_N8N_ITEM_KEYS.has(key)), + ); + + if (mustHaveTopLevelN8nKey) { + for (let index = 0; index < executionResult.length; index++) { + const item = executionResult[index]; + validateTopLevelKeys(item, index); + } + } + } + + const returnData = normalizeItems(executionResult); + returnData.forEach(validateItem); + return returnData; +} + +/** + * Validates the output of a code node in 'Run for Each Item' mode for single item + */ +export function validateRunForEachItemOutput( + executionResult: INodeExecutionData | undefined, + itemIndex: number, +) { + if (typeof executionResult !== 'object') { + throw new ValidationError({ + message: "Code doesn't return an object", + description: `Please return an object representing the output item. 
('${executionResult}' was returned instead.)`, + itemIndex, + }); + } + + if (Array.isArray(executionResult)) { + const firstSentence = + executionResult.length > 0 + ? `An array of ${typeof executionResult[0]}s was returned.` + : 'An empty array was returned.'; + throw new ValidationError({ + message: "Code doesn't return a single object", + description: `${firstSentence} If you need to output multiple items, please use the 'Run Once for All Items' mode instead.`, + itemIndex, + }); + } + + const [returnData] = normalizeItems([executionResult]); + + validateItem(returnData, itemIndex); + + // If at least one top-level key is a supported item key (`json`, `binary`, etc.), + // and another top-level key is unrecognized, then the user mis-added a property + // directly on the item, when they intended to add it on the `json` property + validateTopLevelKeys(returnData, itemIndex); + + return returnData; +} diff --git a/packages/@n8n/task-runner/src/runner-types.ts b/packages/@n8n/task-runner/src/runner-types.ts new file mode 100644 index 0000000000..27b4e9a76c --- /dev/null +++ b/packages/@n8n/task-runner/src/runner-types.ts @@ -0,0 +1,231 @@ +import type { INodeExecutionData } from 'n8n-workflow'; + +export type DataRequestType = 'input' | 'node' | 'all'; + +export interface TaskResultData { + result: INodeExecutionData[]; + customData?: Record<string, unknown>; +} + +export namespace N8nMessage { + export namespace ToRunner { + export interface InfoRequest { + type: 'broker:inforequest'; + } + + export interface RunnerRegistered { + type: 'broker:runnerregistered'; + } + + export interface TaskOfferAccept { + type: 'broker:taskofferaccept'; + taskId: string; + offerId: string; + } + + export interface TaskCancel { + type: 'broker:taskcancel'; + taskId: string; + reason: string; + } + + export interface TaskSettings { + type: 'broker:tasksettings'; + taskId: string; + settings: unknown; + } + + export interface RPCResponse { + type: 'broker:rpcresponse'; + callId: string; + taskId: string; + status: 'success' | 'error'; + data: unknown; + } + + export interface TaskDataResponse { + type: 'broker:taskdataresponse'; + taskId: string; + requestId: string; + data: unknown; + } + + export type All = + | InfoRequest + | TaskOfferAccept + | TaskCancel + | TaskSettings + | RunnerRegistered + | RPCResponse + | TaskDataResponse; + } + + export namespace ToRequester { + export interface TaskReady { + type: 'broker:taskready'; + requestId: string; + taskId: string; + } + + export interface TaskDone { + type: 'broker:taskdone'; + taskId: string; + data: TaskResultData; + } + + export interface TaskError { + type: 'broker:taskerror'; + taskId: string; + error: unknown; + } + + export interface TaskDataRequest { + type: 'broker:taskdatarequest'; + taskId: string; + requestId: string; + requestType: DataRequestType; + param?: string; + } + + export interface RPC { + type: 'broker:rpc'; + callId: string; + taskId: string; + name: (typeof RPC_ALLOW_LIST)[number]; + params: unknown[]; + } + + export type All = TaskReady | TaskDone | TaskError | TaskDataRequest | RPC; + } +} + +export namespace RequesterMessage { + export namespace ToN8n { + export interface TaskSettings { + type: 'requester:tasksettings'; + taskId: string; + settings: unknown; + } + + export interface TaskCancel { + type: 'requester:taskcancel'; + taskId: string; + reason: string; + } + + export interface TaskDataResponse { + type: 'requester:taskdataresponse'; + taskId: string; + requestId: string; + data: unknown; + } + + export interface RPCResponse {
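+ // Editorial note: this is the requester-side mirror of
+ // N8nMessage.ToRunner.RPCResponse — the response echoes the callId of a
+ // runner RPC so the runner can settle the matching promise it keeps in its
+ // rpcCalls map (see handleRpcResponse later in this diff).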
type: 'requester:rpcresponse'; + taskId: string; + callId: string; + status: 'success' | 'error'; + data: unknown; + } + + export interface TaskRequest { + type: 'requester:taskrequest'; + requestId: string; + taskType: string; + } + + export type All = TaskSettings | TaskCancel | RPCResponse | TaskDataResponse | TaskRequest; + } +} + +export namespace RunnerMessage { + export namespace ToN8n { + export interface Info { + type: 'runner:info'; + name: string; + types: string[]; + } + + export interface TaskAccepted { + type: 'runner:taskaccepted'; + taskId: string; + } + + export interface TaskRejected { + type: 'runner:taskrejected'; + taskId: string; + reason: string; + } + + export interface TaskDone { + type: 'runner:taskdone'; + taskId: string; + data: TaskResultData; + } + + export interface TaskError { + type: 'runner:taskerror'; + taskId: string; + error: unknown; + } + + export interface TaskOffer { + type: 'runner:taskoffer'; + offerId: string; + taskType: string; + validFor: number; + } + + export interface TaskDataRequest { + type: 'runner:taskdatarequest'; + taskId: string; + requestId: string; + requestType: DataRequestType; + param?: string; + } + + export interface RPC { + type: 'runner:rpc'; + callId: string; + taskId: string; + name: (typeof RPC_ALLOW_LIST)[number]; + params: unknown[]; + } + + export type All = + | Info + | TaskDone + | TaskError + | TaskAccepted + | TaskRejected + | TaskOffer + | RPC + | TaskDataRequest; + } +} + +export const RPC_ALLOW_LIST = [ + 'helpers.httpRequestWithAuthentication', + 'helpers.requestWithAuthenticationPaginated', + // "helpers.normalizeItems" + // "helpers.constructExecutionMetaData" + // "helpers.assertBinaryData" + 'helpers.getBinaryDataBuffer', + // "helpers.copyInputItems" + // "helpers.returnJsonArray" + 'helpers.getSSHClient', + 'helpers.createReadStream', + // "helpers.getStoragePath" + 'helpers.writeContentToFile', + 'helpers.prepareBinaryData', + 'helpers.setBinaryDataBuffer', + 'helpers.copyBinaryFile', + 'helpers.binaryToBuffer', + // "helpers.binaryToString" + // "helpers.getBinaryPath" + 'helpers.getBinaryStream', + 'helpers.getBinaryMetadata', + 'helpers.createDeferredPromise', + 'helpers.httpRequest', + 'logNodeOutput', +] as const; diff --git a/packages/@n8n/task-runner/src/start.ts b/packages/@n8n/task-runner/src/start.ts new file mode 100644 index 0000000000..5f856140d9 --- /dev/null +++ b/packages/@n8n/task-runner/src/start.ts @@ -0,0 +1,83 @@ +import { ApplicationError, ensureError } from 'n8n-workflow'; +import * as a from 'node:assert/strict'; + +import { authenticate } from './authenticator'; +import { JsTaskRunner } from './js-task-runner/js-task-runner'; + +let runner: JsTaskRunner | undefined; +let isShuttingDown = false; + +type Config = { + n8nUri: string; + authToken?: string; + grantToken?: string; +}; + +function readAndParseConfig(): Config { + const authToken = process.env.N8N_RUNNERS_AUTH_TOKEN; + const grantToken = process.env.N8N_RUNNERS_GRANT_TOKEN; + if (!authToken && !grantToken) { + throw new ApplicationError( + 'Missing task runner authentication. Use either N8N_RUNNERS_AUTH_TOKEN or N8N_RUNNERS_GRANT_TOKEN to configure it', + ); + } + + return { + n8nUri: process.env.N8N_RUNNERS_N8N_URI ?? 
'127.0.0.1:5679', + authToken, + grantToken, + }; +} + +function createSignalHandler(signal: string) { + return async function onSignal() { + if (isShuttingDown) { + return; + } + + console.log(`Received ${signal} signal, shutting down...`); + + isShuttingDown = true; + try { + if (runner) { + await runner.stop(); + runner = undefined; + } + } catch (e) { + const error = ensureError(e); + console.error('Error stopping task runner', { error }); + } finally { + process.exit(0); + } + }; +} + +void (async function start() { + const config = readAndParseConfig(); + + let grantToken = config.grantToken; + if (!grantToken) { + a.ok(config.authToken); + + grantToken = await authenticate({ + authToken: config.authToken, + n8nUri: config.n8nUri, + }); + } + + const wsUrl = `ws://${config.n8nUri}/runners/_ws`; + runner = new JsTaskRunner({ + wsUrl, + grantToken, + maxConcurrency: 5, + allowedBuiltInModules: process.env.NODE_FUNCTION_ALLOW_BUILTIN, + allowedExternalModules: process.env.NODE_FUNCTION_ALLOW_EXTERNAL, + }); + + process.on('SIGINT', createSignalHandler('SIGINT')); + process.on('SIGTERM', createSignalHandler('SIGTERM')); +})().catch((e) => { + const error = ensureError(e); + console.error('Task runner failed to start', { error }); + process.exit(1); +}); diff --git a/packages/@n8n/task-runner/src/task-runner.ts b/packages/@n8n/task-runner/src/task-runner.ts new file mode 100644 index 0000000000..ac8378636a --- /dev/null +++ b/packages/@n8n/task-runner/src/task-runner.ts @@ -0,0 +1,390 @@ +import { ApplicationError } from 'n8n-workflow'; +import { nanoid } from 'nanoid'; +import { URL } from 'node:url'; +import { type MessageEvent, WebSocket } from 'ws'; + +import { + RPC_ALLOW_LIST, + type RunnerMessage, + type N8nMessage, + type TaskResultData, +} from './runner-types'; + +export interface Task<T = unknown> { + taskId: string; + settings?: T; + active: boolean; + cancelled: boolean; +} + +export interface TaskOffer { + offerId: string; + validUntil: bigint; +} + +interface DataRequest { + requestId: string; + resolve: (data: unknown) => void; + reject: (error: unknown) => void; +} + +interface RPCCall { + callId: string; + resolve: (data: unknown) => void; + reject: (error: unknown) => void; +} + +export interface RPCCallObject { + [name: string]: ((...args: unknown[]) => Promise<unknown>) | RPCCallObject; +} + +const VALID_TIME_MS = 1000; +const VALID_EXTRA_MS = 100; + +export abstract class TaskRunner { + id: string = nanoid(); + + ws: WebSocket; + + canSendOffers = false; + + runningTasks: Map<Task['taskId'], Task> = new Map(); + + offerInterval: NodeJS.Timeout | undefined; + + openOffers: Map<TaskOffer['offerId'], TaskOffer> = new Map(); + + dataRequests: Map<DataRequest['requestId'], DataRequest> = new Map(); + + rpcCalls: Map<RPCCall['callId'], RPCCall> = new Map(); + + constructor( + public taskType: string, + wsUrl: string, + grantToken: string, + private maxConcurrency: number, + public name?: string, + ) { + const url = new URL(wsUrl); + url.searchParams.append('id', this.id); + this.ws = new WebSocket(url.toString(), { + headers: { + authorization: `Bearer ${grantToken}`, + }, + }); + this.ws.addEventListener('message', this.receiveMessage); + this.ws.addEventListener('close', this.stopTaskOffers); + } + + private receiveMessage = (message: MessageEvent) => { + // eslint-disable-next-line n8n-local-rules/no-uncaught-json-parse + const data = JSON.parse(message.data as string) as N8nMessage.ToRunner.All; + void this.onMessage(data); + }; + + private stopTaskOffers = () => { + this.canSendOffers = false; + if (this.offerInterval) { + clearInterval(this.offerInterval); + this.offerInterval = undefined; + } + };
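+ // Editorial note on the offer lifecycle implemented below: once registered,
+ // the runner tops up its open offers to maxConcurrency every 250 ms, each
+ // offer valid for VALID_TIME_MS (plus VALID_EXTRA_MS of latency slack). The
+ // broker accepts one via 'broker:taskofferaccept', and the runner either
+ // confirms with 'runner:taskaccepted' or declines with 'runner:taskrejected'
+ // when the offer has expired or all slots are taken.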
private startTaskOffers() { + this.canSendOffers = true; + if (this.offerInterval) { + clearInterval(this.offerInterval); + } + this.offerInterval = setInterval(() => this.sendOffers(), 250); + } + + deleteStaleOffers() { + this.openOffers.forEach((offer, key) => { + if (offer.validUntil < process.hrtime.bigint()) { + this.openOffers.delete(key); + } + }); + } + + sendOffers() { + this.deleteStaleOffers(); + + // `openOffers` and `runningTasks` are Maps, so use `.size` (not + // `Object.values(...).length`, which is always 0 for a Map) + const offersToSend = this.maxConcurrency - (this.openOffers.size + this.runningTasks.size); + + for (let i = 0; i < offersToSend; i++) { + const offer: TaskOffer = { + offerId: nanoid(), + validUntil: process.hrtime.bigint() + BigInt((VALID_TIME_MS + VALID_EXTRA_MS) * 1_000_000), // Adding a little extra time to account for latency + }; + this.openOffers.set(offer.offerId, offer); + this.send({ + type: 'runner:taskoffer', + taskType: this.taskType, + offerId: offer.offerId, + validFor: VALID_TIME_MS, + }); + } + } + + send(message: RunnerMessage.ToN8n.All) { + this.ws.send(JSON.stringify(message)); + } + + onMessage(message: N8nMessage.ToRunner.All) { + switch (message.type) { + case 'broker:inforequest': + this.send({ + type: 'runner:info', + name: this.name ?? 'Node.js Task Runner SDK', + types: [this.taskType], + }); + break; + case 'broker:runnerregistered': + this.startTaskOffers(); + break; + case 'broker:taskofferaccept': + this.offerAccepted(message.offerId, message.taskId); + break; + case 'broker:taskcancel': + this.taskCancelled(message.taskId); + break; + case 'broker:tasksettings': + void this.receivedSettings(message.taskId, message.settings); + break; + case 'broker:taskdataresponse': + this.processDataResponse(message.requestId, message.data); + break; + case 'broker:rpcresponse': + this.handleRpcResponse(message.callId, message.status, message.data); + } + } + + processDataResponse(requestId: string, data: unknown) { + const request = this.dataRequests.get(requestId); + if (!request) { + return; + } + // Deleting of the request is handled in `requestData`, using a + // `finally` wrapped around the return + request.resolve(data); + } + + hasOpenTasks() { + return this.runningTasks.size < this.maxConcurrency; + } + + offerAccepted(offerId: string, taskId: string) { + if (!this.hasOpenTasks()) { + this.send({ + type: 'runner:taskrejected', + taskId, + reason: 'No open task slots', + }); + return; + } + const offer = this.openOffers.get(offerId); + if (!offer) { + this.send({ + type: 'runner:taskrejected', + taskId, + reason: 'Offer expired and no open task slots', + }); + return; + } else { + this.openOffers.delete(offerId); + } + + this.runningTasks.set(taskId, { + taskId, + active: false, + cancelled: false, + }); + + this.send({ + type: 'runner:taskaccepted', + taskId, + }); + } + + taskCancelled(taskId: string) { + const task = this.runningTasks.get(taskId); + if (!task) { + return; + } + task.cancelled = true; + if (task.active) { + // TODO + } else { + this.runningTasks.delete(taskId); + } + this.sendOffers(); + } + + taskErrored(taskId: string, error: unknown) { + this.send({ + type: 'runner:taskerror', + taskId, + error, + }); + this.runningTasks.delete(taskId); + this.sendOffers(); + } + + taskDone(taskId: string, data: RunnerMessage.ToN8n.TaskDone['data']) { + this.send({ + type: 'runner:taskdone', + taskId, + data, + }); + this.runningTasks.delete(taskId); + this.sendOffers(); + } + + async receivedSettings(taskId: string, settings: unknown) { + const task = this.runningTasks.get(taskId); + if (!task) {
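+ // Settings may arrive for a task this runner no longer tracks (e.g. one
+ // cancelled before it became active), in which case they are ignored.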
return; + } + if (task.cancelled) { + this.runningTasks.delete(taskId); + return; + } + task.settings = settings; + task.active = true; + try { + const data = await this.executeTask(task); + this.taskDone(taskId, data); + } catch (error) { + this.taskErrored(taskId, error); + } + } + + // eslint-disable-next-line @typescript-eslint/naming-convention + async executeTask(_task: Task): Promise { + throw new ApplicationError('Unimplemented'); + } + + async requestData( + taskId: Task['taskId'], + type: RunnerMessage.ToN8n.TaskDataRequest['requestType'], + param?: string, + ): Promise { + const requestId = nanoid(); + + const p = new Promise((resolve, reject) => { + this.dataRequests.set(requestId, { + requestId, + resolve: resolve as (data: unknown) => void, + reject, + }); + }); + + this.send({ + type: 'runner:taskdatarequest', + taskId, + requestId, + requestType: type, + param, + }); + + try { + return await p; + } finally { + this.dataRequests.delete(requestId); + } + } + + async makeRpcCall(taskId: string, name: RunnerMessage.ToN8n.RPC['name'], params: unknown[]) { + const callId = nanoid(); + + const dataPromise = new Promise((resolve, reject) => { + this.rpcCalls.set(callId, { + callId, + resolve, + reject, + }); + }); + + this.send({ + type: 'runner:rpc', + callId, + taskId, + name, + params, + }); + + try { + return await dataPromise; + } finally { + this.rpcCalls.delete(callId); + } + } + + handleRpcResponse( + callId: string, + status: N8nMessage.ToRunner.RPCResponse['status'], + data: unknown, + ) { + const call = this.rpcCalls.get(callId); + if (!call) { + return; + } + if (status === 'success') { + call.resolve(data); + } else { + call.reject(typeof data === 'string' ? new Error(data) : data); + } + } + + buildRpcCallObject(taskId: string) { + const rpcObject: RPCCallObject = {}; + for (const r of RPC_ALLOW_LIST) { + const splitPath = r.split('.'); + let obj = rpcObject; + + splitPath.forEach((s, index) => { + if (index !== splitPath.length - 1) { + obj[s] = {}; + obj = obj[s]; + return; + } + obj[s] = async (...args: unknown[]) => await this.makeRpcCall(taskId, r, args); + }); + } + return rpcObject; + } + + /** Close the connection gracefully and wait until has been closed */ + async stop() { + this.stopTaskOffers(); + + await this.waitUntilAllTasksAreDone(); + + await this.closeConnection(); + } + + private async closeConnection() { + // 1000 is the standard close code + // https://www.rfc-editor.org/rfc/rfc6455.html#section-7.1.5 + this.ws.close(1000, 'Shutting down'); + + await new Promise((resolve) => { + this.ws.once('close', resolve); + }); + } + + private async waitUntilAllTasksAreDone(maxWaitTimeInMs = 30_000) { + // TODO: Make maxWaitTimeInMs configurable + const start = Date.now(); + + while (this.runningTasks.size > 0) { + if (Date.now() - start > maxWaitTimeInMs) { + throw new ApplicationError('Timeout while waiting for tasks to finish'); + } + + await new Promise((resolve) => setTimeout(resolve, 100)); + } + } +} diff --git a/packages/@n8n/task-runner/tsconfig.build.json b/packages/@n8n/task-runner/tsconfig.build.json new file mode 100644 index 0000000000..59065a1e2b --- /dev/null +++ b/packages/@n8n/task-runner/tsconfig.build.json @@ -0,0 +1,11 @@ +{ + "extends": ["./tsconfig.json", "../../../tsconfig.build.json"], + "compilerOptions": { + "composite": true, + "rootDir": "src", + "outDir": "dist", + "tsBuildInfoFile": "dist/build.tsbuildinfo" + }, + "include": ["src/**/*.ts"], + "exclude": ["src/**/__tests__/**"] +} diff --git 
a/packages/@n8n/task-runner/tsconfig.json b/packages/@n8n/task-runner/tsconfig.json new file mode 100644 index 0000000000..db6ad545e3 --- /dev/null +++ b/packages/@n8n/task-runner/tsconfig.json @@ -0,0 +1,12 @@ +{ + "extends": ["../../../tsconfig.json", "../../../tsconfig.backend.json"], + "compilerOptions": { + "rootDir": ".", + "baseUrl": "src", + "paths": { + "@/*": ["./*"] + }, + "tsBuildInfoFile": "dist/typecheck.tsbuildinfo" + }, + "include": ["src/**/*.ts"] +} diff --git a/packages/cli/.eslintrc.js b/packages/cli/.eslintrc.js index a3a1e612ab..6002da5caa 100644 --- a/packages/cli/.eslintrc.js +++ b/packages/cli/.eslintrc.js @@ -71,5 +71,11 @@ module.exports = { ], }, }, + { + files: ['./test/**/*.ts', './src/**/__tests__/**/*.ts'], + rules: { + 'n8n-local-rules/no-dynamic-import-template': 'off', + }, + }, ], }; diff --git a/packages/cli/BREAKING-CHANGES.md b/packages/cli/BREAKING-CHANGES.md index 869ace642e..bdb1ff5890 100644 --- a/packages/cli/BREAKING-CHANGES.md +++ b/packages/cli/BREAKING-CHANGES.md @@ -2,6 +2,28 @@ This list shows all the versions which include breaking changes and how to upgrade. +# 1.63.0 + +### What changed? + +1. The worker server used to bind to IPv6 by default. It now binds to IPv4 by default. +2. The worker server's `/healthz` used to report healthy status based on database and Redis checks. It now reports healthy status regardless of database and Redis status, and the database and Redis checks are part of `/healthz/readiness`. + +### When is action necessary? + +1. If you experience a port conflict error when starting a worker server using its default port, set a different port for the worker server with `QUEUE_HEALTH_CHECK_PORT`. +2. If you are relying on database and Redis checks for worker health status, switch to checking `/healthz/readiness` instead of `/healthz`. + +## 1.57.0 + +### What changed? + +The `verbose` log level was merged into the `debug` log level. + +### When is action necessary? + +If you are setting the env var `N8N_LOG_LEVEL=verbose`, please update your log level to `N8N_LOG_LEVEL=debug`. + ## 1.55.0 ### What changed? 
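The 1.63.0 worker-server change above splits liveness from readiness. A minimal sketch of how a deployment might probe the two endpoints after upgrading; the host, the `5678` fallback port, and the `fetch`-based checks are assumptions, not part of this diff:

```ts
// Sketch: separate liveness and readiness probes for an n8n worker.
// Assumes the worker is reachable on IPv4 localhost and that
// QUEUE_HEALTH_CHECK_PORT (hypothetical fallback: 5678) is its port.
const port = process.env.QUEUE_HEALTH_CHECK_PORT ?? '5678';
const base = `http://127.0.0.1:${port}`;

/** `/healthz` now reports process health only, regardless of DB/Redis. */
async function isAlive(): Promise<boolean> {
  const res = await fetch(`${base}/healthz`);
  return res.ok;
}

/** DB and Redis checks moved to `/healthz/readiness`. */
async function isReady(): Promise<boolean> {
  const res = await fetch(`${base}/healthz/readiness`);
  return res.ok;
}
```

Monitoring that previously gated worker health on `/healthz` should switch to an `isReady`-style check.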
diff --git a/packages/cli/package.json b/packages/cli/package.json index 3b03bbf0ab..9e80eb34c2 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -1,6 +1,6 @@ { "name": "n8n", - "version": "1.61.0", + "version": "1.63.0", "description": "n8n Workflow Automation Tool", "main": "dist/index", "types": "dist/index.d.ts", @@ -51,13 +51,13 @@ "!dist/**/e2e.*" ], "devDependencies": { - "@redocly/cli": "^1.6.0", + "@redocly/cli": "^1.25.5", "@types/archiver": "^6.0.2", "@types/aws4": "^1.5.1", "@types/bcryptjs": "^2.4.2", "@types/compression": "1.0.1", "@types/convict": "^6.1.1", - "@types/cookie-parser": "^1.4.2", + "@types/cookie-parser": "^1.4.7", "@types/express": "catalog:", "@types/flat": "^5.0.5", "@types/formidable": "^3.4.5", @@ -78,7 +78,6 @@ "@types/xml2js": "catalog:", "@types/yamljs": "^0.2.31", "@vvo/tzdb": "^6.141.0", - "chokidar": "^3.5.2", "concurrently": "^8.2.0", "ioredis-mock": "^8.8.1", "mjml": "^4.15.3", @@ -94,8 +93,9 @@ "@n8n/localtunnel": "3.0.0", "@n8n/n8n-nodes-langchain": "workspace:*", "@n8n/permissions": "workspace:*", - "@n8n/typeorm": "0.3.20-10", - "@n8n_io/ai-assistant-sdk": "1.9.4", + "@n8n/task-runner": "workspace:*", + "@n8n/typeorm": "0.3.20-12", + "@n8n_io/ai-assistant-sdk": "1.10.3", "@n8n_io/license-sdk": "2.13.1", "@oclif/core": "4.0.7", "@rudderstack/rudder-sdk-node": "2.0.9", @@ -114,14 +114,14 @@ "class-validator": "0.14.0", "compression": "1.7.4", "convict": "6.2.4", - "cookie-parser": "1.4.6", + "cookie-parser": "1.4.7", "csrf": "3.1.0", "curlconverter": "3.21.0", "dotenv": "8.6.0", - "express": "4.21.0", + "express": "4.21.1", "express-async-errors": "3.1.1", "express-handlebars": "7.1.2", - "express-openapi-validator": "5.3.3", + "express-openapi-validator": "5.3.7", "express-prom-bundle": "6.6.0", "express-rate-limit": "7.2.0", "fast-glob": "catalog:", @@ -148,7 +148,7 @@ "nodemailer": "6.9.9", "oauth-1.0a": "2.2.6", "open": "7.4.2", - "openapi-types": "10.0.0", + "openapi-types": "12.1.3", "otpauth": "9.1.1", "p-cancelable": "2.1.1", "p-lazy": "3.1.0", @@ -167,16 +167,15 @@ "simple-git": "3.17.0", "source-map-support": "0.5.21", "sqlite3": "5.1.7", - "sse-channel": "4.0.0", "sshpk": "1.17.0", - "swagger-ui-express": "5.0.0", + "swagger-ui-express": "5.0.1", "syslog-client": "1.1.1", "tar-stream": "^3.1.7", "typedi": "catalog:", "unzip-stream": "0.3.4", "uuid": "catalog:", "validator": "13.7.0", - "winston": "3.8.2", + "winston": "3.14.2", "ws": "8.17.1", "xml2js": "catalog:", "xmllint-wasm": "3.0.1", diff --git a/packages/cli/src/__tests__/license.test.ts b/packages/cli/src/__tests__/license.test.ts index 6f2e06752f..67a92b95cd 100644 --- a/packages/cli/src/__tests__/license.test.ts +++ b/packages/cli/src/__tests__/license.test.ts @@ -5,7 +5,7 @@ import type { InstanceSettings } from 'n8n-core'; import config from '@/config'; import { N8N_VERSION } from '@/constants'; import { License } from '@/license'; -import type { Logger } from '@/logger'; +import { mockLogger } from '@test/mocking'; jest.mock('@n8n_io/license-sdk'); @@ -25,37 +25,39 @@ describe('License', () => { }); let license: License; - const logger = mock(); const instanceSettings = mock({ instanceId: MOCK_INSTANCE_ID, instanceType: 'main', }); beforeEach(async () => { - license = new License(logger, instanceSettings, mock(), mock(), mock()); + license = new License(mockLogger(), instanceSettings, mock(), mock(), mock()); await license.init(); }); test('initializes license manager', async () => { - expect(LicenseManager).toHaveBeenCalledWith({ - autoRenewEnabled: 
true, - autoRenewOffset: MOCK_RENEW_OFFSET, - offlineMode: false, - renewOnInit: true, - deviceFingerprint: expect.any(Function), - productIdentifier: `n8n-${N8N_VERSION}`, - logger, - loadCertStr: expect.any(Function), - saveCertStr: expect.any(Function), - onFeatureChange: expect.any(Function), - collectUsageMetrics: expect.any(Function), - collectPassthroughData: expect.any(Function), - server: MOCK_SERVER_URL, - tenantId: 1, - }); + expect(LicenseManager).toHaveBeenCalledWith( + expect.objectContaining({ + autoRenewEnabled: true, + autoRenewOffset: MOCK_RENEW_OFFSET, + offlineMode: false, + renewOnInit: true, + deviceFingerprint: expect.any(Function), + productIdentifier: `n8n-${N8N_VERSION}`, + loadCertStr: expect.any(Function), + saveCertStr: expect.any(Function), + onFeatureChange: expect.any(Function), + collectUsageMetrics: expect.any(Function), + collectPassthroughData: expect.any(Function), + server: MOCK_SERVER_URL, + tenantId: 1, + }), + ); }); test('initializes license manager for worker', async () => { + const logger = mockLogger(); + license = new License( logger, mock({ instanceType: 'worker' }), @@ -64,22 +66,23 @@ describe('License', () => { mock(), ); await license.init(); - expect(LicenseManager).toHaveBeenCalledWith({ - autoRenewEnabled: false, - autoRenewOffset: MOCK_RENEW_OFFSET, - offlineMode: true, - renewOnInit: false, - deviceFingerprint: expect.any(Function), - productIdentifier: `n8n-${N8N_VERSION}`, - logger, - loadCertStr: expect.any(Function), - saveCertStr: expect.any(Function), - onFeatureChange: expect.any(Function), - collectUsageMetrics: expect.any(Function), - collectPassthroughData: expect.any(Function), - server: MOCK_SERVER_URL, - tenantId: 1, - }); + expect(LicenseManager).toHaveBeenCalledWith( + expect.objectContaining({ + autoRenewEnabled: false, + autoRenewOffset: MOCK_RENEW_OFFSET, + offlineMode: true, + renewOnInit: false, + deviceFingerprint: expect.any(Function), + productIdentifier: `n8n-${N8N_VERSION}`, + loadCertStr: expect.any(Function), + saveCertStr: expect.any(Function), + onFeatureChange: expect.any(Function), + collectUsageMetrics: expect.any(Function), + collectPassthroughData: expect.any(Function), + server: MOCK_SERVER_URL, + tenantId: 1, + }), + ); }); test('attempts to activate license with provided key', async () => { @@ -196,7 +199,7 @@ describe('License', () => { it('should enable renewal', async () => { config.set('multiMainSetup.enabled', false); - await new License(mock(), mock(), mock(), mock(), mock()).init(); + await new License(mockLogger(), mock(), mock(), mock(), mock()).init(); expect(LicenseManager).toHaveBeenCalledWith( expect.objectContaining({ autoRenewEnabled: true, renewOnInit: true }), @@ -208,7 +211,7 @@ describe('License', () => { it('should disable renewal', async () => { config.set('license.autoRenewEnabled', false); - await new License(mock(), mock(), mock(), mock(), mock()).init(); + await new License(mockLogger(), mock(), mock(), mock(), mock()).init(); expect(LicenseManager).toHaveBeenCalledWith( expect.objectContaining({ autoRenewEnabled: false, renewOnInit: false }), @@ -226,7 +229,7 @@ describe('License', () => { config.set('multiMainSetup.instanceType', status); config.set('license.autoRenewEnabled', false); - await new License(mock(), mock(), mock(), mock(), mock()).init(); + await new License(mockLogger(), mock(), mock(), mock(), mock()).init(); expect(LicenseManager).toHaveBeenCalledWith( expect.objectContaining({ autoRenewEnabled: false, renewOnInit: false }), @@ -241,7 +244,7 @@ 
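A recurring pattern in the license test refactor above: exact `toHaveBeenCalledWith` objects are replaced with `expect.objectContaining(...)`, so the assertions stop pinning options they do not care about (such as the logger, now injected via `mockLogger()`). A small illustration; `createManager` is a hypothetical stand-in for the mocked `LicenseManager` constructor:

```ts
const createManager = jest.fn();

createManager({ autoRenewEnabled: true, renewOnInit: true, logger: console });

// Exact match: breaks whenever an unrelated option is added or removed.
expect(createManager).toHaveBeenCalledWith({
  autoRenewEnabled: true,
  renewOnInit: true,
  logger: console,
});

// Partial match: asserts only the options under test.
expect(createManager).toHaveBeenCalledWith(
  expect.objectContaining({ autoRenewEnabled: true, renewOnInit: true }),
);
```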
describe('License', () => { config.set('multiMainSetup.instanceType', status); config.set('license.autoRenewEnabled', false); - await new License(mock(), mock(), mock(), mock(), mock()).init(); + await new License(mockLogger(), mock(), mock(), mock(), mock()).init(); expect(LicenseManager).toHaveBeenCalledWith( expect.objectContaining({ autoRenewEnabled: false, renewOnInit: false }), @@ -252,7 +255,7 @@ describe('License', () => { config.set('multiMainSetup.enabled', true); config.set('multiMainSetup.instanceType', 'leader'); - await new License(mock(), mock(), mock(), mock(), mock()).init(); + await new License(mockLogger(), mock(), mock(), mock(), mock()).init(); expect(LicenseManager).toHaveBeenCalledWith( expect.objectContaining({ autoRenewEnabled: true, renewOnInit: true }), @@ -264,7 +267,7 @@ describe('License', () => { describe('reinit', () => { it('should reinitialize license manager', async () => { - const license = new License(mock(), mock(), mock(), mock(), mock()); + const license = new License(mockLogger(), mock(), mock(), mock(), mock()); await license.init(); const initSpy = jest.spyOn(license, 'init'); diff --git a/packages/cli/src/__tests__/load-nodes-and-credentials.test.ts b/packages/cli/src/__tests__/load-nodes-and-credentials.test.ts new file mode 100644 index 0000000000..bcf485445f --- /dev/null +++ b/packages/cli/src/__tests__/load-nodes-and-credentials.test.ts @@ -0,0 +1,37 @@ +import { mock } from 'jest-mock-extended'; +import type { DirectoryLoader } from 'n8n-core'; + +import { LoadNodesAndCredentials } from '../load-nodes-and-credentials'; + +describe('LoadNodesAndCredentials', () => { + describe('resolveIcon', () => { + let instance: LoadNodesAndCredentials; + + beforeEach(() => { + instance = new LoadNodesAndCredentials(mock(), mock(), mock()); + instance.loaders.package1 = mock({ + directory: '/icons/package1', + }); + }); + + it('should return undefined if the loader for the package is not found', () => { + const result = instance.resolveIcon('unknownPackage', '/icons/unknownPackage/icon.png'); + expect(result).toBeUndefined(); + }); + + it('should return undefined if the resolved file path is outside the loader directory', () => { + const result = instance.resolveIcon('package1', '/some/other/path/icon.png'); + expect(result).toBeUndefined(); + }); + + it('should return the file path if the file is within the loader directory', () => { + const result = instance.resolveIcon('package1', '/icons/package1/icon.png'); + expect(result).toBe('/icons/package1/icon.png'); + }); + + it('should return undefined if the URL is outside the package directory', () => { + const result = instance.resolveIcon('package1', '/icons/package1/../../../etc/passwd'); + expect(result).toBeUndefined(); + }); + }); +}); diff --git a/packages/cli/src/__tests__/wait-tracker.test.ts b/packages/cli/src/__tests__/wait-tracker.test.ts index 9ca3a66d33..8f9f31da00 100644 --- a/packages/cli/src/__tests__/wait-tracker.test.ts +++ b/packages/cli/src/__tests__/wait-tracker.test.ts @@ -5,6 +5,7 @@ import type { IExecutionResponse } from '@/interfaces'; import type { MultiMainSetup } from '@/services/orchestration/main/multi-main-setup.ee'; import { OrchestrationService } from '@/services/orchestration.service'; import { WaitTracker } from '@/wait-tracker'; +import { mockLogger } from '@test/mocking'; jest.useFakeTimers(); @@ -21,7 +22,7 @@ describe('WaitTracker', () => { let waitTracker: WaitTracker; beforeEach(() => { waitTracker = new WaitTracker( - mock(), + mockLogger(), executionRepository, 
mock(),
 			mock(),
diff --git a/packages/cli/src/__tests__/workflow-execute-additional-data.test.ts b/packages/cli/src/__tests__/workflow-execute-additional-data.test.ts
index 6b715f175d..88aee51540 100644
--- a/packages/cli/src/__tests__/workflow-execute-additional-data.test.ts
+++ b/packages/cli/src/__tests__/workflow-execute-additional-data.test.ts
@@ -1,21 +1,80 @@
+import { mock } from 'jest-mock-extended';
+import type {
+	IExecuteWorkflowInfo,
+	IWorkflowExecuteAdditionalData,
+	ExecuteWorkflowOptions,
+	IRun,
+} from 'n8n-workflow';
+import type PCancelable from 'p-cancelable';
 import Container from 'typedi';
+import { ActiveExecutions } from '@/active-executions';
 import { CredentialsHelper } from '@/credentials-helper';
+import type { WorkflowEntity } from '@/databases/entities/workflow-entity';
+import { ExecutionRepository } from '@/databases/repositories/execution.repository';
+import { WorkflowRepository } from '@/databases/repositories/workflow.repository';
 import { VariablesService } from '@/environments/variables/variables.service.ee';
 import { EventService } from '@/events/event.service';
+import { ExternalHooks } from '@/external-hooks';
 import { SecretsHelper } from '@/secrets-helpers';
-import { getBase } from '@/workflow-execute-additional-data';
+import { WorkflowStatisticsService } from '@/services/workflow-statistics.service';
+import { SubworkflowPolicyChecker } from '@/subworkflows/subworkflow-policy-checker.service';
+import { Telemetry } from '@/telemetry';
+import { PermissionChecker } from '@/user-management/permission-checker';
+import { executeWorkflow, getBase } from '@/workflow-execute-additional-data';
 import { mockInstance } from '@test/mocking';
+const run = mock<IRun>({
+	data: { resultData: {} },
+	finished: true,
+	mode: 'manual',
+	startedAt: new Date(),
+	status: 'new',
+});
+
+const cancelablePromise = mock<PCancelable<IRun>>({
+	then: jest
+		.fn()
+		.mockImplementation(async (onfulfilled) => await Promise.resolve(run).then(onfulfilled)),
+	catch: jest
+		.fn()
+		.mockImplementation(async (onrejected) => await Promise.resolve(run).catch(onrejected)),
+	finally: jest
+		.fn()
+		.mockImplementation(async (onfinally) => await Promise.resolve(run).finally(onfinally)),
+	[Symbol.toStringTag]: 'PCancelable',
+});
+
+jest.mock('n8n-core', () => ({
+	__esModule: true,
+	...jest.requireActual('n8n-core'),
+	WorkflowExecute: jest.fn().mockImplementation(() => ({
+		processRunExecutionData: jest.fn().mockReturnValue(cancelablePromise),
+	})),
+}));
+
+jest.mock('../workflow-helpers', () => ({
+	...jest.requireActual('../workflow-helpers'),
+	getDataLastExecutedNodeData: jest.fn().mockReturnValue({ data: { main: [] } }),
+}));
+
 describe('WorkflowExecuteAdditionalData', () => {
 	const variablesService = mockInstance(VariablesService);
 	variablesService.getAllCached.mockResolvedValue([]);
 	const credentialsHelper = mockInstance(CredentialsHelper);
 	const secretsHelper = mockInstance(SecretsHelper);
 	const eventService = mockInstance(EventService);
+	mockInstance(ExternalHooks);
 	Container.set(VariablesService, variablesService);
 	Container.set(CredentialsHelper, credentialsHelper);
 	Container.set(SecretsHelper, secretsHelper);
+	const executionRepository = mockInstance(ExecutionRepository);
+	mockInstance(Telemetry);
+	const workflowRepository = mockInstance(WorkflowRepository);
+	const activeExecutions = mockInstance(ActiveExecutions);
+	mockInstance(PermissionChecker);
+	mockInstance(SubworkflowPolicyChecker);
+	mockInstance(WorkflowStatisticsService);
 	test('logAiEvent should call
MessageEventBus', async () => { const additionalData = await getBase('user-id'); @@ -35,4 +94,18 @@ describe('WorkflowExecuteAdditionalData', () => { expect(eventService.emit).toHaveBeenCalledTimes(1); expect(eventService.emit).toHaveBeenCalledWith(eventName, payload); }); + + it('`executeWorkflow` should set subworkflow execution as running', async () => { + const executionId = '123'; + workflowRepository.get.mockResolvedValue(mock({ id: executionId, nodes: [] })); + activeExecutions.add.mockResolvedValue(executionId); + + await executeWorkflow( + mock(), + mock(), + mock({ loadedWorkflowData: undefined }), + ); + + expect(executionRepository.setRunning).toHaveBeenCalledWith(executionId); + }); }); diff --git a/packages/cli/src/abstract-server.ts b/packages/cli/src/abstract-server.ts index 3c60a3e48d..95ecaccdc5 100644 --- a/packages/cli/src/abstract-server.ts +++ b/packages/cli/src/abstract-server.ts @@ -13,7 +13,7 @@ import { N8N_VERSION, TEMPLATES_DIR, inDevelopment, inTest } from '@/constants'; import * as Db from '@/db'; import { OnShutdown } from '@/decorators/on-shutdown'; import { ExternalHooks } from '@/external-hooks'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { rawBodyReader, bodyParser, corsMiddleware } from '@/middlewares'; import { send, sendErrorResponse } from '@/response-helper'; import { WaitingForms } from '@/waiting-forms'; diff --git a/packages/cli/src/active-executions.ts b/packages/cli/src/active-executions.ts index 8f7661925b..f5835ca164 100644 --- a/packages/cli/src/active-executions.ts +++ b/packages/cli/src/active-executions.ts @@ -13,12 +13,12 @@ import { Service } from 'typedi'; import { ExecutionRepository } from '@/databases/repositories/execution.repository'; import { ExecutionNotFoundError } from '@/errors/execution-not-found-error'; import type { - ExecutionPayload, + CreateExecutionPayload, IExecutingWorkflowData, IExecutionDb, IExecutionsCurrentSummary, } from '@/interfaces'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { isWorkflowIdValid } from '@/utils'; import { ConcurrencyControlService } from './concurrency/concurrency-control.service'; @@ -52,11 +52,10 @@ export class ActiveExecutions { if (executionId === undefined) { // Is a new execution so save in DB - const fullExecutionData: ExecutionPayload = { + const fullExecutionData: CreateExecutionPayload = { data: executionData.executionData!, mode, finished: false, - startedAt: new Date(), workflowData: executionData.workflowData, status: executionStatus, workflowId: executionData.workflowData.id, @@ -74,7 +73,10 @@ export class ActiveExecutions { executionId = await this.executionRepository.createNewExecution(fullExecutionData); assert(executionId); - await this.concurrencyControl.throttle({ mode, executionId }); + if (config.getEnv('executions.mode') === 'regular') { + await this.concurrencyControl.throttle({ mode, executionId }); + await this.executionRepository.setRunning(executionId); + } executionStatus = 'running'; } else { // Is an existing execution we want to finish so update in DB @@ -86,6 +88,7 @@ export class ActiveExecutions { data: executionData.executionData!, waitTill: null, status: executionStatus, + // this is resuming, so keep `startedAt` as it was }; await this.executionRepository.updateExistingExecution(executionId, execution); diff --git a/packages/cli/src/active-workflow-manager.ts b/packages/cli/src/active-workflow-manager.ts index 44172ef564..9c5ef15f38 100644 --- 
a/packages/cli/src/active-workflow-manager.ts +++ b/packages/cli/src/active-workflow-manager.ts @@ -37,6 +37,7 @@ import { WorkflowRepository } from '@/databases/repositories/workflow.repository import { OnShutdown } from '@/decorators/on-shutdown'; import { ExternalHooks } from '@/external-hooks'; import type { IWorkflowDb } from '@/interfaces'; +import { Logger } from '@/logging/logger.service'; import { NodeTypes } from '@/node-types'; import { ActiveWorkflowsService } from '@/services/active-workflows.service'; import { OrchestrationService } from '@/services/orchestration.service'; @@ -47,7 +48,6 @@ import { WorkflowExecutionService } from '@/workflows/workflow-execution.service import { WorkflowStaticDataService } from '@/workflows/workflow-static-data.service'; import { ExecutionService } from './executions/execution.service'; -import { Logger } from './logger'; interface QueuedActivation { activationMode: WorkflowActivateMode; @@ -750,7 +750,7 @@ export class ActiveWorkflowManager { const wasRemoved = await this.activeWorkflows.remove(workflowId); if (wasRemoved) { - this.logger.warn(`Removed triggers and pollers for workflow "${workflowId}"`, { + this.logger.debug(`Removed triggers and pollers for workflow "${workflowId}"`, { workflowId, }); } diff --git a/packages/cli/src/auth/auth.service.ts b/packages/cli/src/auth/auth.service.ts index 481f25c897..989396df84 100644 --- a/packages/cli/src/auth/auth.service.ts +++ b/packages/cli/src/auth/auth.service.ts @@ -12,7 +12,7 @@ import { UserRepository } from '@/databases/repositories/user.repository'; import { AuthError } from '@/errors/response-errors/auth.error'; import { ForbiddenError } from '@/errors/response-errors/forbidden.error'; import { License } from '@/license'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import type { AuthenticatedRequest } from '@/requests'; import { JwtService } from '@/services/jwt.service'; import { UrlService } from '@/services/url.service'; diff --git a/packages/cli/src/commands/base-command.ts b/packages/cli/src/commands/base-command.ts index 857ca231d4..f4d97a6a05 100644 --- a/packages/cli/src/commands/base-command.ts +++ b/packages/cli/src/commands/base-command.ts @@ -1,8 +1,18 @@ import 'reflect-metadata'; import { GlobalConfig } from '@n8n/config'; import { Command, Errors } from '@oclif/core'; -import { BinaryDataService, InstanceSettings, ObjectStoreService } from 'n8n-core'; -import { ApplicationError, ErrorReporterProxy as ErrorReporter, sleep } from 'n8n-workflow'; +import { + BinaryDataService, + InstanceSettings, + ObjectStoreService, + DataDeduplicationService, +} from 'n8n-core'; +import { + ApplicationError, + ensureError, + ErrorReporterProxy as ErrorReporter, + sleep, +} from 'n8n-workflow'; import { Container } from 'typedi'; import type { AbstractServer } from '@/abstract-server'; @@ -11,15 +21,16 @@ import { LICENSE_FEATURES, inDevelopment, inTest } from '@/constants'; import * as CrashJournal from '@/crash-journal'; import { generateHostInstanceId } from '@/databases/utils/generators'; import * as Db from '@/db'; +import { getDataDeduplicationService } from '@/deduplication'; import { initErrorHandling } from '@/error-reporting'; import { MessageEventBus } from '@/eventbus/message-event-bus/message-event-bus'; -import { TelemetryEventRelay } from '@/events/telemetry-event-relay'; +import { TelemetryEventRelay } from '@/events/relays/telemetry.event-relay'; import { initExpressionEvaluator } from '@/expression-evaluator'; import { 
ExternalHooks } from '@/external-hooks'; import { ExternalSecretsManager } from '@/external-secrets/external-secrets-manager.ee'; import { License } from '@/license'; import { LoadNodesAndCredentials } from '@/load-nodes-and-credentials'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { NodeTypes } from '@/node-types'; import { PostHogClient } from '@/posthog'; import { ShutdownService } from '@/shutdown/shutdown.service'; @@ -261,6 +272,11 @@ export abstract class BaseCommand extends Command { await Container.get(BinaryDataService).init(binaryDataConfig); } + protected async initDataDeduplicationService() { + const dataDeduplicationService = getDataDeduplicationService(); + await DataDeduplicationService.init(dataDeduplicationService); + } + async initExternalHooks() { this.externalHooks = Container.get(ExternalHooks); await this.externalHooks.init(); @@ -283,8 +299,9 @@ export abstract class BaseCommand extends Command { this.logger.debug('Attempting license activation'); await this.license.activate(activationKey); this.logger.debug('License init complete'); - } catch (e) { - this.logger.error('Could not activate license', e as Error); + } catch (e: unknown) { + const error = ensureError(e); + this.logger.error('Could not activate license', { error }); } } } diff --git a/packages/cli/src/commands/db/__tests__/revert.test.ts b/packages/cli/src/commands/db/__tests__/revert.test.ts index 9c7a37b533..ce3911b2b6 100644 --- a/packages/cli/src/commands/db/__tests__/revert.test.ts +++ b/packages/cli/src/commands/db/__tests__/revert.test.ts @@ -4,7 +4,7 @@ import { mock } from 'jest-mock-extended'; import { main } from '@/commands/db/revert'; import type { IrreversibleMigration, ReversibleMigration } from '@/databases/types'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { mockInstance } from '@test/mocking'; const logger = mockInstance(Logger); @@ -170,3 +170,38 @@ test('revert the last migration if it has a down migration', async () => { expect(dataSource.undoLastMigration).toHaveBeenCalled(); expect(dataSource.destroy).toHaveBeenCalled(); }); + +test("don't use transaction if the last migration has transaction = false", async () => { + // + // ARRANGE + // + class TestMigration implements ReversibleMigration { + name = 'ReversibleMigration'; + + transaction = false as const; + + async up() {} + + async down() {} + } + + const migrationsInDb: Migration[] = [ + { id: 1, timestamp: Date.now(), name: 'ReversibleMigration' }, + ]; + const dataSource = mock({ migrations: [new TestMigration()] }); + + const migrationExecutor = mock(); + migrationExecutor.getExecutedMigrations.mockResolvedValue(migrationsInDb); + + // + // ACT + // + await main(logger, dataSource, migrationExecutor); + + // + // ASSERT + // + expect(dataSource.undoLastMigration).toHaveBeenCalledWith({ + transaction: 'none', + }); +}); diff --git a/packages/cli/src/commands/db/revert.ts b/packages/cli/src/commands/db/revert.ts index dc3776a6af..4510044405 100644 --- a/packages/cli/src/commands/db/revert.ts +++ b/packages/cli/src/commands/db/revert.ts @@ -8,7 +8,7 @@ import { Container } from 'typedi'; import { getConnectionOptions } from '@/databases/config'; import type { Migration } from '@/databases/types'; import { wrapMigration } from '@/databases/utils/migration-helpers'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; // This function is extracted to make it easier to unit test it. 
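The new revert test above pins down the expected behavior for migrations that declare `transaction = false`; the hunk below implements it. A minimal sketch of the mapping, with the option names taken from the diff and the helper name `transactionModeFor` purely illustrative:

```ts
// 'none' disables the wrapping transaction; 'each' wraps every migration.
type TransactionMode = 'none' | 'each';

// Hypothetical helper mirroring the revert command's decision.
function transactionModeFor(migration: { transaction?: false }): TransactionMode {
  return migration.transaction === false ? 'none' : 'each';
}

// Usage, assuming a TypeORM-style connection and migration instance:
// await connection.undoLastMigration({
//   transaction: transactionModeFor(lastMigrationInstance),
// });
```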
// Mocking turned into a mess due to this command using typeorm and the db @@ -55,7 +55,9 @@ export async function main( return; } - await connection.undoLastMigration(); + await connection.undoLastMigration({ + transaction: lastMigrationInstance.transaction === false ? 'none' : 'each', + }); await connection.destroy(); } diff --git a/packages/cli/src/commands/execute-batch.ts b/packages/cli/src/commands/execute-batch.ts index 71540952b5..fbbecd2cbb 100644 --- a/packages/cli/src/commands/execute-batch.ts +++ b/packages/cli/src/commands/execute-batch.ts @@ -167,6 +167,7 @@ export class ExecuteBatch extends BaseCommand { async init() { await super.init(); await this.initBinaryDataService(); + await this.initDataDeduplicationService(); await this.initExternalHooks(); } diff --git a/packages/cli/src/commands/execute.ts b/packages/cli/src/commands/execute.ts index 9a901fdfc5..fd49a2b619 100644 --- a/packages/cli/src/commands/execute.ts +++ b/packages/cli/src/commands/execute.ts @@ -31,6 +31,7 @@ export class Execute extends BaseCommand { async init() { await super.init(); await this.initBinaryDataService(); + await this.initDataDeduplicationService(); await this.initExternalHooks(); } diff --git a/packages/cli/src/commands/start.ts b/packages/cli/src/commands/start.ts index a8d9cee882..a1d8314ec6 100644 --- a/packages/cli/src/commands/start.ts +++ b/packages/cli/src/commands/start.ts @@ -23,9 +23,11 @@ import { MessageEventBus } from '@/eventbus/message-event-bus/message-event-bus' import { EventService } from '@/events/event.service'; import { ExecutionService } from '@/executions/execution.service'; import { License } from '@/license'; -import { Publisher } from '@/scaling/pubsub/publisher.service'; +import { SingleMainTaskManager } from '@/runners/task-managers/single-main-task-manager'; +import { TaskManager } from '@/runners/task-managers/task-manager'; +import { PubSubHandler } from '@/scaling/pubsub/pubsub-handler'; +import { Subscriber } from '@/scaling/pubsub/subscriber.service'; import { Server } from '@/server'; -import { OrchestrationHandlerMainService } from '@/services/orchestration/main/orchestration.handler.main.service'; import { OrchestrationService } from '@/services/orchestration.service'; import { OwnershipService } from '@/services/ownership.service'; import { PruningService } from '@/services/pruning.service'; @@ -212,6 +214,8 @@ export class Start extends BaseCommand { this.logger.debug('Wait tracker init complete'); await this.initBinaryDataService(); this.logger.debug('Binary data service init complete'); + await this.initDataDeduplicationService(); + this.logger.debug('Data deduplication service init complete'); await this.initExternalHooks(); this.logger.debug('External hooks init complete'); await this.initExternalSecrets(); @@ -222,6 +226,17 @@ export class Start extends BaseCommand { if (!this.globalConfig.endpoints.disableUi) { await this.generateStaticAssets(); } + + if (!this.globalConfig.taskRunners.disabled) { + Container.set(TaskManager, new SingleMainTaskManager()); + const { TaskRunnerServer } = await import('@/runners/task-runner-server'); + const taskRunnerServer = Container.get(TaskRunnerServer); + await taskRunnerServer.start(); + + const { TaskRunnerProcess } = await import('@/runners/task-runner-process'); + const runnerProcess = Container.get(TaskRunnerProcess); + await runnerProcess.start(); + } } async initOrchestration() { @@ -241,10 +256,11 @@ export class Start extends BaseCommand { await orchestrationService.init(); - await 
Container.get(OrchestrationHandlerMainService).initWithOptions({ - queueModeId: this.queueModeId, - publisher: Container.get(Publisher), - }); + Container.get(PubSubHandler).init(); + + const subscriber = Container.get(Subscriber); + await subscriber.subscribe('n8n.commands'); + await subscriber.subscribe('n8n.worker-response'); if (!orchestrationService.isMultiMainSetupEnabled) return; @@ -374,10 +390,9 @@ export class Start extends BaseCommand { if (executions.length === 0) return; - this.logger.debug( - '[Startup] Found enqueued executions to run', - executions.map((e) => e.id), - ); + this.logger.debug('[Startup] Found enqueued executions to run', { + executionIds: executions.map((e) => e.id), + }); const ownershipService = Container.get(OwnershipService); const workflowRunner = Container.get(WorkflowRunner); diff --git a/packages/cli/src/commands/webhook.ts b/packages/cli/src/commands/webhook.ts index e8a47e10e0..8c601c7ebc 100644 --- a/packages/cli/src/commands/webhook.ts +++ b/packages/cli/src/commands/webhook.ts @@ -4,7 +4,8 @@ import { Container } from 'typedi'; import { ActiveExecutions } from '@/active-executions'; import config from '@/config'; -import { OrchestrationHandlerWebhookService } from '@/services/orchestration/webhook/orchestration.handler.webhook.service'; +import { PubSubHandler } from '@/scaling/pubsub/pubsub-handler'; +import { Subscriber } from '@/scaling/pubsub/subscriber.service'; import { OrchestrationWebhookService } from '@/services/orchestration/webhook/orchestration.webhook.service'; import { WebhookServer } from '@/webhooks/webhook-server'; @@ -81,6 +82,8 @@ export class Webhook extends BaseCommand { this.logger.debug('Orchestration init complete'); await this.initBinaryDataService(); this.logger.debug('Binary data service init complete'); + await this.initDataDeduplicationService(); + this.logger.debug('Data deduplication service init complete'); await this.initExternalHooks(); this.logger.debug('External hooks init complete'); await this.initExternalSecrets(); @@ -110,6 +113,8 @@ export class Webhook extends BaseCommand { async initOrchestration() { await Container.get(OrchestrationWebhookService).init(); - await Container.get(OrchestrationHandlerWebhookService).init(); + + Container.get(PubSubHandler).init(); + await Container.get(Subscriber).subscribe('n8n.commands'); } } diff --git a/packages/cli/src/commands/worker.ts b/packages/cli/src/commands/worker.ts index f5f6b2b79b..528951be4a 100644 --- a/packages/cli/src/commands/worker.ts +++ b/packages/cli/src/commands/worker.ts @@ -6,12 +6,12 @@ import config from '@/config'; import { N8N_VERSION, inTest } from '@/constants'; import { EventMessageGeneric } from '@/eventbus/event-message-classes/event-message-generic'; import { MessageEventBus } from '@/eventbus/message-event-bus/message-event-bus'; -import { LogStreamingEventRelay } from '@/events/log-streaming-event-relay'; +import { LogStreamingEventRelay } from '@/events/relays/log-streaming.event-relay'; import { JobProcessor } from '@/scaling/job-processor'; -import { Publisher } from '@/scaling/pubsub/publisher.service'; +import { PubSubHandler } from '@/scaling/pubsub/pubsub-handler'; +import { Subscriber } from '@/scaling/pubsub/subscriber.service'; import type { ScalingService } from '@/scaling/scaling.service'; import type { WorkerServerEndpointsConfig } from '@/scaling/worker-server'; -import { OrchestrationHandlerWorkerService } from '@/services/orchestration/worker/orchestration.handler.worker.service'; import { OrchestrationWorkerService } 
from '@/services/orchestration/worker/orchestration.worker.service'; import { BaseCommand } from './base-command'; @@ -93,6 +93,8 @@ export class Worker extends BaseCommand { this.logger.debug('License init complete'); await this.initBinaryDataService(); this.logger.debug('Binary data service init complete'); + await this.initDataDeduplicationService(); + this.logger.debug('Data deduplication service init complete'); await this.initExternalHooks(); this.logger.debug('External hooks init complete'); await this.initExternalSecrets(); @@ -128,12 +130,9 @@ export class Worker extends BaseCommand { */ async initOrchestration() { await Container.get(OrchestrationWorkerService).init(); - await Container.get(OrchestrationHandlerWorkerService).initWithOptions({ - queueModeId: this.queueModeId, - publisher: Container.get(Publisher), - getRunningJobIds: () => this.jobProcessor.getRunningJobIds(), - getRunningJobsSummary: () => this.jobProcessor.getRunningJobsSummary(), - }); + + Container.get(PubSubHandler).init(); + await Container.get(Subscriber).subscribe('n8n.commands'); } async setConcurrency() { diff --git a/packages/cli/src/concurrency/__tests__/concurrency-control.service.test.ts b/packages/cli/src/concurrency/__tests__/concurrency-control.service.test.ts index 75239f5e59..6511ae4d03 100644 --- a/packages/cli/src/concurrency/__tests__/concurrency-control.service.test.ts +++ b/packages/cli/src/concurrency/__tests__/concurrency-control.service.test.ts @@ -11,13 +11,13 @@ import type { ExecutionRepository } from '@/databases/repositories/execution.rep import { InvalidConcurrencyLimitError } from '@/errors/invalid-concurrency-limit.error'; import type { EventService } from '@/events/event.service'; import type { IExecutingWorkflowData } from '@/interfaces'; -import type { Logger } from '@/logger'; import type { Telemetry } from '@/telemetry'; +import { mockLogger } from '@test/mocking'; import { ConcurrencyQueue } from '../concurrency-queue'; describe('ConcurrencyControlService', () => { - const logger = mock(); + const logger = mockLogger(); const executionRepository = mock(); const telemetry = mock(); const eventService = mock(); diff --git a/packages/cli/src/concurrency/concurrency-control.service.ts b/packages/cli/src/concurrency/concurrency-control.service.ts index 45ef2e1206..cf537870f2 100644 --- a/packages/cli/src/concurrency/concurrency-control.service.ts +++ b/packages/cli/src/concurrency/concurrency-control.service.ts @@ -7,7 +7,7 @@ import { InvalidConcurrencyLimitError } from '@/errors/invalid-concurrency-limit import { UnknownExecutionModeError } from '@/errors/unknown-execution-mode.error'; import { EventService } from '@/events/event.service'; import type { IExecutingWorkflowData } from '@/interfaces'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { Telemetry } from '@/telemetry'; import { ConcurrencyQueue } from './concurrency-queue'; @@ -33,6 +33,8 @@ export class ConcurrencyControlService { private readonly telemetry: Telemetry, private readonly eventService: EventService, ) { + this.logger = this.logger.withScope('executions'); + this.productionLimit = config.getEnv('executions.concurrency.productionLimit'); if (this.productionLimit === 0) { @@ -45,7 +47,6 @@ export class ConcurrencyControlService { if (this.productionLimit === -1 || config.getEnv('executions.mode') === 'queue') { this.isEnabled = false; - this.log('Service disabled'); return; } @@ -64,13 +65,12 @@ export class ConcurrencyControlService { }); 
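The constructor change above adopts the scoped logging introduced in this release (`this.logger = this.logger.withScope('executions')`), and the rest of the hunk replaces the service's hand-rolled `this.log()` prefix helper with plain `logger.debug()` calls. A minimal sketch of the scoping idea; the real `Logger` in `@/logging/logger.service` is assumed to behave roughly like this, not copied from it:

```ts
interface ScopedLogger {
  debug(message: string, meta?: object): void;
  withScope(scope: string): ScopedLogger;
}

// Each child logger carries its scope chain into every entry.
function createLogger(scopes: string[] = []): ScopedLogger {
  return {
    debug(message, meta) {
      console.debug(message, { scopes, ...meta });
    },
    withScope(scope) {
      return createLogger([...scopes, scope]);
    },
  };
}

// Usage mirroring the service constructor:
const logger = createLogger().withScope('executions');
logger.debug('Execution throttled', { executionId: '42' });
```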
this.productionQueue.on('execution-throttled', ({ executionId }) => { - this.log('Execution throttled', { executionId }); + this.logger.debug('Execution throttled', { executionId }); this.eventService.emit('execution-throttled', { executionId }); }); this.productionQueue.on('execution-released', async (executionId) => { - this.log('Execution released', { executionId }); - await this.executionRepository.resetStartedAt(executionId); + this.logger.debug('Execution released', { executionId }); }); } @@ -144,9 +144,9 @@ export class ConcurrencyControlService { // ---------------------------------- private logInit() { - this.log('Enabled'); + this.logger.debug('Enabled'); - this.log( + this.logger.debug( [ 'Production execution concurrency is', this.productionLimit === -1 ? 'unlimited' : 'limited to ' + this.productionLimit.toString(), @@ -171,10 +171,6 @@ export class ConcurrencyControlService { throw new UnknownExecutionModeError(mode); } - private log(message: string, meta?: object) { - this.logger.debug(['[Concurrency Control]', message].join(' '), meta); - } - private shouldReport(capacity: number) { return config.getEnv('deployment.type') === 'cloud' && this.limitsToReport.includes(capacity); } diff --git a/packages/cli/src/config/schema.ts b/packages/cli/src/config/schema.ts index e811fe8e10..047df9341e 100644 --- a/packages/cli/src/config/schema.ts +++ b/packages/cli/src/config/schema.ts @@ -1,7 +1,6 @@ import { GlobalConfig } from '@n8n/config'; import convict from 'convict'; import { InstanceSettings } from 'n8n-core'; -import { LOG_LEVELS } from 'n8n-workflow'; import path from 'path'; import { Container } from 'typedi'; @@ -296,41 +295,6 @@ export const schema = { env: 'EXTERNAL_HOOK_FILES', }, - logs: { - level: { - doc: 'Log output level', - format: LOG_LEVELS, - default: 'info', - env: 'N8N_LOG_LEVEL', - }, - output: { - doc: 'Where to output logs. Options are: console, file. 
Multiple can be separated by comma (",")', - format: String, - default: 'console', - env: 'N8N_LOG_OUTPUT', - }, - file: { - fileCountMax: { - doc: 'Maximum number of files to keep.', - format: Number, - default: 100, - env: 'N8N_LOG_FILE_COUNT_MAX', - }, - fileSizeMax: { - doc: 'Maximum size for each log file in MB.', - format: Number, - default: 16, - env: 'N8N_LOG_FILE_SIZE_MAX', - }, - location: { - doc: 'Log file location; only used if log output is set to file.', - format: String, - default: path.join(Container.get(InstanceSettings).n8nFolder, 'logs/n8n.log'), - env: 'N8N_LOG_FILE_LOCATION', - }, - }, - }, - push: { backend: { format: ['sse', 'websocket'] as const, diff --git a/packages/cli/src/config/types.ts b/packages/cli/src/config/types.ts index 0d3c5db2cb..78f2358f5d 100644 --- a/packages/cli/src/config/types.ts +++ b/packages/cli/src/config/types.ts @@ -1,5 +1,6 @@ import type { RedisOptions } from 'ioredis'; import type { BinaryData } from 'n8n-core'; +import type { IProcessedDataConfig } from 'n8n-workflow'; import type { schema } from './schema'; @@ -76,6 +77,7 @@ type ToReturnType = T extends NumericPath type ExceptionPaths = { 'queue.bull.redis': RedisOptions; binaryDataManager: BinaryData.Config; + processedDataManager: IProcessedDataConfig; 'userManagement.isInstanceOwnerSetUp': boolean; 'ui.banners.dismissed': string[] | undefined; }; diff --git a/packages/cli/src/constants.ts b/packages/cli/src/constants.ts index 447b32e42f..04512e8be9 100644 --- a/packages/cli/src/constants.ts +++ b/packages/cli/src/constants.ts @@ -91,6 +91,7 @@ export const LICENSE_FEATURES = { PROJECT_ROLE_EDITOR: 'feat:projectRole:editor', PROJECT_ROLE_VIEWER: 'feat:projectRole:viewer', AI_ASSISTANT: 'feat:aiAssistant', + ASK_AI: 'feat:askAi', COMMUNITY_NODES_CUSTOM_REGISTRY: 'feat:communityNodes:customRegistry', } as const; @@ -168,6 +169,8 @@ export const ARTIFICIAL_TASK_DATA = { ], }; +/** Lowest priority, meaning shut down happens after other groups */ export const LOWEST_SHUTDOWN_PRIORITY = 0; export const DEFAULT_SHUTDOWN_PRIORITY = 100; +/** Highest priority, meaning shut down happens before all other groups */ export const HIGHEST_SHUTDOWN_PRIORITY = 200; diff --git a/packages/cli/src/controllers/__tests__/api-keys.controller.test.ts b/packages/cli/src/controllers/__tests__/api-keys.controller.test.ts new file mode 100644 index 0000000000..81025fb2ca --- /dev/null +++ b/packages/cli/src/controllers/__tests__/api-keys.controller.test.ts @@ -0,0 +1,79 @@ +import { mock } from 'jest-mock-extended'; +import { randomString } from 'n8n-workflow'; +import { Container } from 'typedi'; + +import type { ApiKey } from '@/databases/entities/api-key'; +import type { User } from '@/databases/entities/user'; +import { ApiKeyRepository } from '@/databases/repositories/api-key.repository'; +import type { ApiKeysRequest, AuthenticatedRequest } from '@/requests'; +import { API_KEY_PREFIX } from '@/services/public-api-key.service'; +import { mockInstance } from '@test/mocking'; + +import { ApiKeysController } from '../api-keys.controller'; + +describe('ApiKeysController', () => { + const apiKeysRepository = mockInstance(ApiKeyRepository); + const controller = Container.get(ApiKeysController); + + let req: AuthenticatedRequest; + beforeAll(() => { + req = mock({ user: mock({ id: '123' }) }); + }); + + describe('createAPIKey', () => { + it('should create and save an API key', async () => { + const apiKeyData = { + id: '123', + userId: '123', + label: 'My API Key', + apiKey: 
`${API_KEY_PREFIX}${randomString(42)}`,
+				createdAt: new Date(),
+			} as ApiKey;
+
+			apiKeysRepository.upsert.mockImplementation();
+
+			apiKeysRepository.findOneByOrFail.mockResolvedValue(apiKeyData);
+
+			const newApiKey = await controller.createAPIKey(req);
+
+			expect(apiKeysRepository.upsert).toHaveBeenCalled();
+			expect(apiKeyData).toEqual(newApiKey);
+		});
+	});
+
+	describe('getAPIKeys', () => {
+		it('should return the users api keys redacted', async () => {
+			const apiKeyData = {
+				id: '123',
+				userId: '123',
+				label: 'My API Key',
+				apiKey: `${API_KEY_PREFIX}${randomString(42)}`,
+				createdAt: new Date(),
+			} as ApiKey;
+
+			apiKeysRepository.findBy.mockResolvedValue([apiKeyData]);
+
+			const apiKeys = await controller.getAPIKeys(req);
+			expect(apiKeys[0].apiKey).not.toEqual(apiKeyData.apiKey);
+			expect(apiKeysRepository.findBy).toHaveBeenCalledWith({ userId: req.user.id });
+		});
+	});
+
+	describe('deleteAPIKey', () => {
+		it('should delete the API key', async () => {
+			const user = mock<User>({
+				id: '123',
+				password: 'password',
+				authIdentities: [],
+				role: 'global:member',
+				mfaEnabled: false,
+			});
+			const req = mock<ApiKeysRequest.DeleteAPIKey>({ user, params: { id: user.id } });
+			await controller.deleteAPIKey(req);
+			expect(apiKeysRepository.delete).toHaveBeenCalledWith({
+				userId: req.user.id,
+				id: req.params.id,
+			});
+		});
+	});
+});
diff --git a/packages/cli/src/controllers/__tests__/me.controller.test.ts b/packages/cli/src/controllers/__tests__/me.controller.test.ts
index 3c9af48689..37c391a2dc 100644
--- a/packages/cli/src/controllers/__tests__/me.controller.test.ts
+++ b/packages/cli/src/controllers/__tests__/me.controller.test.ts
@@ -5,7 +5,7 @@ import jwt from 'jsonwebtoken';
 import { Container } from 'typedi';
 
 import { AUTH_COOKIE_NAME } from '@/constants';
-import { API_KEY_PREFIX, MeController } from '@/controllers/me.controller';
+import { MeController } from '@/controllers/me.controller';
 import type { User } from '@/databases/entities/user';
 import { AuthUserRepository } from '@/databases/repositories/auth-user.repository';
 import { InvalidAuthTokenRepository } from '@/databases/repositories/invalid-auth-token.repository';
@@ -408,32 +408,4 @@ describe('MeController', () => {
 			await expect(controller.storeSurveyAnswers(req)).rejects.toThrowError(BadRequestError);
 		});
 	});
-
-	describe('API Key methods', () => {
-		let req: AuthenticatedRequest;
-		beforeAll(() => {
-			req = mock({ user: mock<Partial<User>>({ id: '123', apiKey: `${API_KEY_PREFIX}test-key` }) });
-		});
-
-		describe('createAPIKey', () => {
-			it('should create and save an API key', async () => {
-				const { apiKey } = await controller.createAPIKey(req);
-				expect(userService.update).toHaveBeenCalledWith(req.user.id, { apiKey });
-			});
-		});
-
-		describe('getAPIKey', () => {
-			it('should return the users api key redacted', async () => {
-				const { apiKey } = await controller.getAPIKey(req);
-				expect(apiKey).not.toEqual(req.user.apiKey);
-			});
-		});
-
-		describe('deleteAPIKey', () => {
-			it('should delete the API key', async () => {
-				await controller.deleteAPIKey(req);
-				expect(userService.update).toHaveBeenCalledWith(req.user.id, { apiKey: null });
-			});
-		});
-	});
 });
diff --git a/packages/cli/src/controllers/ai-assistant.controller.ts b/packages/cli/src/controllers/ai.controller.ts
similarity index 65%
rename from packages/cli/src/controllers/ai-assistant.controller.ts
rename to packages/cli/src/controllers/ai.controller.ts
index c910be0a24..1957db2971 100644
--- a/packages/cli/src/controllers/ai-assistant.controller.ts
+++
b/packages/cli/src/controllers/ai.controller.ts @@ -7,18 +7,18 @@ import { WritableStream } from 'node:stream/web'; import { Post, RestController } from '@/decorators'; import { InternalServerError } from '@/errors/response-errors/internal-server.error'; import { AiAssistantRequest } from '@/requests'; -import { AiAssistantService } from '@/services/ai-assistant.service'; +import { AiService } from '@/services/ai.service'; type FlushableResponse = Response & { flush: () => void }; -@RestController('/ai-assistant') -export class AiAssistantController { - constructor(private readonly aiAssistantService: AiAssistantService) {} +@RestController('/ai') +export class AiController { + constructor(private readonly aiService: AiService) {} @Post('/chat', { rateLimit: { limit: 100 } }) async chat(req: AiAssistantRequest.Chat, res: FlushableResponse) { try { - const aiResponse = await this.aiAssistantService.chat(req.body, req.user); + const aiResponse = await this.aiService.chat(req.body, req.user); if (aiResponse.body) { res.header('Content-type', 'application/json-lines').flush(); await aiResponse.body.pipeTo( @@ -40,10 +40,21 @@ export class AiAssistantController { @Post('/chat/apply-suggestion') async applySuggestion( - req: AiAssistantRequest.ApplySuggestion, + req: AiAssistantRequest.ApplySuggestionPayload, ): Promise { try { - return await this.aiAssistantService.applySuggestion(req.body, req.user); + return await this.aiService.applySuggestion(req.body, req.user); + } catch (e) { + assert(e instanceof Error); + ErrorReporterProxy.error(e); + throw new InternalServerError(`Something went wrong: ${e.message}`); + } + } + + @Post('/ask-ai') + async askAi(req: AiAssistantRequest.AskAiPayload): Promise { + try { + return await this.aiService.askAi(req.body, req.user); } catch (e) { assert(e instanceof Error); ErrorReporterProxy.error(e); diff --git a/packages/cli/src/controllers/annotation-tags.controller.ts b/packages/cli/src/controllers/annotation-tags.controller.ee.ts similarity index 99% rename from packages/cli/src/controllers/annotation-tags.controller.ts rename to packages/cli/src/controllers/annotation-tags.controller.ee.ts index ff43c2ef7e..ed6ff8c52a 100644 --- a/packages/cli/src/controllers/annotation-tags.controller.ts +++ b/packages/cli/src/controllers/annotation-tags.controller.ee.ts @@ -1,6 +1,6 @@ import { Delete, Get, Patch, Post, RestController, GlobalScope } from '@/decorators'; import { AnnotationTagsRequest } from '@/requests'; -import { AnnotationTagService } from '@/services/annotation-tag.service'; +import { AnnotationTagService } from '@/services/annotation-tag.service.ee'; @RestController('/annotation-tags') export class AnnotationTagsController { diff --git a/packages/cli/src/controllers/api-keys.controller.ts b/packages/cli/src/controllers/api-keys.controller.ts new file mode 100644 index 0000000000..db53a00449 --- /dev/null +++ b/packages/cli/src/controllers/api-keys.controller.ts @@ -0,0 +1,56 @@ +import { type RequestHandler } from 'express'; + +import { Delete, Get, Post, RestController } from '@/decorators'; +import { EventService } from '@/events/event.service'; +import { isApiEnabled } from '@/public-api'; +import { ApiKeysRequest, AuthenticatedRequest } from '@/requests'; +import { PublicApiKeyService } from '@/services/public-api-key.service'; + +export const isApiEnabledMiddleware: RequestHandler = (_, res, next) => { + if (isApiEnabled()) { + next(); + } else { + res.status(404).end(); + } +}; + +@RestController('/api-keys') +export class ApiKeysController { 
+	constructor(
+		private readonly eventService: EventService,
+		private readonly publicApiKeyService: PublicApiKeyService,
+	) {}
+
+	/**
+	 * Create an API Key
+	 */
+	@Post('/', { middlewares: [isApiEnabledMiddleware] })
+	async createAPIKey(req: AuthenticatedRequest) {
+		const newApiKey = await this.publicApiKeyService.createPublicApiKeyForUser(req.user);
+
+		this.eventService.emit('public-api-key-created', { user: req.user, publicApi: false });
+
+		return newApiKey;
+	}
+
+	/**
+	 * Get API keys
+	 */
+	@Get('/', { middlewares: [isApiEnabledMiddleware] })
+	async getAPIKeys(req: AuthenticatedRequest) {
+		const apiKeys = await this.publicApiKeyService.getRedactedApiKeysForUser(req.user);
+		return apiKeys;
+	}
+
+	/**
+	 * Delete an API Key
+	 */
+	@Delete('/:id', { middlewares: [isApiEnabledMiddleware] })
+	async deleteAPIKey(req: ApiKeysRequest.DeleteAPIKey) {
+		await this.publicApiKeyService.deleteApiKeyForUser(req.user, req.params.id);
+
+		this.eventService.emit('public-api-key-deleted', { user: req.user, publicApi: false });
+
+		return { success: true };
+	}
+}
diff --git a/packages/cli/src/controllers/auth.controller.ts b/packages/cli/src/controllers/auth.controller.ts
index 25f069ad20..c2ee1c92fb 100644
--- a/packages/cli/src/controllers/auth.controller.ts
+++ b/packages/cli/src/controllers/auth.controller.ts
@@ -14,7 +14,7 @@ import { ForbiddenError } from '@/errors/response-errors/forbidden.error';
 import { EventService } from '@/events/event.service';
 import type { PublicUser } from '@/interfaces';
 import { License } from '@/license';
-import { Logger } from '@/logger';
+import { Logger } from '@/logging/logger.service';
 import { MfaService } from '@/mfa/mfa.service';
 import { PostHogClient } from '@/posthog';
 import { AuthenticatedRequest, LoginRequest, UserRequest } from '@/requests';
diff --git a/packages/cli/src/controllers/e2e.controller.ts b/packages/cli/src/controllers/e2e.controller.ts
index 5137d5b4af..9c5a1ff36d 100644
--- a/packages/cli/src/controllers/e2e.controller.ts
+++ b/packages/cli/src/controllers/e2e.controller.ts
@@ -14,7 +14,7 @@ import { MessageEventBus } from '@/eventbus/message-event-bus/message-event-bus'
 import type { BooleanLicenseFeature, NumericLicenseFeature } from '@/interfaces';
 import type { FeatureReturnType } from '@/license';
 import { License } from '@/license';
-import { Logger } from '@/logger';
+import { Logger } from '@/logging/logger.service';
 import { MfaService } from '@/mfa/mfa.service';
 import { Push } from '@/push';
 import type { UserSetupPayload } from '@/requests';
@@ -92,6 +92,7 @@ export class E2EController {
 		[LICENSE_FEATURES.PROJECT_ROLE_VIEWER]: false,
 		[LICENSE_FEATURES.AI_ASSISTANT]: false,
 		[LICENSE_FEATURES.COMMUNITY_NODES_CUSTOM_REGISTRY]: false,
+		[LICENSE_FEATURES.ASK_AI]: false,
 	};
 
 	private numericFeatures: Record<NumericLicenseFeature, number> = {
diff --git a/packages/cli/src/controllers/invitation.controller.ts b/packages/cli/src/controllers/invitation.controller.ts
index 411ab7a03f..cbd2afb9f4 100644
--- a/packages/cli/src/controllers/invitation.controller.ts
+++ b/packages/cli/src/controllers/invitation.controller.ts
@@ -12,7 +12,7 @@ import { ForbiddenError } from '@/errors/response-errors/forbidden.error';
 import { EventService } from '@/events/event.service';
 import { ExternalHooks } from '@/external-hooks';
 import { License } from '@/license';
-import { Logger } from '@/logger';
+import { Logger } from '@/logging/logger.service';
 import { PostHogClient } from '@/posthog';
 import { UserRequest } from '@/requests';
 import { PasswordUtility } from
'@/services/password.utility'; diff --git a/packages/cli/src/controllers/me.controller.ts b/packages/cli/src/controllers/me.controller.ts index 0e1bbb37a6..6cbbda3622 100644 --- a/packages/cli/src/controllers/me.controller.ts +++ b/packages/cli/src/controllers/me.controller.ts @@ -4,39 +4,26 @@ import { UserUpdateRequestDto, } from '@n8n/api-types'; import { plainToInstance } from 'class-transformer'; -import { randomBytes } from 'crypto'; -import { type RequestHandler, Response } from 'express'; +import { Response } from 'express'; import { AuthService } from '@/auth/auth.service'; import type { User } from '@/databases/entities/user'; import { UserRepository } from '@/databases/repositories/user.repository'; -import { Body, Delete, Get, Patch, Post, RestController } from '@/decorators'; +import { Body, Patch, Post, RestController } from '@/decorators'; import { BadRequestError } from '@/errors/response-errors/bad-request.error'; import { InvalidMfaCodeError } from '@/errors/response-errors/invalid-mfa-code.error'; import { EventService } from '@/events/event.service'; import { ExternalHooks } from '@/external-hooks'; import { validateEntity } from '@/generic-helpers'; import type { PublicUser } from '@/interfaces'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { MfaService } from '@/mfa/mfa.service'; -import { isApiEnabled } from '@/public-api'; import { AuthenticatedRequest, MeRequest } from '@/requests'; import { PasswordUtility } from '@/services/password.utility'; import { UserService } from '@/services/user.service'; import { isSamlLicensedAndEnabled } from '@/sso/saml/saml-helpers'; import { PersonalizationSurveyAnswersV4 } from './survey-answers.dto'; - -export const API_KEY_PREFIX = 'n8n_api_'; - -export const isApiEnabledMiddleware: RequestHandler = (_, res, next) => { - if (isApiEnabled()) { - next(); - } else { - res.status(404).end(); - } -}; - @RestController('/me') export class MeController { constructor( @@ -218,41 +205,6 @@ export class MeController { return { success: true }; } - /** - * Creates an API Key - */ - @Post('/api-key', { middlewares: [isApiEnabledMiddleware] }) - async createAPIKey(req: AuthenticatedRequest) { - const apiKey = `n8n_api_${randomBytes(40).toString('hex')}`; - - await this.userService.update(req.user.id, { apiKey }); - - this.eventService.emit('public-api-key-created', { user: req.user, publicApi: false }); - - return { apiKey }; - } - - /** - * Get an API Key - */ - @Get('/api-key', { middlewares: [isApiEnabledMiddleware] }) - async getAPIKey(req: AuthenticatedRequest) { - const apiKey = this.redactApiKey(req.user.apiKey); - return { apiKey }; - } - - /** - * Deletes an API Key - */ - @Delete('/api-key', { middlewares: [isApiEnabledMiddleware] }) - async deleteAPIKey(req: AuthenticatedRequest) { - await this.userService.update(req.user.id, { apiKey: null }); - - this.eventService.emit('public-api-key-deleted', { user: req.user, publicApi: false }); - - return { success: true }; - } - /** * Update the logged-in user's settings. 
*/ @@ -273,14 +225,4 @@ export class MeController { return user.settings; } - - private redactApiKey(apiKey: string | null) { - if (!apiKey) return; - const keepLength = 5; - return ( - API_KEY_PREFIX + - apiKey.slice(API_KEY_PREFIX.length, API_KEY_PREFIX.length + keepLength) + - '*'.repeat(apiKey.length - API_KEY_PREFIX.length - keepLength) - ); - } } diff --git a/packages/cli/src/controllers/oauth/__tests__/oauth1-credential.controller.test.ts b/packages/cli/src/controllers/oauth/__tests__/oauth1-credential.controller.test.ts index b2c0d13f21..68a86269d3 100644 --- a/packages/cli/src/controllers/oauth/__tests__/oauth1-credential.controller.test.ts +++ b/packages/cli/src/controllers/oauth/__tests__/oauth1-credential.controller.test.ts @@ -15,7 +15,7 @@ import { VariablesService } from '@/environments/variables/variables.service.ee' import { BadRequestError } from '@/errors/response-errors/bad-request.error'; import { NotFoundError } from '@/errors/response-errors/not-found.error'; import { ExternalHooks } from '@/external-hooks'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import type { OAuthRequest } from '@/requests'; import { SecretsHelper } from '@/secrets-helpers'; import { mockInstance } from '@test/mocking'; diff --git a/packages/cli/src/controllers/oauth/__tests__/oauth2-credential.controller.test.ts b/packages/cli/src/controllers/oauth/__tests__/oauth2-credential.controller.test.ts index 139ab9a983..9fc98d5557 100644 --- a/packages/cli/src/controllers/oauth/__tests__/oauth2-credential.controller.test.ts +++ b/packages/cli/src/controllers/oauth/__tests__/oauth2-credential.controller.test.ts @@ -15,7 +15,7 @@ import { VariablesService } from '@/environments/variables/variables.service.ee' import { BadRequestError } from '@/errors/response-errors/bad-request.error'; import { NotFoundError } from '@/errors/response-errors/not-found.error'; import { ExternalHooks } from '@/external-hooks'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import type { OAuthRequest } from '@/requests'; import { SecretsHelper } from '@/secrets-helpers'; import { mockInstance } from '@test/mocking'; diff --git a/packages/cli/src/controllers/oauth/abstract-oauth.controller.ts b/packages/cli/src/controllers/oauth/abstract-oauth.controller.ts index 6750c7c2a3..6e162af988 100644 --- a/packages/cli/src/controllers/oauth/abstract-oauth.controller.ts +++ b/packages/cli/src/controllers/oauth/abstract-oauth.controller.ts @@ -15,7 +15,7 @@ import { BadRequestError } from '@/errors/response-errors/bad-request.error'; import { NotFoundError } from '@/errors/response-errors/not-found.error'; import { ExternalHooks } from '@/external-hooks'; import type { ICredentialsDb } from '@/interfaces'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import type { OAuthRequest } from '@/requests'; import { UrlService } from '@/services/url.service'; import * as WorkflowExecuteAdditionalData from '@/workflow-execute-additional-data'; diff --git a/packages/cli/src/controllers/orchestration.controller.ts b/packages/cli/src/controllers/orchestration.controller.ts index a5235d1169..db1d690a3e 100644 --- a/packages/cli/src/controllers/orchestration.controller.ts +++ b/packages/cli/src/controllers/orchestration.controller.ts @@ -28,11 +28,4 @@ export class OrchestrationController { if (!this.licenseService.isWorkerViewLicensed()) return; return await this.orchestrationService.getWorkerStatus(); } - - 
@GlobalScope('orchestration:list') - @Post('/worker/ids') - async getWorkerIdsAll() { - if (!this.licenseService.isWorkerViewLicensed()) return; - return await this.orchestrationService.getWorkerIds(); - } } diff --git a/packages/cli/src/controllers/owner.controller.ts b/packages/cli/src/controllers/owner.controller.ts index 76a0191359..47d50ad3f0 100644 --- a/packages/cli/src/controllers/owner.controller.ts +++ b/packages/cli/src/controllers/owner.controller.ts @@ -9,7 +9,7 @@ import { GlobalScope, Post, RestController } from '@/decorators'; import { BadRequestError } from '@/errors/response-errors/bad-request.error'; import { EventService } from '@/events/event.service'; import { validateEntity } from '@/generic-helpers'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { PostHogClient } from '@/posthog'; import { OwnerRequest } from '@/requests'; import { PasswordUtility } from '@/services/password.utility'; diff --git a/packages/cli/src/controllers/password-reset.controller.ts b/packages/cli/src/controllers/password-reset.controller.ts index ed566cf129..88155e420a 100644 --- a/packages/cli/src/controllers/password-reset.controller.ts +++ b/packages/cli/src/controllers/password-reset.controller.ts @@ -13,7 +13,7 @@ import { UnprocessableRequestError } from '@/errors/response-errors/unprocessabl import { EventService } from '@/events/event.service'; import { ExternalHooks } from '@/external-hooks'; import { License } from '@/license'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { MfaService } from '@/mfa/mfa.service'; import { PasswordResetRequest } from '@/requests'; import { PasswordUtility } from '@/services/password.utility'; diff --git a/packages/cli/src/controllers/users.controller.ts b/packages/cli/src/controllers/users.controller.ts index c00fe48ad8..8e19be894d 100644 --- a/packages/cli/src/controllers/users.controller.ts +++ b/packages/cli/src/controllers/users.controller.ts @@ -18,7 +18,7 @@ import { NotFoundError } from '@/errors/response-errors/not-found.error'; import { EventService } from '@/events/event.service'; import { ExternalHooks } from '@/external-hooks'; import type { PublicUser } from '@/interfaces'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { listQueryMiddleware } from '@/middlewares'; import { AuthenticatedRequest, ListQuery, UserRequest } from '@/requests'; import { ProjectService } from '@/services/project.service'; diff --git a/packages/cli/src/controllers/workflow-statistics.controller.ts b/packages/cli/src/controllers/workflow-statistics.controller.ts index 1268643b14..58c99727db 100644 --- a/packages/cli/src/controllers/workflow-statistics.controller.ts +++ b/packages/cli/src/controllers/workflow-statistics.controller.ts @@ -7,7 +7,7 @@ import { WorkflowStatisticsRepository } from '@/databases/repositories/workflow- import { Get, Middleware, RestController } from '@/decorators'; import { NotFoundError } from '@/errors/response-errors/not-found.error'; import type { IWorkflowStatisticsDataLoaded } from '@/interfaces'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { StatisticsRequest } from './workflow-statistics.types'; diff --git a/packages/cli/src/crash-journal.ts b/packages/cli/src/crash-journal.ts index 184702c446..577a2f34fe 100644 --- a/packages/cli/src/crash-journal.ts +++ b/packages/cli/src/crash-journal.ts @@ -6,7 +6,7 @@ import { join, dirname } from 
'path'; import { Container } from 'typedi'; import { inProduction } from '@/constants'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; export const touchFile = async (filePath: string): Promise<void> => { await mkdir(dirname(filePath), { recursive: true }); diff --git a/packages/cli/src/credentials-overwrites.ts b/packages/cli/src/credentials-overwrites.ts index ec14bf7ecc..ed1b492dc6 100644 --- a/packages/cli/src/credentials-overwrites.ts +++ b/packages/cli/src/credentials-overwrites.ts @@ -5,7 +5,7 @@ import { Service } from 'typedi'; import { CredentialTypes } from '@/credential-types'; import type { ICredentialsOverwrite } from '@/interfaces'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; @Service() export class CredentialsOverwrites { diff --git a/packages/cli/src/credentials/credentials.controller.ts b/packages/cli/src/credentials/credentials.controller.ts index 41d9d163ba..76db501cf7 100644 --- a/packages/cli/src/credentials/credentials.controller.ts +++ b/packages/cli/src/credentials/credentials.controller.ts @@ -23,7 +23,7 @@ import { ForbiddenError } from '@/errors/response-errors/forbidden.error'; import { NotFoundError } from '@/errors/response-errors/not-found.error'; import { EventService } from '@/events/event.service'; import { License } from '@/license'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { listQueryMiddleware } from '@/middlewares'; import { CredentialRequest } from '@/requests'; import { NamingService } from '@/services/naming.service'; diff --git a/packages/cli/src/credentials/credentials.service.ee.ts b/packages/cli/src/credentials/credentials.service.ee.ts index 116137374a..aad78fe7b7 100644 --- a/packages/cli/src/credentials/credentials.service.ee.ts +++ b/packages/cli/src/credentials/credentials.service.ee.ts @@ -157,14 +157,6 @@ export class EnterpriseCredentialsService { "You can't transfer a credential into the project that's already owning it.", ); } - if (sourceProject.type !== 'team' && sourceProject.type !== 'personal') { - throw new TransferCredentialError( - 'You can only transfer credentials out of personal or team projects.', - ); - } - if (destinationProject.type !== 'team') { - throw new TransferCredentialError('You can only transfer credentials into team projects.'); - } await this.sharedCredentialsRepository.manager.transaction(async (trx) => { // 6.
transfer the credential diff --git a/packages/cli/src/credentials/credentials.service.ts b/packages/cli/src/credentials/credentials.service.ts index dc5ab4e6c7..f9bbf89e57 100644 --- a/packages/cli/src/credentials/credentials.service.ts +++ b/packages/cli/src/credentials/credentials.service.ts @@ -33,7 +33,7 @@ import { NotFoundError } from '@/errors/response-errors/not-found.error'; import { ExternalHooks } from '@/external-hooks'; import { validateEntity } from '@/generic-helpers'; import type { ICredentialsDb } from '@/interfaces'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { userHasScopes } from '@/permissions/check-access'; import type { CredentialRequest, ListQuery } from '@/requests'; import { CredentialsTester } from '@/services/credentials-tester.service'; diff --git a/packages/cli/src/databases/entities/__tests__/user.entity.test.ts b/packages/cli/src/databases/entities/__tests__/user.entity.test.ts index 5901d0218b..5bd8b0f2cb 100644 --- a/packages/cli/src/databases/entities/__tests__/user.entity.test.ts +++ b/packages/cli/src/databases/entities/__tests__/user.entity.test.ts @@ -8,7 +8,6 @@ describe('User Entity', () => { firstName: 'Don', lastName: 'Joe', password: '123456789', - apiKey: '123', }); expect(JSON.stringify(user)).toEqual( '{"email":"test@example.com","firstName":"Don","lastName":"Joe"}', diff --git a/packages/cli/src/databases/entities/annotation-tag-entity.ts b/packages/cli/src/databases/entities/annotation-tag-entity.ee.ts similarity index 93% rename from packages/cli/src/databases/entities/annotation-tag-entity.ts rename to packages/cli/src/databases/entities/annotation-tag-entity.ee.ts index e89c75a5a8..4aed8df8af 100644 --- a/packages/cli/src/databases/entities/annotation-tag-entity.ts +++ b/packages/cli/src/databases/entities/annotation-tag-entity.ee.ts @@ -1,8 +1,8 @@ import { Column, Entity, Index, ManyToMany, OneToMany } from '@n8n/typeorm'; import { IsString, Length } from 'class-validator'; -import type { AnnotationTagMapping } from '@/databases/entities/annotation-tag-mapping'; -import type { ExecutionAnnotation } from '@/databases/entities/execution-annotation'; +import type { AnnotationTagMapping } from '@/databases/entities/annotation-tag-mapping.ee'; +import type { ExecutionAnnotation } from '@/databases/entities/execution-annotation.ee'; import { WithTimestampsAndStringId } from './abstract-entity'; diff --git a/packages/cli/src/databases/entities/annotation-tag-mapping.ts b/packages/cli/src/databases/entities/annotation-tag-mapping.ee.ts similarity index 98% rename from packages/cli/src/databases/entities/annotation-tag-mapping.ts rename to packages/cli/src/databases/entities/annotation-tag-mapping.ee.ts index 2fa4fc79c1..5b1b9e1bf2 100644 --- a/packages/cli/src/databases/entities/annotation-tag-mapping.ts +++ b/packages/cli/src/databases/entities/annotation-tag-mapping.ee.ts @@ -1,7 +1,7 @@ import { Entity, JoinColumn, ManyToOne, PrimaryColumn } from '@n8n/typeorm'; -import type { AnnotationTagEntity } from './annotation-tag-entity'; -import type { ExecutionAnnotation } from './execution-annotation'; +import type { AnnotationTagEntity } from './annotation-tag-entity.ee'; +import type { ExecutionAnnotation } from './execution-annotation.ee'; /** * This entity represents the junction table between the execution annotations and the tags diff --git a/packages/cli/src/databases/entities/api-key.ts b/packages/cli/src/databases/entities/api-key.ts new file mode 100644 index 0000000000..6e2df2d00c --- 
/dev/null +++ b/packages/cli/src/databases/entities/api-key.ts @@ -0,0 +1,25 @@ +import { Column, Entity, Index, ManyToOne, Unique } from '@n8n/typeorm'; + +import { WithTimestampsAndStringId } from './abstract-entity'; +import { User } from './user'; + +@Entity('user_api_keys') +@Unique(['userId', 'label']) +export class ApiKey extends WithTimestampsAndStringId { + @ManyToOne( + () => User, + (user) => user.id, + { onDelete: 'CASCADE' }, + ) + user: User; + + @Column({ type: String }) + userId: string; + + @Column({ type: String }) + label: string; + + @Index({ unique: true }) + @Column({ type: String }) + apiKey: string; +} diff --git a/packages/cli/src/databases/entities/execution-annotation.ts b/packages/cli/src/databases/entities/execution-annotation.ee.ts similarity index 98% rename from packages/cli/src/databases/entities/execution-annotation.ts rename to packages/cli/src/databases/entities/execution-annotation.ee.ts index 33943f6330..59820c83b5 100644 --- a/packages/cli/src/databases/entities/execution-annotation.ts +++ b/packages/cli/src/databases/entities/execution-annotation.ee.ts @@ -12,8 +12,8 @@ import { } from '@n8n/typeorm'; import type { AnnotationVote } from 'n8n-workflow'; -import type { AnnotationTagEntity } from './annotation-tag-entity'; -import type { AnnotationTagMapping } from './annotation-tag-mapping'; +import type { AnnotationTagEntity } from './annotation-tag-entity.ee'; +import type { AnnotationTagMapping } from './annotation-tag-mapping.ee'; import { ExecutionEntity } from './execution-entity'; @Entity({ name: 'execution_annotations' }) diff --git a/packages/cli/src/databases/entities/execution-entity.ts b/packages/cli/src/databases/entities/execution-entity.ts index d44cb6c3f0..7b63b63eaf 100644 --- a/packages/cli/src/databases/entities/execution-entity.ts +++ b/packages/cli/src/databases/entities/execution-entity.ts @@ -12,7 +12,7 @@ import { } from '@n8n/typeorm'; import { ExecutionStatus, WorkflowExecuteMode } from 'n8n-workflow'; -import type { ExecutionAnnotation } from '@/databases/entities/execution-annotation'; +import type { ExecutionAnnotation } from '@/databases/entities/execution-annotation.ee'; import { datetimeColumnType } from './abstract-entity'; import type { ExecutionData } from './execution-data'; @@ -47,7 +47,14 @@ export class ExecutionEntity { status: ExecutionStatus; @Column(datetimeColumnType) - startedAt: Date; + createdAt: Date; + + /** + * Time when the processing of the execution actually started. This column + * is `null` when an execution is enqueued but has not started yet. 
+ */ + @Column({ type: datetimeColumnType, nullable: true }) + startedAt: Date | null; @Index() @Column({ type: datetimeColumnType, nullable: true }) diff --git a/packages/cli/src/databases/entities/index.ts b/packages/cli/src/databases/entities/index.ts index 8ae29ababd..39f67b3252 100644 --- a/packages/cli/src/databases/entities/index.ts +++ b/packages/cli/src/databases/entities/index.ts @@ -1,17 +1,19 @@ -import { AnnotationTagEntity } from './annotation-tag-entity'; -import { AnnotationTagMapping } from './annotation-tag-mapping'; +import { AnnotationTagEntity } from './annotation-tag-entity.ee'; +import { AnnotationTagMapping } from './annotation-tag-mapping.ee'; +import { ApiKey } from './api-key'; import { AuthIdentity } from './auth-identity'; import { AuthProviderSyncHistory } from './auth-provider-sync-history'; import { AuthUser } from './auth-user'; import { CredentialsEntity } from './credentials-entity'; import { EventDestinations } from './event-destinations'; -import { ExecutionAnnotation } from './execution-annotation'; +import { ExecutionAnnotation } from './execution-annotation.ee'; import { ExecutionData } from './execution-data'; import { ExecutionEntity } from './execution-entity'; import { ExecutionMetadata } from './execution-metadata'; import { InstalledNodes } from './installed-nodes'; import { InstalledPackages } from './installed-packages'; import { InvalidAuthToken } from './invalid-auth-token'; +import { ProcessedData } from './processed-data'; import { Project } from './project'; import { ProjectRelation } from './project-relation'; import { Settings } from './settings'; @@ -54,4 +56,6 @@ export const entities = { WorkflowHistory, Project, ProjectRelation, + ApiKey, + ProcessedData, }; diff --git a/packages/cli/src/databases/entities/processed-data.ts b/packages/cli/src/databases/entities/processed-data.ts new file mode 100644 index 0000000000..bd638fca95 --- /dev/null +++ b/packages/cli/src/databases/entities/processed-data.ts @@ -0,0 +1,22 @@ +import { Column, Entity, PrimaryColumn } from '@n8n/typeorm'; + +import type { IProcessedDataEntries, IProcessedDataLatest } from '@/interfaces'; + +import { jsonColumnType, WithTimestamps } from './abstract-entity'; +import { objectRetriever } from '../utils/transformers'; + +@Entity() +export class ProcessedData extends WithTimestamps { + @PrimaryColumn('varchar') + context: string; + + @PrimaryColumn() + workflowId: string; + + @Column({ + type: jsonColumnType, + nullable: true, + transformer: objectRetriever, + }) + value: IProcessedDataEntries | IProcessedDataLatest; +} diff --git a/packages/cli/src/databases/entities/user.ts b/packages/cli/src/databases/entities/user.ts index 32b257fcd1..b75bec757c 100644 --- a/packages/cli/src/databases/entities/user.ts +++ b/packages/cli/src/databases/entities/user.ts @@ -23,6 +23,7 @@ import { NoUrl } from '@/validators/no-url.validator'; import { NoXss } from '@/validators/no-xss.validator'; import { WithTimestamps, jsonColumnType } from './abstract-entity'; +import type { ApiKey } from './api-key'; import type { AuthIdentity } from './auth-identity'; import type { ProjectRelation } from './project-relation'; import type { SharedCredentials } from './shared-credentials'; @@ -89,6 +90,9 @@ export class User extends WithTimestamps implements IUser { @OneToMany('AuthIdentity', 'user') authIdentities: AuthIdentity[]; + @OneToMany('ApiKey', 'user') + apiKeys: ApiKey[]; + @OneToMany('SharedWorkflow', 'user') sharedWorkflows: SharedWorkflow[]; @@ -107,10 +111,6 @@ export class User 
extends WithTimestamps implements IUser { this.email = this.email?.toLowerCase() ?? null; } - @Column({ type: String, nullable: true }) - @Index({ unique: true }) - apiKey: string | null; - @Column({ type: Boolean, default: false }) mfaEnabled: boolean; @@ -151,7 +151,7 @@ export class User extends WithTimestamps implements IUser { } toJSON() { - const { password, apiKey, ...rest } = this; + const { password, ...rest } = this; return rest; } diff --git a/packages/cli/src/databases/migrations/common/1724951148974-AddApiKeysTable.ts b/packages/cli/src/databases/migrations/common/1724951148974-AddApiKeysTable.ts new file mode 100644 index 0000000000..a9ea5626bb --- /dev/null +++ b/packages/cli/src/databases/migrations/common/1724951148974-AddApiKeysTable.ts @@ -0,0 +1,109 @@ +import type { ApiKey } from '@/databases/entities/api-key'; +import type { MigrationContext, ReversibleMigration } from '@/databases/types'; +import { generateNanoId } from '@/databases/utils/generators'; + +export class AddApiKeysTable1724951148974 implements ReversibleMigration { + async up({ + queryRunner, + escape, + runQuery, + schemaBuilder: { createTable, column }, + }: MigrationContext) { + const userTable = escape.tableName('user'); + const userApiKeysTable = escape.tableName('user_api_keys'); + const userIdColumn = escape.columnName('userId'); + const apiKeyColumn = escape.columnName('apiKey'); + const labelColumn = escape.columnName('label'); + const idColumn = escape.columnName('id'); + + // Create the new table + await createTable('user_api_keys') + .withColumns( + column('id').varchar(36).primary, + column('userId').uuid.notNull, + column('label').varchar(100).notNull, + column('apiKey').varchar().notNull, + ) + .withForeignKey('userId', { + tableName: 'user', + columnName: 'id', + onDelete: 'CASCADE', + }) + .withIndexOn(['userId', 'label'], true) + .withIndexOn(['apiKey'], true).withTimestamps; + + const usersWithApiKeys = (await queryRunner.query( + `SELECT ${idColumn}, ${apiKeyColumn} FROM ${userTable} WHERE ${apiKeyColumn} IS NOT NULL`, + )) as Array<Partial<ApiKey>>; + + // Move the apiKey from the users table to the new table + await Promise.all( + usersWithApiKeys.map( + async (user: { id: string; apiKey: string }) => + await runQuery( + `INSERT INTO ${userApiKeysTable} (${idColumn}, ${userIdColumn}, ${apiKeyColumn}, ${labelColumn}) VALUES (:id, :userId, :apiKey, :label)`, + { + id: generateNanoId(), + userId: user.id, + apiKey: user.apiKey, + label: 'My API Key', + }, + ), + ), + ); + + // Drop apiKey column on user's table + await queryRunner.query(`ALTER TABLE ${userTable} DROP COLUMN ${apiKeyColumn};`); + } + + async down({ + queryRunner, + runQuery, + schemaBuilder: { dropTable, addColumns, createIndex, column }, + escape, + isMysql, + }: MigrationContext) { + const userTable = escape.tableName('user'); + const userApiKeysTable = escape.tableName('user_api_keys'); + const apiKeyColumn = escape.columnName('apiKey'); + const userIdColumn = escape.columnName('userId'); + const idColumn = escape.columnName('id'); + const createdAtColumn = escape.columnName('createdAt'); + + await addColumns('user', [column('apiKey').varchar()]); + + await createIndex('user', ['apiKey'], true); + + const queryToGetUsersApiKeys = isMysql + ?
` + SELECT ${userIdColumn}, + ${apiKeyColumn}, + ${createdAtColumn} + FROM ${userApiKeysTable} u + WHERE ${createdAtColumn} = (SELECT Min(${createdAtColumn}) + FROM ${userApiKeysTable} + WHERE ${userIdColumn} = u.${userIdColumn});` + : ` + SELECT DISTINCT ON + (${userIdColumn}) ${userIdColumn}, + ${apiKeyColumn}, ${createdAtColumn} + FROM ${userApiKeysTable} + ORDER BY ${userIdColumn}, ${createdAtColumn} ASC;`; + + const oldestApiKeysPerUser = (await queryRunner.query(queryToGetUsersApiKeys)) as Array< + Partial<ApiKey> + >; + + await Promise.all( + oldestApiKeysPerUser.map( + async (user: { userId: string; apiKey: string }) => + await runQuery( + `UPDATE ${userTable} SET ${apiKeyColumn} = :apiKey WHERE ${idColumn} = :userId`, + user, + ), + ), + ); + + await dropTable('user_api_keys'); + } +} diff --git a/packages/cli/src/databases/migrations/common/1726606152711-CreateProcessedDataTable.ts b/packages/cli/src/databases/migrations/common/1726606152711-CreateProcessedDataTable.ts new file mode 100644 index 0000000000..86992a0580 --- /dev/null +++ b/packages/cli/src/databases/migrations/common/1726606152711-CreateProcessedDataTable.ts @@ -0,0 +1,23 @@ +import type { MigrationContext, ReversibleMigration } from '@/databases/types'; + +const processedDataTableName = 'processed_data'; + +export class CreateProcessedDataTable1726606152711 implements ReversibleMigration { + async up({ schemaBuilder: { createTable, column } }: MigrationContext) { + await createTable(processedDataTableName) + .withColumns( + column('workflowId').varchar(36).notNull.primary, + column('value').varchar(255).notNull, + column('context').varchar(255).notNull.primary, + ) + .withForeignKey('workflowId', { + tableName: 'workflow_entity', + columnName: 'id', + onDelete: 'CASCADE', + }).withTimestamps; + } + + async down({ schemaBuilder: { dropTable } }: MigrationContext) { + await dropTable(processedDataTableName); + } +} diff --git a/packages/cli/src/databases/migrations/common/1727427440136-SeparateExecutionCreationFromStart.ts b/packages/cli/src/databases/migrations/common/1727427440136-SeparateExecutionCreationFromStart.ts new file mode 100644 index 0000000000..a44450fa2f --- /dev/null +++ b/packages/cli/src/databases/migrations/common/1727427440136-SeparateExecutionCreationFromStart.ts @@ -0,0 +1,27 @@ +import type { MigrationContext, ReversibleMigration } from '@/databases/types'; + +export class SeparateExecutionCreationFromStart1727427440136 implements ReversibleMigration { + async up({ + schemaBuilder: { addColumns, column, dropNotNull }, + runQuery, + escape, + }: MigrationContext) { + await addColumns('execution_entity', [ + column('createdAt').notNull.timestamp().default('NOW()'), + ]); + + await dropNotNull('execution_entity', 'startedAt'); + + const executionEntity = escape.tableName('execution_entity'); + const createdAt = escape.columnName('createdAt'); + const startedAt = escape.columnName('startedAt'); + + // inaccurate for pre-migration rows but prevents `createdAt` from being nullable + await runQuery(`UPDATE ${executionEntity} SET ${createdAt} = ${startedAt};`); + } + + async down({ schemaBuilder: { dropColumns, addNotNull } }: MigrationContext) { + await dropColumns('execution_entity', ['createdAt']); + await addNotNull('execution_entity', 'startedAt'); + } +} diff --git a/packages/cli/src/databases/migrations/mysqldb/index.ts b/packages/cli/src/databases/migrations/mysqldb/index.ts index f3660f905d..1dcca1e592 100644 ---
b/packages/cli/src/databases/migrations/mysqldb/index.ts @@ -63,6 +63,9 @@ import { AddConstraintToExecutionMetadata1720101653148 } from '../common/1720101 import { CreateInvalidAuthTokenTable1723627610222 } from '../common/1723627610222-CreateInvalidAuthTokenTable'; import { RefactorExecutionIndices1723796243146 } from '../common/1723796243146-RefactorExecutionIndices'; import { CreateAnnotationTables1724753530828 } from '../common/1724753530828-CreateExecutionAnnotationTables'; +import { AddApiKeysTable1724951148974 } from '../common/1724951148974-AddApiKeysTable'; +import { CreateProcessedDataTable1726606152711 } from '../common/1726606152711-CreateProcessedDataTable'; +import { SeparateExecutionCreationFromStart1727427440136 } from '../common/1727427440136-SeparateExecutionCreationFromStart'; export const mysqlMigrations: Migration[] = [ InitialMigration1588157391238, @@ -128,4 +131,7 @@ export const mysqlMigrations: Migration[] = [ CreateInvalidAuthTokenTable1723627610222, RefactorExecutionIndices1723796243146, CreateAnnotationTables1724753530828, + AddApiKeysTable1724951148974, + SeparateExecutionCreationFromStart1727427440136, + CreateProcessedDataTable1726606152711, ]; diff --git a/packages/cli/src/databases/migrations/postgresdb/index.ts b/packages/cli/src/databases/migrations/postgresdb/index.ts index e3ad5afa57..eb0e2bd946 100644 --- a/packages/cli/src/databases/migrations/postgresdb/index.ts +++ b/packages/cli/src/databases/migrations/postgresdb/index.ts @@ -63,6 +63,9 @@ import { AddConstraintToExecutionMetadata1720101653148 } from '../common/1720101 import { CreateInvalidAuthTokenTable1723627610222 } from '../common/1723627610222-CreateInvalidAuthTokenTable'; import { RefactorExecutionIndices1723796243146 } from '../common/1723796243146-RefactorExecutionIndices'; import { CreateAnnotationTables1724753530828 } from '../common/1724753530828-CreateExecutionAnnotationTables'; +import { AddApiKeysTable1724951148974 } from '../common/1724951148974-AddApiKeysTable'; +import { CreateProcessedDataTable1726606152711 } from '../common/1726606152711-CreateProcessedDataTable'; +import { SeparateExecutionCreationFromStart1727427440136 } from '../common/1727427440136-SeparateExecutionCreationFromStart'; export const postgresMigrations: Migration[] = [ InitialMigration1587669153312, @@ -128,4 +131,7 @@ export const postgresMigrations: Migration[] = [ CreateInvalidAuthTokenTable1723627610222, RefactorExecutionIndices1723796243146, CreateAnnotationTables1724753530828, + AddApiKeysTable1724951148974, + SeparateExecutionCreationFromStart1727427440136, + CreateProcessedDataTable1726606152711, ]; diff --git a/packages/cli/src/databases/migrations/sqlite/1724951148974-AddApiKeysTable.ts b/packages/cli/src/databases/migrations/sqlite/1724951148974-AddApiKeysTable.ts new file mode 100644 index 0000000000..71dcecdfb3 --- /dev/null +++ b/packages/cli/src/databases/migrations/sqlite/1724951148974-AddApiKeysTable.ts @@ -0,0 +1,127 @@ +import type { ApiKey } from '@/databases/entities/api-key'; +import type { MigrationContext, ReversibleMigration } from '@/databases/types'; +import { generateNanoId } from '@/databases/utils/generators'; + +export class AddApiKeysTable1724951148974 implements ReversibleMigration { + transaction = false as const; + + async up({ queryRunner, tablePrefix, runQuery }: MigrationContext) { + const tableName = `${tablePrefix}user_api_keys`; + + // Create the table + await queryRunner.query(` + CREATE TABLE ${tableName} ( + id VARCHAR(36) PRIMARY KEY NOT NULL, + "userId" VARCHAR 
NOT NULL, + "label" VARCHAR(100) NOT NULL, + "apiKey" VARCHAR NOT NULL, + "createdAt" DATETIME(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), + "updatedAt" DATETIME(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), + FOREIGN KEY ("userId") REFERENCES user(id) ON DELETE CASCADE, + UNIQUE ("userId", label), + UNIQUE("apiKey") + ); + `); + + const usersWithApiKeys = (await queryRunner.query( + `SELECT id, "apiKey" FROM ${tablePrefix}user WHERE "apiKey" IS NOT NULL`, + )) as Array<Partial<ApiKey>>; + + // Move the apiKey from the users table to the new table + await Promise.all( + usersWithApiKeys.map( + async (user: { id: string; apiKey: string }) => + await runQuery( + `INSERT INTO ${tableName} ("id", "userId", "apiKey", "label") VALUES (:id, :userId, :apiKey, :label)`, + { + id: generateNanoId(), + userId: user.id, + apiKey: user.apiKey, + label: 'My API Key', + }, + ), + ), + ); + + // Create temporary table to store the users dropping the api key column + await queryRunner.query(` + CREATE TABLE users_new ( + id varchar PRIMARY KEY, + email VARCHAR(255) UNIQUE, + "firstName" VARCHAR(32), + "lastName" VARCHAR(32), + password VARCHAR, + "personalizationAnswers" TEXT, + "createdAt" DATETIME(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), + "updatedAt" DATETIME(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), + settings TEXT, + disabled BOOLEAN DEFAULT FALSE NOT NULL, + "mfaEnabled" BOOLEAN DEFAULT FALSE NOT NULL, + "mfaSecret" TEXT, + "mfaRecoveryCodes" TEXT, + role TEXT NOT NULL + ); + `); + + // Copy the data from the original users table + await queryRunner.query(` + INSERT INTO users_new ("id", "email", "firstName", "lastName", "password", "personalizationAnswers", "createdAt", "updatedAt", "settings", "disabled", "mfaEnabled", "mfaSecret", "mfaRecoveryCodes", "role") + SELECT "id", "email", "firstName", "lastName", "password", "personalizationAnswers", "createdAt", "updatedAt", "settings", "disabled", "mfaEnabled", "mfaSecret", "mfaRecoveryCodes", "role" + FROM ${tablePrefix}user; + `); + + // Drop table with apiKey column + await queryRunner.query(`DROP TABLE ${tablePrefix}user;`); + + // Rename the temporary table to users + await queryRunner.query('ALTER TABLE users_new RENAME TO user;'); + } + + async down({ + queryRunner, + runQuery, + tablePrefix, + schemaBuilder: { dropTable, createIndex }, + escape, + }: MigrationContext) { + const userApiKeysTable = escape.tableName('user_api_keys'); + const apiKeyColumn = escape.columnName('apiKey'); + const userIdColumn = escape.columnName('userId'); + const idColumn = escape.columnName('id'); + const createdAtColumn = escape.columnName('createdAt'); + + const queryToGetUsersApiKeys = ` + SELECT + ${userIdColumn}, + ${apiKeyColumn}, + ${createdAtColumn} + FROM + ${userApiKeysTable} + WHERE + ${createdAtColumn} IN( + SELECT + MIN(${createdAtColumn}) + FROM ${userApiKeysTable} + GROUP BY ${userIdColumn});`; + + const oldestApiKeysPerUser = (await queryRunner.query(queryToGetUsersApiKeys)) as Array< + Partial<ApiKey> + >; + + await queryRunner.query(`ALTER TABLE ${tablePrefix}user ADD COLUMN "apiKey" varchar;`); + + await createIndex('user', ['apiKey'], true); + + await Promise.all( + oldestApiKeysPerUser.map( + async (user: { userId: string; apiKey: string }) => + await runQuery( + `UPDATE ${tablePrefix}user SET ${apiKeyColumn} = :apiKey WHERE ${idColumn} = :userId`, + user, + ), + ), + ); + + await dropTable('user_api_keys'); + } +} diff --git a/packages/cli/src/databases/migrations/sqlite/index.ts
b/packages/cli/src/databases/migrations/sqlite/index.ts index a1739ae4a4..797b26752c 100644 --- a/packages/cli/src/databases/migrations/sqlite/index.ts +++ b/packages/cli/src/databases/migrations/sqlite/index.ts @@ -37,6 +37,7 @@ import { AddMfaColumns1690000000030 } from './1690000000040-AddMfaColumns'; import { ExecutionSoftDelete1693491613982 } from './1693491613982-ExecutionSoftDelete'; import { DropRoleMapping1705429061930 } from './1705429061930-DropRoleMapping'; import { AddActivatedAtUserSetting1717498465931 } from './1717498465931-AddActivatedAtUserSetting'; +import { AddApiKeysTable1724951148974 } from './1724951148974-AddApiKeysTable'; import { UniqueWorkflowNames1620821879465 } from '../common/1620821879465-UniqueWorkflowNames'; import { UpdateWorkflowCredentials1630330987096 } from '../common/1630330987096-UpdateWorkflowCredentials'; import { AddNodeIds1658930531669 } from '../common/1658930531669-AddNodeIds'; @@ -60,6 +61,8 @@ import { AddConstraintToExecutionMetadata1720101653148 } from '../common/1720101 import { CreateInvalidAuthTokenTable1723627610222 } from '../common/1723627610222-CreateInvalidAuthTokenTable'; import { RefactorExecutionIndices1723796243146 } from '../common/1723796243146-RefactorExecutionIndices'; import { CreateAnnotationTables1724753530828 } from '../common/1724753530828-CreateExecutionAnnotationTables'; +import { CreateProcessedDataTable1726606152711 } from '../common/1726606152711-CreateProcessedDataTable'; +import { SeparateExecutionCreationFromStart1727427440136 } from '../common/1727427440136-SeparateExecutionCreationFromStart'; const sqliteMigrations: Migration[] = [ InitialMigration1588102412422, @@ -122,6 +125,9 @@ const sqliteMigrations: Migration[] = [ CreateInvalidAuthTokenTable1723627610222, RefactorExecutionIndices1723796243146, CreateAnnotationTables1724753530828, + AddApiKeysTable1724951148974, + SeparateExecutionCreationFromStart1727427440136, + CreateProcessedDataTable1726606152711, ]; export { sqliteMigrations }; diff --git a/packages/cli/src/databases/repositories/__tests__/execution.repository.test.ts b/packages/cli/src/databases/repositories/__tests__/execution.repository.test.ts index ac45b71bc4..10d1371f37 100644 --- a/packages/cli/src/databases/repositories/__tests__/execution.repository.test.ts +++ b/packages/cli/src/databases/repositories/__tests__/execution.repository.test.ts @@ -12,7 +12,9 @@ import { mockInstance, mockEntityManager } from '@test/mocking'; describe('ExecutionRepository', () => { const entityManager = mockEntityManager(ExecutionEntity); - const globalConfig = mockInstance(GlobalConfig); + const globalConfig = mockInstance(GlobalConfig, { + logging: { outputs: ['console'], scopes: [] }, + }); const binaryDataService = mockInstance(BinaryDataService); const executionRepository = Container.get(ExecutionRepository); const mockDate = new Date('2023-12-28 12:34:56.789Z'); diff --git a/packages/cli/src/databases/repositories/annotation-tag-mapping.repository.ts b/packages/cli/src/databases/repositories/annotation-tag-mapping.repository.ee.ts similarity index 97% rename from packages/cli/src/databases/repositories/annotation-tag-mapping.repository.ts rename to packages/cli/src/databases/repositories/annotation-tag-mapping.repository.ee.ts index c8c4a80d31..07bb79815b 100644 --- a/packages/cli/src/databases/repositories/annotation-tag-mapping.repository.ts +++ b/packages/cli/src/databases/repositories/annotation-tag-mapping.repository.ee.ts @@ -1,7 +1,7 @@ import { DataSource, Repository } from '@n8n/typeorm'; import 
{ Service } from 'typedi'; -import { AnnotationTagMapping } from '@/databases/entities/annotation-tag-mapping'; +import { AnnotationTagMapping } from '@/databases/entities/annotation-tag-mapping.ee'; @Service() export class AnnotationTagMappingRepository extends Repository<AnnotationTagMapping> { diff --git a/packages/cli/src/databases/repositories/annotation-tag.repository.ts b/packages/cli/src/databases/repositories/annotation-tag.repository.ee.ts similarity index 94% rename from packages/cli/src/databases/repositories/annotation-tag.repository.ts rename to packages/cli/src/databases/repositories/annotation-tag.repository.ee.ts index 2f4b847ba6..e3aa993460 100644 --- a/packages/cli/src/databases/repositories/annotation-tag.repository.ts +++ b/packages/cli/src/databases/repositories/annotation-tag.repository.ee.ts @@ -1,7 +1,7 @@ import { DataSource, Repository } from '@n8n/typeorm'; import { Service } from 'typedi'; -import { AnnotationTagEntity } from '@/databases/entities/annotation-tag-entity'; +import { AnnotationTagEntity } from '@/databases/entities/annotation-tag-entity.ee'; @Service() export class AnnotationTagRepository extends Repository<AnnotationTagEntity> { diff --git a/packages/cli/src/databases/repositories/api-key.repository.ts b/packages/cli/src/databases/repositories/api-key.repository.ts new file mode 100644 index 0000000000..21ad2c3e40 --- /dev/null +++ b/packages/cli/src/databases/repositories/api-key.repository.ts @@ -0,0 +1,11 @@ +import { DataSource, Repository } from '@n8n/typeorm'; +import { Service } from 'typedi'; + +import { ApiKey } from '../entities/api-key'; + +@Service() +export class ApiKeyRepository extends Repository<ApiKey> { + constructor(dataSource: DataSource) { + super(ApiKey, dataSource.manager); + } +} diff --git a/packages/cli/src/databases/repositories/execution-annotation.repository.ts b/packages/cli/src/databases/repositories/execution-annotation.repository.ts index 81d4917173..97ca972733 100644 --- a/packages/cli/src/databases/repositories/execution-annotation.repository.ts +++ b/packages/cli/src/databases/repositories/execution-annotation.repository.ts @@ -1,7 +1,7 @@ import { DataSource, Repository } from '@n8n/typeorm'; import { Service } from 'typedi'; -import { ExecutionAnnotation } from '@/databases/entities/execution-annotation'; +import { ExecutionAnnotation } from '@/databases/entities/execution-annotation.ee'; @Service() export class ExecutionAnnotationRepository extends Repository<ExecutionAnnotation> { diff --git a/packages/cli/src/databases/repositories/execution.repository.ts b/packages/cli/src/databases/repositories/execution.repository.ts index 52c7fd65f3..7b26463969 100644 --- a/packages/cli/src/databases/repositories/execution.repository.ts +++ b/packages/cli/src/databases/repositories/execution.repository.ts @@ -36,24 +36,26 @@ import type { import { Service } from 'typedi'; import config from '@/config'; -import { AnnotationTagEntity } from '@/databases/entities/annotation-tag-entity'; -import { AnnotationTagMapping } from '@/databases/entities/annotation-tag-mapping'; -import { ExecutionAnnotation } from '@/databases/entities/execution-annotation'; +import { AnnotationTagEntity } from '@/databases/entities/annotation-tag-entity.ee'; +import { AnnotationTagMapping } from '@/databases/entities/annotation-tag-mapping.ee'; +import { ExecutionAnnotation } from '@/databases/entities/execution-annotation.ee'; import { PostgresLiveRowsRetrievalError } from '@/errors/postgres-live-rows-retrieval.error'; import type { ExecutionSummaries } from '@/executions/execution.types'; import type { -
ExecutionPayload, + CreateExecutionPayload, IExecutionBase, IExecutionFlattedDb, IExecutionResponse, } from '@/interfaces'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { separate } from '@/utils'; import { ExecutionDataRepository } from './execution-data.repository'; import type { ExecutionData } from '../entities/execution-data'; import { ExecutionEntity } from '../entities/execution-entity'; import { ExecutionMetadata } from '../entities/execution-metadata'; +import { SharedWorkflow } from '../entities/shared-workflow'; +import { WorkflowEntity } from '../entities/workflow-entity'; export interface IGetExecutionsQueryFilter { id?: FindOperator<string> | string; @@ -196,7 +198,7 @@ export class ExecutionRepository extends Repository<ExecutionEntity> { return executions.map((execution) => { const { executionData, ...rest } = execution; return rest; - }); + }) as IExecutionFlattedDb[] | IExecutionResponse[] | IExecutionBase[]; } reportInvalidExecutions(executions: ExecutionEntity[]) { @@ -295,15 +297,15 @@ export class ExecutionRepository extends Repository<ExecutionEntity> { }), ...(options?.includeAnnotation && serializedAnnotation && { annotation: serializedAnnotation }), - }; + } as IExecutionFlattedDb | IExecutionResponse | IExecutionBase; } /** * Insert a new execution and its execution data using a transaction. */ - async createNewExecution(execution: ExecutionPayload): Promise<string> { + async createNewExecution(execution: CreateExecutionPayload): Promise<string> { const { data, workflowData, ...rest } = execution; - const { identifiers: inserted } = await this.insert(rest); + const { identifiers: inserted } = await this.insert({ ...rest, createdAt: new Date() }); const { id: executionId } = inserted[0] as { id: string }; const { connections, nodes, name, settings } = workflowData ?? {}; await this.executionDataRepository.insert({ @@ -338,20 +340,25 @@ export class ExecutionRepository extends Repository<ExecutionEntity> { ]); } - async updateStatus(executionId: string, status: ExecutionStatus) { - await this.update({ id: executionId }, { status }); - } + async setRunning(executionId: string) { + const startedAt = new Date(); - async resetStartedAt(executionId: string) { - await this.update({ id: executionId }, { startedAt: new Date() }); + await this.update({ id: executionId }, { status: 'running', startedAt }); + + return startedAt; } async updateExistingExecution(executionId: string, execution: Partial<IExecutionResponse>) { - // Se isolate startedAt because it must be set when the execution starts and should never change.
- // So we prevent updating it, if it's sent (it usually is and causes problems to executions that - // are resumed after waiting for some time, as a new startedAt is set) - const { id, data, workflowId, workflowData, startedAt, customData, ...executionInformation } = - execution; + const { + id, + data, + workflowId, + workflowData, + createdAt, // must never change + startedAt, // must never change + customData, + ...executionInformation + } = execution; if (Object.keys(executionInformation).length > 0) { await this.update({ id: executionId }, executionInformation); } @@ -719,6 +726,7 @@ export class ExecutionRepository extends Repository<ExecutionEntity> { mode: true, retryOf: true, status: true, + createdAt: true, startedAt: true, stoppedAt: true, }; @@ -804,6 +812,7 @@ export class ExecutionRepository extends Repository<ExecutionEntity> { // @tech_debt: These transformations should not be needed private toSummary(execution: { id: number | string; + createdAt?: Date | string; startedAt?: Date | string; stoppedAt?: Date | string; waitTill?: Date | string | null; @@ -815,6 +824,13 @@ export class ExecutionRepository extends Repository<ExecutionEntity> { return date; }; + if (execution.createdAt) { + execution.createdAt = + execution.createdAt instanceof Date + ? execution.createdAt.toISOString() + : normalizeDateString(execution.createdAt); + } + if (execution.startedAt) { execution.startedAt = execution.startedAt instanceof Date @@ -874,6 +890,7 @@ export class ExecutionRepository extends Repository<ExecutionEntity> { metadata, annotationTags, vote, + projectId, } = query; const fields = Object.keys(this.summaryFields) @@ -945,6 +962,12 @@ export class ExecutionRepository extends Repository<ExecutionEntity> { } } + if (projectId) { + qb.innerJoin(WorkflowEntity, 'w', 'w.id = execution.workflowId') + .innerJoin(SharedWorkflow, 'sw', 'sw.workflowId = w.id') + .where('sw.projectId = :projectId', { projectId }); + } + return qb; } diff --git a/packages/cli/src/databases/repositories/processed-data.repository.ts b/packages/cli/src/databases/repositories/processed-data.repository.ts new file mode 100644 index 0000000000..f02fbf270a --- /dev/null +++ b/packages/cli/src/databases/repositories/processed-data.repository.ts @@ -0,0 +1,11 @@ +import { DataSource, Repository } from '@n8n/typeorm'; +import { Service } from 'typedi'; + +import { ProcessedData } from '../entities/processed-data'; + +@Service() +export class ProcessedDataRepository extends Repository<ProcessedData> { + constructor(dataSource: DataSource) { + super(ProcessedData, dataSource.manager); + } +} diff --git a/packages/cli/src/databases/subscribers/user-subscriber.ts b/packages/cli/src/databases/subscribers/user-subscriber.ts index 4bb2cc98d3..2f9e698890 100644 --- a/packages/cli/src/databases/subscribers/user-subscriber.ts +++ b/packages/cli/src/databases/subscribers/user-subscriber.ts @@ -3,7 +3,7 @@ import { EventSubscriber } from '@n8n/typeorm'; import { ApplicationError, ErrorReporterProxy } from 'n8n-workflow'; import { Container } from 'typedi'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { Project } from '../entities/project'; import { User } from '../entities/user'; diff --git a/packages/cli/src/databases/types.ts b/packages/cli/src/databases/types.ts index 21bfdc025a..2bb1802bf2 100644 --- a/packages/cli/src/databases/types.ts +++ b/packages/cli/src/databases/types.ts @@ -1,7 +1,7 @@ import type { QueryRunner, ObjectLiteral } from '@n8n/typeorm'; import type { INodeTypes } from 'n8n-workflow'; -import type { Logger } from '@/logger'; +import type { Logger } from
'@/logging/logger.service'; import type { createSchemaBuilder } from './dsl'; diff --git a/packages/cli/src/databases/utils/migration-helpers.ts b/packages/cli/src/databases/utils/migration-helpers.ts index 9d29e3fbe5..1093096f43 100644 --- a/packages/cli/src/databases/utils/migration-helpers.ts +++ b/packages/cli/src/databases/utils/migration-helpers.ts @@ -9,7 +9,7 @@ import { Container } from 'typedi'; import { inTest } from '@/constants'; import { createSchemaBuilder } from '@/databases/dsl'; import type { BaseMigration, Migration, MigrationContext, MigrationFn } from '@/databases/types'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { NodeTypes } from '@/node-types'; const PERSONALIZATION_SURVEY_FILENAME = 'personalizationSurvey.json'; diff --git a/packages/cli/src/decorators/debounce.ts b/packages/cli/src/decorators/debounce.ts new file mode 100644 index 0000000000..6096ce522a --- /dev/null +++ b/packages/cli/src/decorators/debounce.ts @@ -0,0 +1,37 @@ +import debounce from 'lodash/debounce'; + +/** + * Debounce a class method using `lodash/debounce`. + * + * @param waitMs - Number of milliseconds to debounce method by. + * + * @example + * ``` + * class MyClass { + * @Debounce(1000) + * async myMethod() { + * // debounced + * } + * } + * ``` + */ +export const Debounce = + (waitMs: number): MethodDecorator => + <T>( + _: object, + methodName: string, + originalDescriptor: PropertyDescriptor, + ): TypedPropertyDescriptor<T> => ({ + configurable: true, + + get() { + const debouncedFn = debounce(originalDescriptor.value, waitMs); + + Object.defineProperty(this, methodName, { + configurable: false, + value: debouncedFn, + }); + + return debouncedFn as T; + }, + }); diff --git a/packages/cli/src/decorators/redactable.ts b/packages/cli/src/decorators/redactable.ts index 51d02c5c3d..e2df19daa6 100644 --- a/packages/cli/src/decorators/redactable.ts +++ b/packages/cli/src/decorators/redactable.ts @@ -1,5 +1,5 @@ import { RedactableError } from '@/errors/redactable.error'; -import type { UserLike } from '@/events/relay-event-map'; +import type { UserLike } from '@/events/maps/relay.event-map'; function toRedactable(userLike: UserLike) { return { diff --git a/packages/cli/src/deduplication/deduplication-helper.ts b/packages/cli/src/deduplication/deduplication-helper.ts new file mode 100644 index 0000000000..a913a21a8c --- /dev/null +++ b/packages/cli/src/deduplication/deduplication-helper.ts @@ -0,0 +1,356 @@ +import { createHash } from 'crypto'; +import { + type ICheckProcessedContextData, + type IDataDeduplicator, + type ICheckProcessedOptions, + type IDeduplicationOutput, + type DeduplicationScope, + type DeduplicationItemTypes, + type DeduplicationMode, + tryToParseDateTime, +} from 'n8n-workflow'; +import * as assert from 'node:assert/strict'; +import { Container } from 'typedi'; + +import type { ProcessedData } from '@/databases/entities/processed-data'; +import { ProcessedDataRepository } from '@/databases/repositories/processed-data.repository'; +import { DeduplicationError } from '@/errors/deduplication.error'; +import type { IProcessedDataEntries, IProcessedDataLatest } from '@/interfaces'; + +export class DeduplicationHelper implements IDataDeduplicator { + private static sortEntries( + items: DeduplicationItemTypes[], + mode: DeduplicationMode, + ): DeduplicationItemTypes[] { + return items.slice().sort((a, b) => DeduplicationHelper.compareValues(mode, a, b)); + } + /** + * Compares two values based on the provided mode
('latestIncrementalKey' or 'latestDate'). + * + * @param {DeduplicationMode} mode - The mode to determine the comparison logic. Can be either: + * - 'latestIncrementalKey': Compares numeric values and returns true if `value1` is greater than `value2`. + * - 'latestDate': Compares date strings and returns true if `value1` is a later date than `value2`. + * + * @param {DeduplicationItemTypes} value1 - The first value to compare. + * - If the mode is 'latestIncrementalKey', this should be a numeric value or a string that can be converted to a number. + * - If the mode is 'latestDate', this should be a valid date string. + * + * @param {DeduplicationItemTypes} value2 - The second value to compare. + * - If the mode is 'latestIncrementalKey', this should be a numeric value or a string that can be converted to a number. + * - If the mode is 'latestDate', this should be a valid date string. + * + * @returns {boolean} - Returns `true` if `value1` is greater than `value2` based on the comparison mode. + * - In 'latestIncrementalKey' mode, it returns `true` if `value1` is numerically greater than `value2`. + * - In 'latestDate' mode, it returns `true` if `value1` is a later date than `value2`. + * + * @throws {DeduplicationError} - Throws an error if: + * - The mode is 'latestIncrementalKey' and the values are not valid numbers. + * - The mode is 'latestDate' and the values are not valid date strings. + * - An unsupported mode is provided. + */ + + private static compareValues( + mode: DeduplicationMode, + value1: DeduplicationItemTypes, + value2: DeduplicationItemTypes, + ): 1 | 0 | -1 { + if (mode === 'latestIncrementalKey') { + const num1 = Number(value1); + const num2 = Number(value2); + if (!isNaN(num1) && !isNaN(num2)) { + return num1 === num2 ? 0 : num1 > num2 ? 1 : -1; + } + throw new DeduplicationError( + 'Invalid value. Only numbers are supported in mode "latestIncrementalKey"', + ); + } else if (mode === 'latestDate') { + try { + const date1 = tryToParseDateTime(value1); + const date2 = tryToParseDateTime(value2); + + return date1 === date2 ? 0 : date1 > date2 ? 1 : -1; + } catch (error) { + throw new DeduplicationError( + 'Invalid value. Only valid dates are supported in mode "latestDate"', + ); + } + } else { + throw new DeduplicationError( + "Invalid mode. 
Only 'latestIncrementalKey' and 'latestDate' are supported.", + ); + } + } + + private static createContext( + scope: DeduplicationScope, + contextData: ICheckProcessedContextData, + ): string { + if (scope === 'node') { + if (!contextData.node) { + throw new DeduplicationError( + "No node information has been provided and so cannot use scope 'node'", + ); + } + // Use the node ID to make sure that the data can still be accessed and does not get deleted + // whenever the node gets renamed + return `n:${contextData.node.id}`; + } + return ''; + } + + private static createValueHash(value: DeduplicationItemTypes): string { + return createHash('md5').update(value.toString()).digest('base64'); + } + + private async findProcessedData( + scope: DeduplicationScope, + contextData: ICheckProcessedContextData, + ): Promise<ProcessedData | null> { + return await Container.get(ProcessedDataRepository).findOne({ + where: { + workflowId: contextData.workflow.id, + context: DeduplicationHelper.createContext(scope, contextData), + }, + }); + } + + private validateMode(processedData: ProcessedData | null, options: ICheckProcessedOptions) { + if (processedData && processedData.value.mode !== options.mode) { + throw new DeduplicationError( + 'Deduplication data was originally saved with an incompatible setting of the ‘Keep Items Where’ parameter. Try ’Clean Database’ operation to reset.', + ); + } + } + + private processedDataHasEntries( + data: IProcessedDataEntries | IProcessedDataLatest, + ): data is IProcessedDataEntries { + return Array.isArray(data.data); + } + + private processedDataIsLatest( + data: IProcessedDataEntries | IProcessedDataLatest, + ): data is IProcessedDataLatest { + return data && !Array.isArray(data.data); + } + + private async handleLatestModes( + items: DeduplicationItemTypes[], + contextData: ICheckProcessedContextData, + options: ICheckProcessedOptions, + processedData: ProcessedData | null, + dbContext: string, + ): Promise<IDeduplicationOutput> { + const incomingItems = DeduplicationHelper.sortEntries(items, options.mode); + + if (!processedData) { + // All items are new so add new entries + await Container.get(ProcessedDataRepository).insert({ + workflowId: contextData.workflow.id, + context: dbContext, + value: { + mode: options.mode, + data: incomingItems.pop(), + }, + }); + + return { + new: items, + processed: [], + }; + } + + const returnData: IDeduplicationOutput = { + new: [], + processed: [], + }; + + if (!this.processedDataIsLatest(processedData.value)) { + return returnData; + } + + let largestValue = processedData.value.data; + const processedDataValue = processedData.value; + + incomingItems.forEach((item) => { + if (DeduplicationHelper.compareValues(options.mode, item, processedDataValue.data) === 1) { + returnData.new.push(item); + if (DeduplicationHelper.compareValues(options.mode, item, largestValue) === 1) { + largestValue = item; + } + } else { + returnData.processed.push(item); + } + }); + + processedData.value.data = largestValue; + + await Container.get(ProcessedDataRepository).update( + { workflowId: processedData.workflowId, context: processedData.context }, + processedData, + ); + + return returnData; + } + + private async handleHashedItems( + items: DeduplicationItemTypes[], + contextData: ICheckProcessedContextData, + options: ICheckProcessedOptions, + processedData: ProcessedData | null, + dbContext: string, + ): Promise<IDeduplicationOutput> { + const hashedItems = items.map((item) => DeduplicationHelper.createValueHash(item)); + + if (!processedData) { + // All items are new so add new entries + if
+
+	private async handleHashedItems(
+		items: DeduplicationItemTypes[],
+		contextData: ICheckProcessedContextData,
+		options: ICheckProcessedOptions,
+		processedData: ProcessedData | null,
+		dbContext: string,
+	): Promise<IDeduplicationOutput> {
+		const hashedItems = items.map((item) => DeduplicationHelper.createValueHash(item));
+
+		if (!processedData) {
+			// All items are new so add new entries
+			if (options.maxEntries) {
+				hashedItems.splice(0, hashedItems.length - options.maxEntries);
+			}
+			await Container.get(ProcessedDataRepository).insert({
+				workflowId: contextData.workflow.id,
+				context: dbContext,
+				value: {
+					mode: options.mode,
+					data: hashedItems,
+				},
+			});
+
+			return {
+				new: items,
+				processed: [],
+			};
+		}
+
+		const returnData: IDeduplicationOutput = {
+			new: [],
+			processed: [],
+		};
+
+		if (!this.processedDataHasEntries(processedData.value)) {
+			return returnData;
+		}
+
+		const processedDataValue = processedData.value;
+		const processedItemsSet = new Set(processedDataValue.data);
+
+		hashedItems.forEach((item, index) => {
+			if (processedItemsSet.has(item)) {
+				returnData.processed.push(items[index]);
+			} else {
+				returnData.new.push(items[index]);
+				processedDataValue.data.push(item);
+			}
+		});
+
+		if (options.maxEntries) {
+			processedDataValue.data.splice(0, processedDataValue.data.length - options.maxEntries);
+		}
+
+		await Container.get(ProcessedDataRepository).update(
+			{ workflowId: processedData.workflowId, context: processedData.context },
+			processedData,
+		);
+
+		return returnData;
+	}
+
+	async checkProcessedAndRecord(
+		items: DeduplicationItemTypes[],
+		scope: DeduplicationScope,
+		contextData: ICheckProcessedContextData,
+		options: ICheckProcessedOptions,
+	): Promise<IDeduplicationOutput> {
+		const dbContext = DeduplicationHelper.createContext(scope, contextData);
+
+		assert.ok(contextData.workflow.id);
+
+		const processedData = await this.findProcessedData(scope, contextData);
+
+		this.validateMode(processedData, options);
+
+		if (['latestIncrementalKey', 'latestDate'].includes(options.mode)) {
+			return await this.handleLatestModes(items, contextData, options, processedData, dbContext);
+		}
+		// mode: entries
+		return await this.handleHashedItems(items, contextData, options, processedData, dbContext);
+	}
+
+	async removeProcessed(
+		items: DeduplicationItemTypes[],
+		scope: DeduplicationScope,
+		contextData: ICheckProcessedContextData,
+		options: ICheckProcessedOptions,
+	): Promise<void> {
+		if (['latestIncrementalKey', 'latestDate'].includes(options.mode)) {
+			throw new DeduplicationError('Removing processed data is not possible in mode "latest"');
+		}
+		assert.ok(contextData.workflow.id);
+
+		const processedData = await Container.get(ProcessedDataRepository).findOne({
+			where: {
+				workflowId: contextData.workflow.id,
+				context: DeduplicationHelper.createContext(scope, contextData),
+			},
+		});
+
+		if (!processedData) {
+			return;
+		}
+
+		const hashedItems = items.map((item) => DeduplicationHelper.createValueHash(item));
+
+		if (!this.processedDataHasEntries(processedData.value)) {
+			return;
+		}
+
+		const processedDataValue = processedData.value;
+
+		hashedItems.forEach((item) => {
+			const index = processedDataValue.data.findIndex((value) => value === item);
+			if (index !== -1) {
+				processedDataValue.data.splice(index, 1);
+			}
+		});
+
+		await Container.get(ProcessedDataRepository).update(
+			{ workflowId: processedData.workflowId, context: processedData.context },
+			processedData,
+		);
+	}
+
+	async clearAllProcessedItems(
+		scope: DeduplicationScope,
+		contextData: ICheckProcessedContextData,
+	): Promise<void> {
+		await Container.get(ProcessedDataRepository).delete({
+			workflowId: contextData.workflow.id,
+			context: DeduplicationHelper.createContext(scope, contextData),
+		});
+	}
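A hypothetical call site for the `checkProcessedAndRecord` API above, assuming a running n8n instance; `workflow` and `node` would come from the execution context (the real signatures use `IWorkflowBase` and `INode`, simplified to bare ids here):

```ts
// Hypothetical usage sketch; names of the local function and its parameters
// are illustrative, not part of this diff.
import { getDataDeduplicationService } from '@/deduplication';

export async function dedupeIncomingItems(
	workflow: { id: string },
	node: { id: string },
	items: string[],
) {
	const deduplicator = getDataDeduplicationService();

	// 'entries' mode hashes every item and records unseen hashes in the
	// ProcessedData table under the per-node context key `n:<node id>`.
	const { new: unseen, processed } = await deduplicator.checkProcessedAndRecord(
		items,
		'node', // scope
		{ workflow, node }, // contextData
		{ mode: 'entries', maxEntries: 1000 }, // options
	);

	return { unseen, processed };
}
```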
+
+	async getProcessedDataCount(
+		scope: DeduplicationScope,
+		contextData: ICheckProcessedContextData,
+		options: ICheckProcessedOptions,
+	): Promise<number> {
+		const processedDataRepository = Container.get(ProcessedDataRepository);
+
+		const processedData = await processedDataRepository.findOne({
+			where: {
+				workflowId: contextData.workflow.id,
+				context: DeduplicationHelper.createContext(scope, contextData),
+			},
+		});
+
+		if (
+			options.mode === 'entries' &&
+			processedData &&
+			this.processedDataHasEntries(processedData.value)
+		) {
+			return processedData.value.data.length;
+		} else {
+			return 0;
+		}
+	}
+}
diff --git a/packages/cli/src/deduplication/index.ts b/packages/cli/src/deduplication/index.ts
new file mode 100644
index 0000000000..2cf2973d71
--- /dev/null
+++ b/packages/cli/src/deduplication/index.ts
@@ -0,0 +1,7 @@
+import { type IDataDeduplicator } from 'n8n-workflow';
+
+import { DeduplicationHelper } from './deduplication-helper';
+
+export function getDataDeduplicationService(): IDataDeduplicator {
+	return new DeduplicationHelper();
+}
diff --git a/packages/cli/src/environments/source-control/__tests__/source-control.service.test.ts b/packages/cli/src/environments/source-control/__tests__/source-control.service.test.ts
new file mode 100644
index 0000000000..06dab4f97c
--- /dev/null
+++ b/packages/cli/src/environments/source-control/__tests__/source-control.service.test.ts
@@ -0,0 +1,46 @@
+import { mock } from 'jest-mock-extended';
+import { InstanceSettings } from 'n8n-core';
+
+import { SourceControlPreferencesService } from '@/environments/source-control/source-control-preferences.service.ee';
+import { SourceControlService } from '@/environments/source-control/source-control.service.ee';
+
+describe('SourceControlService', () => {
+	const preferencesService = new SourceControlPreferencesService(
+		new InstanceSettings(),
+		mock(),
+		mock(),
+	);
+	const sourceControlService = new SourceControlService(
+		mock(),
+		mock(),
+		preferencesService,
+		mock(),
+		mock(),
+		mock(),
+		mock(),
+	);
+
+	describe('pushWorkfolder', () => {
+		it('should throw an error if a file is given that is not in the workfolder', async () => {
+			jest.spyOn(sourceControlService, 'sanityCheck').mockResolvedValue(undefined);
+
+			await expect(
+				sourceControlService.pushWorkfolder({
+					fileNames: [
+						{
+							file: '/etc/passwd',
+							id: 'test',
+							name: 'secret-file',
+							type: 'file',
+							status: 'modified',
+							location: 'local',
+							conflict: false,
+							updatedAt: new Date().toISOString(),
+							pushed: false,
+						},
+					],
+				}),
+			).rejects.toThrow('File path /etc/passwd is invalid');
+		});
+	});
+});
diff --git a/packages/cli/src/environments/source-control/source-control-export.service.ee.ts b/packages/cli/src/environments/source-control/source-control-export.service.ee.ts
index 321534ff6c..9c495bbb8d 100644
--- a/packages/cli/src/environments/source-control/source-control-export.service.ee.ts
+++ b/packages/cli/src/environments/source-control/source-control-export.service.ee.ts
@@ -11,7 +11,7 @@ import { SharedWorkflowRepository } from '@/databases/repositories/shared-workfl
 import { TagRepository } from '@/databases/repositories/tag.repository';
 import { WorkflowTagMappingRepository } from '@/databases/repositories/workflow-tag-mapping.repository';
 import { WorkflowRepository } from '@/databases/repositories/workflow.repository';
-import { Logger } from '@/logger';
+import { Logger } from '@/logging/logger.service';
 
 import {
 	SOURCE_CONTROL_CREDENTIAL_EXPORT_FOLDER,
diff --git a/packages/cli/src/environments/source-control/source-control-git.service.ee.ts b/packages/cli/src/environments/source-control/source-control-git.service.ee.ts
index 14cdb9e81a..99571cdd52 100644
--- a/packages/cli/src/environments/source-control/source-control-git.service.ee.ts
+++ b/packages/cli/src/environments/source-control/source-control-git.service.ee.ts
@@ -14,7 +14,7 @@ import type {
 import { Service } from 'typedi';
 
 import type { User } from '@/databases/entities/user';
-import { Logger } from '@/logger';
+import { Logger } from '@/logging/logger.service';
 import { OwnershipService } from '@/services/ownership.service';
 
 import {
diff --git a/packages/cli/src/environments/source-control/source-control-helper.ee.ts b/packages/cli/src/environments/source-control/source-control-helper.ee.ts
index 29393c5efe..00a9875741 100644
--- a/packages/cli/src/environments/source-control/source-control-helper.ee.ts
+++ b/packages/cli/src/environments/source-control/source-control-helper.ee.ts
@@ -1,10 +1,13 @@
 import { generateKeyPairSync } from 'crypto';
 import { constants as fsConstants, mkdirSync, accessSync } from 'fs';
+import { ApplicationError } from 'n8n-workflow';
+import { ok } from 'node:assert/strict';
 import path from 'path';
 import { Container } from 'typedi';
 
 import { License } from '@/license';
-import { Logger } from '@/logger';
+import { Logger } from '@/logging/logger.service';
+import { isContainedWithin } from '@/utils/path-util';
 
 import {
 	SOURCE_CONTROL_GIT_KEY_COMMENT,
@@ -163,3 +166,24 @@ export function getTrackingInformationFromPostPushResult(result: SourceControlle
 			uniques.filter((file) => file.pushed && file.file.startsWith('variable_stubs')).length ?? 0,
 	};
 }
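The validation helper added below relies on `isContainedWithin` from `@/utils/path-util`, which is imported above but is not part of this diff. A typical implementation, sketched here under that assumption, resolves both paths and checks that the child cannot escape the parent:

```ts
// Hedged sketch of what isContainedWithin presumably does; the actual
// utility in '@/utils/path-util' may differ in detail.
import path from 'node:path';

export function isContainedWithin(parentFolder: string, filePath: string): boolean {
	parentFolder = path.resolve(parentFolder);
	filePath = path.resolve(filePath);
	if (parentFolder === filePath) return true;
	// The trailing separator prevents '/work-evil' matching parent '/work'
	return filePath.startsWith(parentFolder + path.sep);
}
```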
+
+/**
+ * Normalizes and validates the given source controlled file path. Ensures
+ * the path is absolute and contained within the git folder.
+ *
+ * @throws {ApplicationError} If the path is not within the git folder
+ */
+export function normalizeAndValidateSourceControlledFilePath(
+	gitFolderPath: string,
+	filePath: string,
+) {
+	ok(path.isAbsolute(gitFolderPath), 'gitFolder must be an absolute path');
+
+	const normalizedPath = path.isAbsolute(filePath) ? filePath : path.join(gitFolderPath, filePath);
+
+	if (!isContainedWithin(gitFolderPath, normalizedPath)) {
+		throw new ApplicationError(`File path ${filePath} is invalid`);
+	}
+
+	return normalizedPath;
+}
diff --git a/packages/cli/src/environments/source-control/source-control-import.service.ee.ts b/packages/cli/src/environments/source-control/source-control-import.service.ee.ts
index fb491f39d2..b5012d2762 100644
--- a/packages/cli/src/environments/source-control/source-control-import.service.ee.ts
+++ b/packages/cli/src/environments/source-control/source-control-import.service.ee.ts
@@ -2,7 +2,12 @@ import { In } from '@n8n/typeorm';
 import glob from 'fast-glob';
 import { Credentials, InstanceSettings } from 'n8n-core';
-import { ApplicationError, jsonParse, ErrorReporterProxy as ErrorReporter } from 'n8n-workflow';
+import {
+	ApplicationError,
+	jsonParse,
+	ErrorReporterProxy as ErrorReporter,
+	ensureError,
+} from 'n8n-workflow';
 import { readFile as fsReadFile } from 'node:fs/promises';
 import path from 'path';
 import { Container, Service } from 'typedi';
@@ -23,7 +28,7 @@ import { VariablesRepository } from '@/databases/repositories/variables.reposito
 import { WorkflowTagMappingRepository } from '@/databases/repositories/workflow-tag-mapping.repository';
 import { WorkflowRepository } from '@/databases/repositories/workflow.repository';
 import type { IWorkflowToImport } from '@/interfaces';
-import { Logger } from '@/logger';
+import { Logger } from '@/logging/logger.service';
 import { isUniqueConstraintError } from '@/response-helper';
 import { assertNever } from '@/utils';
@@ -274,8 +279,9 @@ export class SourceControlImportService {
 					this.logger.debug(`Reactivating workflow id ${existingWorkflow.id}`);
 					await workflowManager.add(existingWorkflow.id, 'activate');
 					// update the versionId of the workflow to match the imported workflow
-				} catch (error) {
-					this.logger.error(`Failed to activate workflow ${existingWorkflow.id}`, error as Error);
+				} catch (e) {
+					const error = ensureError(e);
+					this.logger.error(`Failed to activate workflow ${existingWorkflow.id}`, { error });
 				} finally {
 					await Container.get(WorkflowRepository).update(
 						{ id: existingWorkflow.id },
@@ -377,8 +383,9 @@ export class SourceControlImportService {
 				await fsReadFile(candidate.file, { encoding: 'utf8' }),
 				{ fallbackValue: { tags: [], mappings: [] } },
 			);
-		} catch (error) {
-			this.logger.error(`Failed to import tags from file ${candidate.file}`, error as Error);
+		} catch (e) {
+			const error = ensureError(e);
+			this.logger.error(`Failed to import tags from file ${candidate.file}`, { error });
 			return;
 		}
@@ -444,8 +451,8 @@ export class SourceControlImportService {
 				await fsReadFile(candidate.file, { encoding: 'utf8' }),
 				{ fallbackValue: [] },
 			);
-		} catch (error) {
-			this.logger.error(`Failed to import tags from file ${candidate.file}`, error as Error);
+		} catch (e) {
+			this.logger.error(`Failed to import tags from file ${candidate.file}`, { error: e });
 			return;
 		}
 		const overriddenKeys = Object.keys(valueOverrides ?? {});
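The catch blocks above funnel unknown throwables through `ensureError` before structured logging. Roughly, the helper (shipped by n8n-workflow, possibly differing in detail) behaves like this sketch:

```ts
// Minimal sketch of the ensureError pattern, not the library's exact code.
function ensureError(value: unknown): Error {
	return value instanceof Error ? value : new Error(String(value), { cause: value });
}

try {
	JSON.parse('{not json');
} catch (e) {
	const error = ensureError(e); // always a real Error with a stack
	console.error('Failed to import tags', { error });
}
```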
diff --git a/packages/cli/src/environments/source-control/source-control-preferences.service.ee.ts b/packages/cli/src/environments/source-control/source-control-preferences.service.ee.ts
index a99c5fb3e8..d3a34d784f 100644
--- a/packages/cli/src/environments/source-control/source-control-preferences.service.ee.ts
+++ b/packages/cli/src/environments/source-control/source-control-preferences.service.ee.ts
@@ -9,7 +9,7 @@ import Container, { Service } from 'typedi';
 
 import config from '@/config';
 import { SettingsRepository } from '@/databases/repositories/settings.repository';
-import { Logger } from '@/logger';
+import { Logger } from '@/logging/logger.service';
 
 import {
 	SOURCE_CONTROL_SSH_FOLDER,
diff --git a/packages/cli/src/environments/source-control/source-control.controller.ee.ts b/packages/cli/src/environments/source-control/source-control.controller.ee.ts
index 7e41fd7cae..7b8b2a7266 100644
--- a/packages/cli/src/environments/source-control/source-control.controller.ee.ts
+++ b/packages/cli/src/environments/source-control/source-control.controller.ee.ts
@@ -170,6 +170,7 @@ export class SourceControlController {
 		if (this.sourceControlPreferencesService.isBranchReadOnly()) {
 			throw new BadRequestError('Cannot push onto read-only branch.');
 		}
+
 		try {
 			await this.sourceControlService.setGitUserDetails(
 				`${req.user.firstName} ${req.user.lastName}`,
diff --git a/packages/cli/src/environments/source-control/source-control.service.ee.ts b/packages/cli/src/environments/source-control/source-control.service.ee.ts
index 11340a0e15..58c213f03c 100644
--- a/packages/cli/src/environments/source-control/source-control.service.ee.ts
+++ b/packages/cli/src/environments/source-control/source-control.service.ee.ts
@@ -10,7 +10,7 @@ import type { Variables } from '@/databases/entities/variables';
 import { TagRepository } from '@/databases/repositories/tag.repository';
 import { BadRequestError } from '@/errors/response-errors/bad-request.error';
 import { EventService } from '@/events/event.service';
-import { Logger } from '@/logger';
+import { Logger } from '@/logging/logger.service';
 
 import {
 	SOURCE_CONTROL_DEFAULT_EMAIL,
@@ -25,6 +25,7 @@ import {
 	getTrackingInformationFromPrePushResult,
 	getTrackingInformationFromPullResult,
 	getVariablesPath,
+	normalizeAndValidateSourceControlledFilePath,
 	sourceControlFoldersExistCheck,
 } from './source-control-helper.ee';
 import { SourceControlImportService } from './source-control-import.service.ee';
@@ -80,7 +81,7 @@ export class SourceControlService {
 		});
 	}
 
-	private async sanityCheck(): Promise<void> {
+	public async sanityCheck(): Promise<void> {
 		try {
 			const foldersExisted = sourceControlFoldersExistCheck(
 				[this.gitFolder, this.sshFolder],
@@ -217,8 +218,20 @@
 			throw new BadRequestError('Cannot push onto read-only branch.');
 		}
 
+		const filesToPush = options.fileNames.map((file) => {
+			const normalizedPath = normalizeAndValidateSourceControlledFilePath(
+				this.gitFolder,
+				file.file,
+			);
+
+			return {
+				...file,
+				file: normalizedPath,
+			};
+		});
+
 		// only determine file status if not provided by the frontend
-		let statusResult: SourceControlledFile[] = options.fileNames;
+		let statusResult: SourceControlledFile[] = filesToPush;
 		if (statusResult.length === 0) {
 			statusResult = (await this.getStatus({
 				direction: 'push',
@@ -240,7 +253,7 @@
 		const filesToBePushed = new Set<string>();
 		const filesToBeDeleted = new Set<string>();
 
-		options.fileNames.forEach((e) => {
+		filesToPush.forEach((e) => {
 			if (e.status !== 'deleted') {
 				filesToBePushed.add(e.file);
 			} else {
@@ -250,12 +263,12 @@
 
 		this.sourceControlExportService.rmFilesFromExportFolder(filesToBeDeleted);
 
-		const workflowsToBeExported = options.fileNames.filter(
+		const workflowsToBeExported = filesToPush.filter(
 			(e) => e.type === 'workflow' && e.status !== 'deleted',
 		);
 		await this.sourceControlExportService.exportWorkflowsToWorkFolder(workflowsToBeExported);
 
-		const credentialsToBeExported = options.fileNames.filter(
+		const credentialsToBeExported = filesToPush.filter(
 			(e) => e.type === 'credential' && e.status !== 'deleted',
 		);
 		const credentialExportResult =
@@ -269,11 +282,11 @@
 			});
 		}
 
-		if (options.fileNames.find((e) => e.type === 'tags')) {
+		if (filesToPush.find((e) => e.type === 'tags')) {
 			await this.sourceControlExportService.exportTagsToWorkFolder();
 		}
 
-		if (options.fileNames.find((e) => e.type === 'variables')) {
+		if (filesToPush.find((e) => e.type === 'variables')) {
 			await this.sourceControlExportService.exportVariablesToWorkFolder();
 		}
 
@@ -281,7 +294,7 @@
 		for (let i = 0; i < statusResult.length; i++) {
 			// eslint-disable-next-line @typescript-eslint/no-loop-func
-			if (options.fileNames.find((file) => file.file === statusResult[i].file)) {
+			if (filesToPush.find((file) => file.file === statusResult[i].file)) {
 				statusResult[i].pushed = true;
 			}
 		}
diff --git a/packages/cli/src/error-reporting.ts b/packages/cli/src/error-reporting.ts
index d1ecd39198..e429bdbd30 100644
--- a/packages/cli/src/error-reporting.ts
+++ b/packages/cli/src/error-reporting.ts
@@ -1,6 +1,7 @@
 import { GlobalConfig } from '@n8n/config';
 // eslint-disable-next-line n8n-local-rules/misplaced-n8n-typeorm-import
 import { QueryFailedError } from '@n8n/typeorm';
+import { AxiosError } from 'axios';
 import { createHash } from 'crypto';
 import { ErrorReporterProxy, ApplicationError } from 'n8n-workflow';
 import Container from 'typedi';
@@ -67,6 +68,8 @@ export const initErrorHandling = async () => {
 		beforeSend(event, { originalException }) {
 			if (!originalException) return null;
 
+			if (originalException instanceof AxiosError) return null;
+
 			if (
 				originalException instanceof QueryFailedError &&
 				['SQLITE_FULL', 'SQLITE_IOERR'].some((errMsg) => originalException.message.includes(errMsg))
diff --git a/packages/cli/src/errors/deduplication.error.ts b/packages/cli/src/errors/deduplication.error.ts
new file mode 100644
index 0000000000..8e9173abb9
--- /dev/null
+++ b/packages/cli/src/errors/deduplication.error.ts
@@ -0,0 +1,7 @@
+import { ApplicationError } from 'n8n-workflow';
+
+export class DeduplicationError extends ApplicationError {
+	constructor(message: string) {
+		super(`Deduplication Failed: ${message}`);
+	}
+}
diff --git a/packages/cli/src/errors/port-taken.error.ts b/packages/cli/src/errors/port-taken.error.ts
deleted file mode 100644
index 30c63a679f..0000000000
--- a/packages/cli/src/errors/port-taken.error.ts
+++ /dev/null
@@ -1,9 +0,0 @@
-import { ApplicationError } from 'n8n-workflow';
-
-export class PortTakenError extends ApplicationError {
-	constructor(port: number) {
-		super(
-			`Port ${port} is already in use.
Do you already have the n8n main process running on that port?`, - ); - } -} diff --git a/packages/cli/src/errors/response-errors/webhook-not-found.error.ts b/packages/cli/src/errors/response-errors/webhook-not-found.error.ts index 648a7a0106..8b2d8bd26c 100644 --- a/packages/cli/src/errors/response-errors/webhook-not-found.error.ts +++ b/packages/cli/src/errors/response-errors/webhook-not-found.error.ts @@ -47,10 +47,13 @@ export class WebhookNotFoundError extends NotFoundError { ) { const errorMsg = webhookNotFoundErrorMessage({ path, httpMethod, webhookMethods }); - const hintMsg = - hint === 'default' - ? "Click the 'Test workflow' button on the canvas, then try again. (In test mode, the webhook only works for one call after you click this button)" - : "The workflow must be active for a production URL to run successfully. You can activate the workflow using the toggle in the top-right of the editor. Note that unlike test URL calls, production URL calls aren't shown on the canvas (only in the executions list)"; + let hintMsg = ''; + if (!webhookMethods?.length) { + hintMsg = + hint === 'default' + ? "Click the 'Test workflow' button on the canvas, then try again. (In test mode, the webhook only works for one call after you click this button)" + : "The workflow must be active for a production URL to run successfully. You can activate the workflow using the toggle in the top-right of the editor. Note that unlike test URL calls, production URL calls aren't shown on the canvas (only in the executions list)"; + } super(errorMsg, hintMsg); } diff --git a/packages/cli/src/eventbus/message-event-bus-destination/message-event-bus-destination-from-db.ts b/packages/cli/src/eventbus/message-event-bus-destination/message-event-bus-destination-from-db.ts index d901346d7e..4046855f30 100644 --- a/packages/cli/src/eventbus/message-event-bus-destination/message-event-bus-destination-from-db.ts +++ b/packages/cli/src/eventbus/message-event-bus-destination/message-event-bus-destination-from-db.ts @@ -2,7 +2,7 @@ import { MessageEventBusDestinationTypeNames } from 'n8n-workflow'; import { Container } from 'typedi'; import type { EventDestinations } from '@/databases/entities/event-destinations'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { MessageEventBusDestinationSentry } from './message-event-bus-destination-sentry.ee'; import { MessageEventBusDestinationSyslog } from './message-event-bus-destination-syslog.ee'; diff --git a/packages/cli/src/eventbus/message-event-bus-destination/message-event-bus-destination-syslog.ee.ts b/packages/cli/src/eventbus/message-event-bus-destination/message-event-bus-destination-syslog.ee.ts index dc1727cb0c..83db469d79 100644 --- a/packages/cli/src/eventbus/message-event-bus-destination/message-event-bus-destination-syslog.ee.ts +++ b/packages/cli/src/eventbus/message-event-bus-destination/message-event-bus-destination-syslog.ee.ts @@ -7,7 +7,7 @@ import { MessageEventBusDestinationTypeNames } from 'n8n-workflow'; import syslog from 'syslog-client'; import Container from 'typedi'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { MessageEventBusDestination } from './message-event-bus-destination.ee'; import { eventMessageGenericDestinationTestEvent } from '../event-message-classes/event-message-generic'; diff --git a/packages/cli/src/eventbus/message-event-bus-destination/message-event-bus-destination-webhook.ee.ts 
b/packages/cli/src/eventbus/message-event-bus-destination/message-event-bus-destination-webhook.ee.ts index f6a35f4329..a5373d0cc5 100644 --- a/packages/cli/src/eventbus/message-event-bus-destination/message-event-bus-destination-webhook.ee.ts +++ b/packages/cli/src/eventbus/message-event-bus-destination/message-event-bus-destination-webhook.ee.ts @@ -180,7 +180,7 @@ export class MessageEventBusDestinationWebhook try { JSON.parse(this.jsonQuery); } catch { - this.logger.error('JSON parameter need to be an valid JSON'); + this.logger.error('JSON parameter needs to be valid JSON'); } this.axiosRequestOptions.params = jsonParse(this.jsonQuery); } @@ -198,7 +198,7 @@ export class MessageEventBusDestinationWebhook try { JSON.parse(this.jsonHeaders); } catch { - this.logger.error('JSON parameter need to be an valid JSON'); + this.logger.error('JSON parameter needs to be valid JSON'); } this.axiosRequestOptions.headers = jsonParse(this.jsonHeaders); } diff --git a/packages/cli/src/eventbus/message-event-bus-destination/message-event-bus-destination.ee.ts b/packages/cli/src/eventbus/message-event-bus-destination/message-event-bus-destination.ee.ts index 4d6725ff2c..7b65767b04 100644 --- a/packages/cli/src/eventbus/message-event-bus-destination/message-event-bus-destination.ee.ts +++ b/packages/cli/src/eventbus/message-event-bus-destination/message-event-bus-destination.ee.ts @@ -5,7 +5,7 @@ import { v4 as uuid } from 'uuid'; import { EventDestinationsRepository } from '@/databases/repositories/event-destinations.repository'; import { License } from '@/license'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import type { EventMessageTypes } from '../event-message-classes'; import type { AbstractEventMessage } from '../event-message-classes/abstract-event-message'; diff --git a/packages/cli/src/eventbus/message-event-bus-writer/message-event-bus-log-writer.ts b/packages/cli/src/eventbus/message-event-bus-writer/message-event-bus-log-writer.ts index 6b009f38e9..6c6a928a67 100644 --- a/packages/cli/src/eventbus/message-event-bus-writer/message-event-bus-log-writer.ts +++ b/packages/cli/src/eventbus/message-event-bus-writer/message-event-bus-log-writer.ts @@ -12,7 +12,7 @@ import Container from 'typedi'; import { Worker } from 'worker_threads'; import { inTest } from '@/constants'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import type { EventMessageTypes } from '../event-message-classes'; import { isEventMessageOptions } from '../event-message-classes/abstract-event-message'; @@ -149,7 +149,7 @@ export class MessageEventBusLogWriter { this._worker = new Worker(workerFileName); if (this.worker) { this.worker.on('messageerror', async (error) => { - this.logger.error('Event Bus Log Writer thread error, attempting to restart...', error); + this.logger.error('Event Bus Log Writer thread error, attempting to restart...', { error }); await MessageEventBusLogWriter.instance.startThread(); }); return true; diff --git a/packages/cli/src/eventbus/message-event-bus/message-event-bus.ts b/packages/cli/src/eventbus/message-event-bus/message-event-bus.ts index cf9a585478..0f622c2317 100644 --- a/packages/cli/src/eventbus/message-event-bus/message-event-bus.ts +++ b/packages/cli/src/eventbus/message-event-bus/message-event-bus.ts @@ -13,7 +13,7 @@ import { EventDestinationsRepository } from '@/databases/repositories/event-dest import { ExecutionRepository } from '@/databases/repositories/execution.repository'; import { 
WorkflowRepository } from '@/databases/repositories/workflow.repository';
 import { License } from '@/license';
-import { Logger } from '@/logger';
+import { Logger } from '@/logging/logger.service';
 import { OrchestrationService } from '@/services/orchestration.service';
 
 import { ExecutionRecoveryService } from '../../executions/execution-recovery.service';
@@ -210,7 +210,7 @@ export class MessageEventBus extends EventEmitter {
 		this.destinations[destination.getId()] = destination;
 		this.destinations[destination.getId()].startListening();
 		if (notifyWorkers) {
-			await this.orchestrationService.publish('restartEventBus');
+			await this.orchestrationService.publish('restart-event-bus');
 		}
 		return destination;
 	}
@@ -236,7 +236,7 @@
 			delete this.destinations[id];
 		}
 		if (notifyWorkers) {
-			await this.orchestrationService.publish('restartEventBus');
+			await this.orchestrationService.publish('restart-event-bus');
 		}
 		return result;
 	}
diff --git a/packages/cli/src/events/__tests__/log-streaming-event-relay.test.ts b/packages/cli/src/events/__tests__/log-streaming-event-relay.test.ts
index d768218950..4727c8ef72 100644
--- a/packages/cli/src/events/__tests__/log-streaming-event-relay.test.ts
+++ b/packages/cli/src/events/__tests__/log-streaming-event-relay.test.ts
@@ -3,8 +3,8 @@ import type { INode, IRun, IWorkflowBase } from 'n8n-workflow';
 
 import type { MessageEventBus } from '@/eventbus/message-event-bus/message-event-bus';
 import { EventService } from '@/events/event.service';
-import { LogStreamingEventRelay } from '@/events/log-streaming-event-relay';
-import type { RelayEventMap } from '@/events/relay-event-map';
+import type { RelayEventMap } from '@/events/maps/relay.event-map';
+import { LogStreamingEventRelay } from '@/events/relays/log-streaming.event-relay';
 import type { IWorkflowDb } from '@/interfaces';
 
 describe('LogStreamingEventRelay', () => {
diff --git a/packages/cli/src/events/__tests__/telemetry-event-relay.test.ts b/packages/cli/src/events/__tests__/telemetry-event-relay.test.ts
index 9a05835205..df65a70ecb 100644
--- a/packages/cli/src/events/__tests__/telemetry-event-relay.test.ts
+++ b/packages/cli/src/events/__tests__/telemetry-event-relay.test.ts
@@ -9,8 +9,8 @@ import type { ProjectRelationRepository } from '@/databases/repositories/project
 import type { SharedWorkflowRepository } from '@/databases/repositories/shared-workflow.repository';
 import type { WorkflowRepository } from '@/databases/repositories/workflow.repository';
 import { EventService } from '@/events/event.service';
-import type { RelayEventMap } from '@/events/relay-event-map';
-import { TelemetryEventRelay } from '@/events/telemetry-event-relay';
+import type { RelayEventMap } from '@/events/maps/relay.event-map';
+import { TelemetryEventRelay } from '@/events/relays/telemetry.event-relay';
 import type { IWorkflowDb } from '@/interfaces';
 import type { License } from '@/license';
 import type { NodeTypes } from '@/node-types';
@@ -37,6 +37,10 @@ describe('TelemetryEventRelay', () => {
 				includeQueueMetrics: false,
 			},
 		},
+		logging: {
+			level: 'info',
+			outputs: ['console'],
+		},
 	});
 	const workflowRepository = mock<WorkflowRepository>();
 	const nodeTypes = mock<NodeTypes>();
@@ -1053,4 +1057,20 @@
 			);
 		});
 	});
+
+	describe('Community+ registered', () => {
+		it('should track `license-community-plus-registered` event', () => {
+			const event: RelayEventMap['license-community-plus-registered'] = {
+				email: 'user@example.com',
+				licenseKey: 'license123',
+			};
+
+			eventService.emit('license-community-plus-registered', event);
+
+			expect(telemetry.track).toHaveBeenCalledWith('User registered for license community plus', {
+				email: 'user@example.com',
+				licenseKey: 'license123',
+			});
+		});
+	});
 });
diff --git a/packages/cli/src/events/event.service.ts b/packages/cli/src/events/event.service.ts
index 10ba7666ef..b8e00ecea7 100644
--- a/packages/cli/src/events/event.service.ts
+++ b/packages/cli/src/events/event.service.ts
@@ -2,11 +2,12 @@ import { Service } from 'typedi';
 
 import { TypedEmitter } from '@/typed-emitter';
 
-import type { AiEventMap } from './ai-event-map';
-import type { QueueMetricsEventMap } from './queue-metrics-event-map';
-import type { RelayEventMap } from './relay-event-map';
+import type { AiEventMap } from './maps/ai.event-map';
+import type { PubSubEventMap } from './maps/pub-sub.event-map';
+import type { QueueMetricsEventMap } from './maps/queue-metrics.event-map';
+import type { RelayEventMap } from './maps/relay.event-map';
 
-type EventMap = RelayEventMap & QueueMetricsEventMap & AiEventMap;
+type EventMap = RelayEventMap & QueueMetricsEventMap & AiEventMap & PubSubEventMap;
 
 @Service()
 export class EventService extends TypedEmitter<EventMap> {}
diff --git a/packages/cli/src/events/ai-event-map.ts b/packages/cli/src/events/maps/ai.event-map.ts
similarity index 100%
rename from packages/cli/src/events/ai-event-map.ts
rename to packages/cli/src/events/maps/ai.event-map.ts
diff --git a/packages/cli/src/events/maps/pub-sub.event-map.ts b/packages/cli/src/events/maps/pub-sub.event-map.ts
new file mode 100644
index 0000000000..ff27741b9b
--- /dev/null
+++ b/packages/cli/src/events/maps/pub-sub.event-map.ts
@@ -0,0 +1,84 @@
+import type { PushType, WorkerStatus } from '@n8n/api-types';
+
+import type { IWorkflowDb } from '@/interfaces';
+
+export type PubSubEventMap = PubSubCommandMap & PubSubWorkerResponseMap;
+
+export type PubSubCommandMap = {
+	// #region Lifecycle
+
+	'reload-license': never;
+
+	'restart-event-bus': never;
+
+	'reload-external-secrets-providers': never;
+
+	// #endregion
+
+	// #region Community packages
+
+	'community-package-install': {
+		packageName: string;
+		packageVersion: string;
+	};
+
+	'community-package-update': {
+		packageName: string;
+		packageVersion: string;
+	};
+
+	'community-package-uninstall': {
+		packageName: string;
+	};
+
+	// #endregion
+
+	// #region Worker view
+
+	'get-worker-id': never;
+
+	'get-worker-status': never;
+
+	// #endregion
+
+	// #region Multi-main setup
+
+	'add-webhooks-triggers-and-pollers': {
+		workflowId: string;
+	};
+
+	'remove-triggers-and-pollers': {
+		workflowId: string;
+	};
+
+	'display-workflow-activation': {
+		workflowId: string;
+	};
+
+	'display-workflow-deactivation': {
+		workflowId: string;
+	};
+
+	'display-workflow-activation-error': {
+		workflowId: string;
+		errorMessage: string;
+	};
+
+	'relay-execution-lifecycle-event': {
+		type: PushType;
+		args: Record<string, unknown>;
+		pushRef: string;
+	};
+
+	'clear-test-webhooks': {
+		webhookKey: string;
+		workflowEntity: IWorkflowDb;
+		pushRef: string;
+	};
+
+	// #endregion
+};
+
+export type PubSubWorkerResponseMap = {
+	'response-to-get-worker-status': WorkerStatus;
+};
diff --git a/packages/cli/src/events/queue-metrics-event-map.ts b/packages/cli/src/events/maps/queue-metrics.event-map.ts
similarity index 100%
rename from packages/cli/src/events/queue-metrics-event-map.ts
rename to packages/cli/src/events/maps/queue-metrics.event-map.ts
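Maps like `PubSubEventMap` exist so that `emit` calls are checked at compile time against the payload declared for each event name. A minimal stand-in for `TypedEmitter` (the real one lives in `@/typed-emitter` and is not shown in this diff) illustrates the guarantee:

```ts
// Demo types and class are illustrative only, not the project's actual code.
type DemoEventMap = {
	'restart-event-bus': never;
	'community-package-install': { packageName: string; packageVersion: string };
};

class DemoEmitter<EventMap extends Record<string, unknown>> {
	emit<K extends keyof EventMap & string>(event: K, payload: EventMap[K]): void {
		console.log(event, payload);
	}
}

const emitter = new DemoEmitter<DemoEventMap>();
emitter.emit('community-package-install', {
	packageName: 'n8n-nodes-foo',
	packageVersion: '1.0.0',
});
// emitter.emit('community-package-install', {}); // compile error: missing fields
// emitter.emit('community-package-instal', ...); // compile error: unknown event
```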
diff --git a/packages/cli/src/events/relay-event-map.ts b/packages/cli/src/events/maps/relay.event-map.ts
similarity index 98%
rename from packages/cli/src/events/relay-event-map.ts
rename to packages/cli/src/events/maps/relay.event-map.ts
index a53a36842e..21b673a2b5 100644
--- a/packages/cli/src/events/relay-event-map.ts
+++ b/packages/cli/src/events/maps/relay.event-map.ts
@@ -11,7 +11,7 @@ import type { ProjectRole } from '@/databases/entities/project-relation';
 import type { GlobalRole } from '@/databases/entities/user';
 import type { IWorkflowDb } from '@/interfaces';
 
-import type { AiEventMap } from './ai-event-map';
+import type { AiEventMap } from './ai.event-map';
 
 export type UserLike = {
 	id: string;
@@ -420,6 +420,11 @@ export type RelayEventMap = {
 		success: boolean;
 	};
 
+	'license-community-plus-registered': {
+		email: string;
+		licenseKey: string;
+	};
+
 	// #endregion
 
 	// #region Variable
diff --git a/packages/cli/src/events/event-relay.ts b/packages/cli/src/events/relays/event-relay.ts
similarity index 81%
rename from packages/cli/src/events/event-relay.ts
rename to packages/cli/src/events/relays/event-relay.ts
index 3202b69c15..13e7dc01be 100644
--- a/packages/cli/src/events/event-relay.ts
+++ b/packages/cli/src/events/relays/event-relay.ts
@@ -1,8 +1,7 @@
 import { Service } from 'typedi';
 
-import type { RelayEventMap } from '@/events/relay-event-map';
-
-import { EventService } from './event.service';
+import { EventService } from '@/events/event.service';
+import type { RelayEventMap } from '@/events/maps/relay.event-map';
 
 @Service()
 export class EventRelay {
diff --git a/packages/cli/src/events/log-streaming-event-relay.ts b/packages/cli/src/events/relays/log-streaming.event-relay.ts
similarity index 98%
rename from packages/cli/src/events/log-streaming-event-relay.ts
rename to packages/cli/src/events/relays/log-streaming.event-relay.ts
index 788e5e50c4..c65af2874c 100644
--- a/packages/cli/src/events/log-streaming-event-relay.ts
+++ b/packages/cli/src/events/relays/log-streaming.event-relay.ts
@@ -3,10 +3,9 @@ import { Service } from 'typedi';
 
 import { Redactable } from '@/decorators/redactable';
 import { MessageEventBus } from '@/eventbus/message-event-bus/message-event-bus';
-import { EventRelay } from '@/events/event-relay';
-import type { RelayEventMap } from '@/events/relay-event-map';
-
-import { EventService } from './event.service';
+import { EventService } from '@/events/event.service';
+import type { RelayEventMap } from '@/events/maps/relay.event-map';
+import { EventRelay } from '@/events/relays/event-relay';
 
 @Service()
 export class LogStreamingEventRelay extends EventRelay {
diff --git a/packages/cli/src/events/telemetry-event-relay.ts b/packages/cli/src/events/relays/telemetry.event-relay.ts
similarity index 98%
rename from packages/cli/src/events/telemetry-event-relay.ts
rename to packages/cli/src/events/relays/telemetry.event-relay.ts
index 82beb17198..11d84751d0 100644
--- a/packages/cli/src/events/telemetry-event-relay.ts
+++ b/packages/cli/src/events/relays/telemetry.event-relay.ts
@@ -12,14 +12,14 @@ import { ProjectRelationRepository } from '@/databases/repositories/project-rela
 import { SharedWorkflowRepository } from '@/databases/repositories/shared-workflow.repository';
 import { WorkflowRepository } from '@/databases/repositories/workflow.repository';
 import { EventService } from '@/events/event.service';
-import type { RelayEventMap } from '@/events/relay-event-map';
+import type { RelayEventMap } from '@/events/maps/relay.event-map';
 import { determineFinalExecutionStatus } from
'@/execution-lifecycle-hooks/shared/shared-hook-functions'; import type { IExecutionTrackProperties } from '@/interfaces'; import { License } from '@/license'; import { NodeTypes } from '@/node-types'; import { EventRelay } from './event-relay'; -import { Telemetry } from '../telemetry'; +import { Telemetry } from '../../telemetry'; @Service() export class TelemetryEventRelay extends EventRelay { @@ -54,6 +54,7 @@ export class TelemetryEventRelay extends EventRelay { 'source-control-user-finished-push-ui': (event) => this.sourceControlUserFinishedPushUi(event), 'license-renewal-attempted': (event) => this.licenseRenewalAttempted(event), + 'license-community-plus-registered': (event) => this.licenseCommunityPlusRegistered(event), 'variable-created': () => this.variableCreated(), 'external-secrets-provider-settings-saved': (event) => this.externalSecretsProviderSettingsSaved(event), @@ -234,6 +235,16 @@ export class TelemetryEventRelay extends EventRelay { }); } + private licenseCommunityPlusRegistered({ + email, + licenseKey, + }: RelayEventMap['license-community-plus-registered']) { + this.telemetry.track('User registered for license community plus', { + email, + licenseKey, + }); + } + // #endregion // #region Variable diff --git a/packages/cli/src/execution-lifecycle-hooks/__tests__/save-execution-progress.test.ts b/packages/cli/src/execution-lifecycle-hooks/__tests__/save-execution-progress.test.ts index 80e620fb17..c8e6b3e88f 100644 --- a/packages/cli/src/execution-lifecycle-hooks/__tests__/save-execution-progress.test.ts +++ b/packages/cli/src/execution-lifecycle-hooks/__tests__/save-execution-progress.test.ts @@ -9,7 +9,7 @@ import { ExecutionRepository } from '@/databases/repositories/execution.reposito import { saveExecutionProgress } from '@/execution-lifecycle-hooks/save-execution-progress'; import * as fnModule from '@/execution-lifecycle-hooks/to-save-settings'; import type { IExecutionResponse } from '@/interfaces'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { mockInstance } from '@test/mocking'; mockInstance(Logger); diff --git a/packages/cli/src/execution-lifecycle-hooks/restore-binary-data-id.ts b/packages/cli/src/execution-lifecycle-hooks/restore-binary-data-id.ts index 31b045ade8..d9a1a9a0e9 100644 --- a/packages/cli/src/execution-lifecycle-hooks/restore-binary-data-id.ts +++ b/packages/cli/src/execution-lifecycle-hooks/restore-binary-data-id.ts @@ -4,7 +4,7 @@ import type { IRun, WorkflowExecuteMode } from 'n8n-workflow'; import Container from 'typedi'; import config from '@/config'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; /** * Whenever the execution ID is not available to the binary data service at the diff --git a/packages/cli/src/execution-lifecycle-hooks/save-execution-progress.ts b/packages/cli/src/execution-lifecycle-hooks/save-execution-progress.ts index 212990211f..ca9899e1ec 100644 --- a/packages/cli/src/execution-lifecycle-hooks/save-execution-progress.ts +++ b/packages/cli/src/execution-lifecycle-hooks/save-execution-progress.ts @@ -4,7 +4,7 @@ import { Container } from 'typedi'; import { ExecutionRepository } from '@/databases/repositories/execution.repository'; import { toSaveSettings } from '@/execution-lifecycle-hooks/to-save-settings'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; export async function saveExecutionProgress( workflowData: IWorkflowBase, diff --git 
a/packages/cli/src/execution-lifecycle-hooks/shared/shared-hook-functions.ts b/packages/cli/src/execution-lifecycle-hooks/shared/shared-hook-functions.ts index d6d55e63e5..68fd528f14 100644 --- a/packages/cli/src/execution-lifecycle-hooks/shared/shared-hook-functions.ts +++ b/packages/cli/src/execution-lifecycle-hooks/shared/shared-hook-functions.ts @@ -1,10 +1,10 @@ import pick from 'lodash/pick'; -import type { ExecutionStatus, IRun, IWorkflowBase } from 'n8n-workflow'; +import { ensureError, type ExecutionStatus, type IRun, type IWorkflowBase } from 'n8n-workflow'; import { Container } from 'typedi'; import { ExecutionRepository } from '@/databases/repositories/execution.repository'; -import type { ExecutionPayload, IExecutionDb } from '@/interfaces'; -import { Logger } from '@/logger'; +import type { IExecutionDb, UpdateExecutionPayload } from '@/interfaces'; +import { Logger } from '@/logging/logger.service'; import { ExecutionMetadataService } from '@/services/execution-metadata.service'; import { isWorkflowIdValid } from '@/utils'; @@ -46,7 +46,7 @@ export function prepareExecutionDataForDbUpdate(parameters: { 'pinData', ]); - const fullExecutionData: ExecutionPayload = { + const fullExecutionData: UpdateExecutionPayload = { data: runData.data, mode: runData.mode, finished: runData.finished ? runData.finished : false, @@ -95,7 +95,8 @@ export async function updateExistingExecution(parameters: { ); } } catch (e) { - logger.error(`Failed to save metadata for execution ID ${executionId}`, e as Error); + const error = ensureError(e); + logger.error(`Failed to save metadata for execution ID ${executionId}`, { error }); } if (executionData.finished === true && executionData.retryOf !== undefined) { diff --git a/packages/cli/src/executions/execution-recovery.service.ts b/packages/cli/src/executions/execution-recovery.service.ts index e1e6a1f180..33576d1368 100644 --- a/packages/cli/src/executions/execution-recovery.service.ts +++ b/packages/cli/src/executions/execution-recovery.service.ts @@ -10,7 +10,7 @@ import { NodeCrashedError } from '@/errors/node-crashed.error'; import { WorkflowCrashedError } from '@/errors/workflow-crashed.error'; import { EventService } from '@/events/event.service'; import type { IExecutionResponse } from '@/interfaces'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { Push } from '@/push'; import { getWorkflowHooksMain } from '@/workflow-execute-additional-data'; // @TODO: Dependency cycle diff --git a/packages/cli/src/executions/execution.service.ts b/packages/cli/src/executions/execution.service.ts index a04c10e45f..5f4ec0c535 100644 --- a/packages/cli/src/executions/execution.service.ts +++ b/packages/cli/src/executions/execution.service.ts @@ -21,7 +21,7 @@ import { ActiveExecutions } from '@/active-executions'; import { ConcurrencyControlService } from '@/concurrency/concurrency-control.service'; import config from '@/config'; import type { User } from '@/databases/entities/user'; -import { AnnotationTagMappingRepository } from '@/databases/repositories/annotation-tag-mapping.repository'; +import { AnnotationTagMappingRepository } from '@/databases/repositories/annotation-tag-mapping.repository.ee'; import { ExecutionAnnotationRepository } from '@/databases/repositories/execution-annotation.repository'; import { ExecutionRepository } from '@/databases/repositories/execution.repository'; import type { IGetExecutionsQueryFilter } from '@/databases/repositories/execution.repository'; @@ -32,13 +32,13 @@ import { 
QueuedExecutionRetryError } from '@/errors/queued-execution-retry.error import { InternalServerError } from '@/errors/response-errors/internal-server.error'; import { NotFoundError } from '@/errors/response-errors/not-found.error'; import type { - ExecutionPayload, + CreateExecutionPayload, IExecutionFlattedResponse, IExecutionResponse, IWorkflowDb, } from '@/interfaces'; import { License } from '@/license'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { NodeTypes } from '@/node-types'; import { WaitTracker } from '@/wait-tracker'; import { WorkflowRunner } from '@/workflow-runner'; @@ -321,11 +321,10 @@ export class ExecutionService { }, }; - const fullExecutionData: ExecutionPayload = { + const fullExecutionData: CreateExecutionPayload = { data: executionData, mode, finished: false, - startedAt: new Date(), workflowData, workflowId: workflow.id, stoppedAt: new Date(), diff --git a/packages/cli/src/executions/execution.types.ts b/packages/cli/src/executions/execution.types.ts index ba574199a0..04d68d8197 100644 --- a/packages/cli/src/executions/execution.types.ts +++ b/packages/cli/src/executions/execution.types.ts @@ -80,6 +80,7 @@ export namespace ExecutionSummaries { startedBefore: string; annotationTags: string[]; // tag IDs vote: AnnotationVote; + projectId: string; }>; type AccessFields = { diff --git a/packages/cli/src/external-secrets/external-secrets-manager.ee.ts b/packages/cli/src/external-secrets/external-secrets-manager.ee.ts index dd33132d37..e175f2969c 100644 --- a/packages/cli/src/external-secrets/external-secrets-manager.ee.ts +++ b/packages/cli/src/external-secrets/external-secrets-manager.ee.ts @@ -10,7 +10,7 @@ import type { SecretsProviderSettings, } from '@/interfaces'; import { License } from '@/license'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { OrchestrationService } from '@/services/orchestration.service'; import { EXTERNAL_SECRETS_INITIAL_BACKOFF, EXTERNAL_SECRETS_MAX_BACKOFF } from './constants'; @@ -79,7 +79,7 @@ export class ExternalSecretsManager { } async broadcastReloadExternalSecretsProviders() { - await Container.get(OrchestrationService).publish('reloadExternalSecretsProviders'); + await Container.get(OrchestrationService).publish('reload-external-secrets-providers'); } private decryptSecretsSettings(value: string): ExternalSecretsSettings { diff --git a/packages/cli/src/external-secrets/providers/vault.ts b/packages/cli/src/external-secrets/providers/vault.ts index e325e69935..398c40745d 100644 --- a/packages/cli/src/external-secrets/providers/vault.ts +++ b/packages/cli/src/external-secrets/providers/vault.ts @@ -5,7 +5,7 @@ import { Container } from 'typedi'; import type { SecretsProviderSettings, SecretsProviderState } from '@/interfaces'; import { SecretsProvider } from '@/interfaces'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { DOCS_HELP_NOTICE, EXTERNAL_SECRETS_NAME_REGEX } from '../constants'; import { preferGet } from '../external-secrets-helper.ee'; diff --git a/packages/cli/src/generic-helpers.ts b/packages/cli/src/generic-helpers.ts index 47ef1a796b..e5978bb34a 100644 --- a/packages/cli/src/generic-helpers.ts +++ b/packages/cli/src/generic-helpers.ts @@ -1,6 +1,6 @@ import { validate } from 'class-validator'; -import type { AnnotationTagEntity } from '@/databases/entities/annotation-tag-entity'; +import type { AnnotationTagEntity } from '@/databases/entities/annotation-tag-entity.ee'; 
 import type { CredentialsEntity } from '@/databases/entities/credentials-entity';
 import type { TagEntity } from '@/databases/entities/tag-entity';
 import type { User } from '@/databases/entities/user';
diff --git a/packages/cli/src/interfaces.ts b/packages/cli/src/interfaces.ts
index 629925e8dc..4d2cd9b2d9 100644
--- a/packages/cli/src/interfaces.ts
+++ b/packages/cli/src/interfaces.ts
@@ -22,11 +22,13 @@ import type {
 	INodeProperties,
 	IUserSettings,
 	IWorkflowExecutionDataProcess,
+	DeduplicationMode,
+	DeduplicationItemTypes,
 } from 'n8n-workflow';
 import type PCancelable from 'p-cancelable';
 
 import type { ActiveWorkflowManager } from '@/active-workflow-manager';
-import type { AnnotationTagEntity } from '@/databases/entities/annotation-tag-entity';
+import type { AnnotationTagEntity } from '@/databases/entities/annotation-tag-entity.ee';
 import type { AuthProviderType } from '@/databases/entities/auth-identity';
 import type { SharedCredentials } from '@/databases/entities/shared-credentials';
 import type { TagEntity } from '@/databases/entities/tag-entity';
@@ -48,6 +50,20 @@ export interface ICredentialsOverwrite {
 	[key: string]: ICredentialDataDecryptedObject;
 }
 
+// ----------------------------------
+//         ProcessedData
+// ----------------------------------
+
+export interface IProcessedDataLatest {
+	mode: DeduplicationMode;
+	data: DeduplicationItemTypes;
+}
+
+export interface IProcessedDataEntries {
+	mode: DeduplicationMode;
+	data: DeduplicationItemTypes[];
+}
+
 // ----------------------------------
 //              tags
 // ----------------------------------
@@ -115,6 +131,7 @@ export type SaveExecutionDataType = 'all' | 'none';
 export interface IExecutionBase {
 	id: string;
 	mode: WorkflowExecuteMode;
+	createdAt: Date; // set by DB
 	startedAt: Date;
 	stoppedAt?: Date; // empty value means execution is still running
 	workflowId: string;
@@ -131,10 +148,11 @@ export interface IExecutionDb extends IExecutionBase {
 	workflowData: IWorkflowBase;
 }
 
-/**
- * Payload for creating or updating an execution.
- */
-export type ExecutionPayload = Omit<IExecutionDb, 'id'>;
+/** Payload for creating an execution. */
+export type CreateExecutionPayload = Omit<IExecutionDb, 'id' | 'createdAt' | 'startedAt'>;
+
+/** Payload for updating an execution. */
+export type UpdateExecutionPayload = Omit<IExecutionDb, 'id' | 'createdAt'>;
 
 export interface IExecutionResponse extends IExecutionBase {
 	id: string;
diff --git a/packages/cli/src/ldap/ldap.service.ee.ts b/packages/cli/src/ldap/ldap.service.ee.ts
index 84c79c7651..b552db6974 100644
--- a/packages/cli/src/ldap/ldap.service.ee.ts
+++ b/packages/cli/src/ldap/ldap.service.ee.ts
@@ -14,7 +14,7 @@ import { SettingsRepository } from '@/databases/repositories/settings.repository
 import { BadRequestError } from '@/errors/response-errors/bad-request.error';
 import { InternalServerError } from '@/errors/response-errors/internal-server.error';
 import { EventService } from '@/events/event.service';
-import { Logger } from '@/logger';
+import { Logger } from '@/logging/logger.service';
 import {
 	getCurrentAuthenticationMethod,
 	isEmailCurrentAuthenticationMethod,
diff --git a/packages/cli/src/license.ts b/packages/cli/src/license.ts
index 75a57efd2c..c6e8dfa19f 100644
--- a/packages/cli/src/license.ts
+++ b/packages/cli/src/license.ts
@@ -6,7 +6,7 @@ import Container, { Service } from 'typedi';
 import config from '@/config';
 import { SettingsRepository } from '@/databases/repositories/settings.repository';
 import { OnShutdown } from '@/decorators/on-shutdown';
-import { Logger } from '@/logger';
+import { Logger } from '@/logging/logger.service';
 import { LicenseMetricsService } from '@/metrics/license-metrics.service';
 import { OrchestrationService } from '@/services/orchestration.service';
 
@@ -37,7 +37,9 @@ export class License {
 		private readonly orchestrationService: OrchestrationService,
 		private readonly settingsRepository: SettingsRepository,
 		private readonly licenseMetricsService: LicenseMetricsService,
-	) {}
+	) {
+		this.logger = this.logger.withScope('license');
+	}
 
 	/**
 	 * Whether this instance should renew the license - on init and periodically.
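`withScope` is part of the scoped-logging feature introduced in this release; its implementation is not in this diff, but conceptually it returns a child logger that attaches the scope to every message, along these lines:

```ts
// Rough conceptual sketch only; the real Logger is winston-based and its
// withScope may filter or label output differently.
class ScopedLogger {
	constructor(private readonly scopes: string[] = []) {}

	withScope(scope: string): ScopedLogger {
		return new ScopedLogger([...this.scopes, scope]);
	}

	debug(message: string): void {
		const prefix = this.scopes.length ? `[${this.scopes.join(':')}] ` : '';
		console.debug(`${prefix}${message}`);
	}
}

const licenseLogger = new ScopedLogger().withScope('license');
licenseLogger.debug('License initialized'); // -> [license] License initialized
```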
@@ -109,9 +111,9 @@
 			await this.manager.initialize();
 			this.logger.debug('License initialized');
-		} catch (e: unknown) {
-			if (e instanceof Error) {
-				this.logger.error('Could not initialize license manager sdk', e);
+		} catch (error: unknown) {
+			if (error instanceof Error) {
+				this.logger.error('Could not initialize license manager sdk', { error });
 			}
 		}
 	}
@@ -146,7 +148,7 @@
 			this.orchestrationService.isFollower
 		) {
 			this.logger.debug(
-				'[Multi-main setup] Instance is follower, skipping sending of "reloadLicense" command...',
+				'[Multi-main setup] Instance is follower, skipping sending of "reload-license" command...',
 			);
 			return;
 		}
@@ -160,7 +162,7 @@
 		if (config.getEnv('executions.mode') === 'queue') {
 			const { Publisher } = await import('@/scaling/pubsub/publisher.service');
-			await Container.get(Publisher).publishCommand({ command: 'reloadLicense' });
+			await Container.get(Publisher).publishCommand({ command: 'reload-license' });
 		}
 
 		const isS3Selected = config.getEnv('binaryDataManager.mode') === 's3';
@@ -253,6 +255,10 @@
 		return this.isFeatureEnabled(LICENSE_FEATURES.AI_ASSISTANT);
 	}
 
+	isAskAiEnabled() {
+		return this.isFeatureEnabled(LICENSE_FEATURES.ASK_AI);
+	}
+
 	isAdvancedExecutionFiltersEnabled() {
 		return this.isFeatureEnabled(LICENSE_FEATURES.ADVANCED_EXECUTION_FILTERS);
 	}
diff --git a/packages/cli/src/license/__tests__/license.service.test.ts b/packages/cli/src/license/__tests__/license.service.test.ts
index 5fe4d6c692..77afe04a2c 100644
--- a/packages/cli/src/license/__tests__/license.service.test.ts
+++ b/packages/cli/src/license/__tests__/license.service.test.ts
@@ -1,4 +1,5 @@
 import type { TEntitlement } from '@n8n_io/license-sdk';
+import axios, { AxiosError } from 'axios';
 import { mock } from 'jest-mock-extended';
 
 import type { WorkflowRepository } from '@/databases/repositories/workflow.repository';
@@ -7,6 +8,8 @@ import type { EventService } from '@/events/event.service';
 import type { License } from '@/license';
 import { LicenseErrors, LicenseService } from '@/license/license.service';
 
+jest.mock('axios');
+
 describe('LicenseService', () => {
 	const license = mock<License>();
 	const workflowRepository = mock<WorkflowRepository>();
@@ -84,4 +87,37 @@
 		});
 	});
+
+	describe('registerCommunityEdition', () => {
+		test('on success', async () => {
+			jest
+				.spyOn(axios, 'post')
+				.mockResolvedValueOnce({ data: { title: 'Title', text: 'Text', licenseKey: 'abc-123' } });
+			const data = await licenseService.registerCommunityEdition({
+				email: 'test@ema.il',
+				instanceId: '123',
+				instanceUrl: 'http://localhost',
+				licenseType: 'community-registered',
+			});
+
+			expect(data).toEqual({ title: 'Title', text: 'Text' });
+			expect(eventService.emit).toHaveBeenCalledWith('license-community-plus-registered', {
+				email: 'test@ema.il',
+				licenseKey: 'abc-123',
+			});
+		});
+
+		test('on failure', async () => {
+			jest.spyOn(axios, 'post').mockRejectedValueOnce(new AxiosError('Failed'));
+			await expect(
+				licenseService.registerCommunityEdition({
+					email: 'test@ema.il',
+					instanceId: '123',
+					instanceUrl: 'http://localhost',
+					licenseType: 'community-registered',
+				}),
+			).rejects.toThrowError('Failed');
+			expect(eventService.emit).not.toHaveBeenCalled();
+		});
+	});
 });
diff --git a/packages/cli/src/license/license.controller.ts b/packages/cli/src/license/license.controller.ts
index e1644046bb..db895ef4a0 100644
--- a/packages/cli/src/license/license.controller.ts
+++
b/packages/cli/src/license/license.controller.ts @@ -1,14 +1,21 @@ +import { CommunityRegisteredRequestDto } from '@n8n/api-types'; import type { AxiosError } from 'axios'; +import { InstanceSettings } from 'n8n-core'; -import { Get, Post, RestController, GlobalScope } from '@/decorators'; +import { Get, Post, RestController, GlobalScope, Body } from '@/decorators'; import { BadRequestError } from '@/errors/response-errors/bad-request.error'; -import { AuthenticatedRequest, LicenseRequest } from '@/requests'; +import { AuthenticatedRequest, AuthlessRequest, LicenseRequest } from '@/requests'; +import { UrlService } from '@/services/url.service'; import { LicenseService } from './license.service'; @RestController('/license') export class LicenseController { - constructor(private readonly licenseService: LicenseService) {} + constructor( + private readonly licenseService: LicenseService, + private readonly instanceSettings: InstanceSettings, + private readonly urlService: UrlService, + ) {} @Get('/') async getLicenseData() { @@ -32,6 +39,20 @@ export class LicenseController { } } + @Post('/enterprise/community-registered') + async registerCommunityEdition( + _req: AuthlessRequest, + _res: Response, + @Body payload: CommunityRegisteredRequestDto, + ) { + return await this.licenseService.registerCommunityEdition({ + email: payload.email, + instanceId: this.instanceSettings.instanceId, + instanceUrl: this.urlService.getInstanceBaseUrl(), + licenseType: 'community-registered', + }); + } + @Post('/activate') @GlobalScope('license:manage') async activateLicense(req: LicenseRequest.Activate) { diff --git a/packages/cli/src/license/license.service.ts b/packages/cli/src/license/license.service.ts index d92c181f8d..43f9961334 100644 --- a/packages/cli/src/license/license.service.ts +++ b/packages/cli/src/license/license.service.ts @@ -1,4 +1,5 @@ -import axios from 'axios'; +import axios, { AxiosError } from 'axios'; +import { ensureError } from 'n8n-workflow'; import { Service } from 'typedi'; import type { User } from '@/databases/entities/user'; @@ -6,15 +7,14 @@ import { WorkflowRepository } from '@/databases/repositories/workflow.repository import { BadRequestError } from '@/errors/response-errors/bad-request.error'; import { EventService } from '@/events/event.service'; import { License } from '@/license'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { UrlService } from '@/services/url.service'; type LicenseError = Error & { errorId?: keyof typeof LicenseErrors }; export const LicenseErrors = { SCHEMA_VALIDATION: 'Activation key is in the wrong format', - RESERVATION_EXHAUSTED: - 'Activation key has been used too many times. 
Please contact sales@n8n.io if you would like to extend it', + RESERVATION_EXHAUSTED: 'Activation key has been used too many times', RESERVATION_EXPIRED: 'Activation key has expired', NOT_FOUND: 'Activation key not found', RESERVATION_CONFLICT: 'Activation key not found', @@ -60,6 +60,43 @@ export class LicenseService { }); } + async registerCommunityEdition({ + email, + instanceId, + instanceUrl, + licenseType, + }: { + email: string; + instanceId: string; + instanceUrl: string; + licenseType: string; + }): Promise<{ title: string; text: string }> { + try { + const { + data: { licenseKey, ...rest }, + } = await axios.post<{ title: string; text: string; licenseKey: string }>( + 'https://enterprise.n8n.io/community-registered', + { + email, + instanceId, + instanceUrl, + licenseType, + }, + ); + this.eventService.emit('license-community-plus-registered', { email, licenseKey }); + return rest; + } catch (e: unknown) { + if (e instanceof AxiosError) { + const error = e as AxiosError<{ message: string }>; + const errorMsg = error.response?.data?.message ?? e.message; + throw new BadRequestError('Failed to register community edition: ' + errorMsg); + } else { + this.logger.error('Failed to register community edition', { error: ensureError(e) }); + throw new BadRequestError('Failed to register community edition'); + } + } + } + getManagementJwt(): string { return this.license.getManagementJwt(); }
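For orientation, this is what the registration call looks like from the controller's side, assembled from the two hunks above (a sketch only; all names come from this diff):

    // Sketch: mirrors LicenseController.registerCommunityEdition above.
    const { title, text } = await licenseService.registerCommunityEdition({
      email: payload.email, // validated via CommunityRegisteredRequestDto
      instanceId: instanceSettings.instanceId,
      instanceUrl: urlService.getInstanceBaseUrl(),
      licenseType: 'community-registered',
    });
    // On success the service emits 'license-community-plus-registered' with the
    // returned licenseKey and passes only { title, text } back to the client.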
diff --git a/packages/cli/src/load-nodes-and-credentials.ts b/packages/cli/src/load-nodes-and-credentials.ts index e2daaa0e76..a5cff3764f 100644 --- a/packages/cli/src/load-nodes-and-credentials.ts +++ b/packages/cli/src/load-nodes-and-credentials.ts @@ -18,6 +18,7 @@ import type { } from 'n8n-workflow'; import { NodeHelpers, ApplicationError, ErrorReporterProxy as ErrorReporter } from 'n8n-workflow'; import path from 'path'; +import picocolors from 'picocolors'; import { Container, Service } from 'typedi'; import { @@ -27,7 +28,8 @@ import { CLI_DIR, inE2ETests, } from '@/constants'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; +import { isContainedWithin } from '@/utils/path-util'; interface LoadedNodesAndCredentials { nodes: INodeTypeData; @@ -146,6 +148,7 @@ export class LoadNodesAndCredentials { path.join(nodeModulesDir, packagePath), ); } catch (error) { + this.logger.error((error as Error).message); ErrorReporter.error(error); } } @@ -153,14 +156,13 @@ export class LoadNodesAndCredentials { resolveIcon(packageName: string, url: string): string | undefined { const loader = this.loaders[packageName]; - if (loader) { - const pathPrefix = `/icons/${packageName}/`; - const filePath = path.resolve(loader.directory, url.substring(pathPrefix.length)); - if (!path.relative(loader.directory, filePath).includes('..')) { - return filePath; - } + if (!loader) { + return undefined; } - return undefined; + const pathPrefix = `/icons/${packageName}/`; + const filePath = path.resolve(loader.directory, url.substring(pathPrefix.length)); + + return isContainedWithin(loader.directory, filePath) ? filePath : undefined; } getCustomDirectories(): string[] { @@ -258,6 +260,13 @@ export class LoadNodesAndCredentials { dir: string, ) { const loader = new constructor(dir, this.excludeNodes, this.includeNodes); + if (loader instanceof PackageDirectoryLoader && loader.packageName in this.loaders) { + throw new ApplicationError( + picocolors.red( + `nodes package ${loader.packageName} is already loaded.\n Please delete this second copy at path ${dir}`, + ), + ); + } await loader.loadAll(); this.loaders[loader.packageName] = loader; return loader; diff --git a/packages/cli/src/logger.ts b/packages/cli/src/logger.ts deleted file mode 100644 index 7a24bdbf28..0000000000 --- a/packages/cli/src/logger.ts +++ /dev/null @@ -1,117 +0,0 @@ -import callsites from 'callsites'; -import { LoggerProxy, type IDataObject, LOG_LEVELS } from 'n8n-workflow'; -import { basename } from 'path'; -import { Service } from 'typedi'; -import { inspect } from 'util'; -import winston from 'winston'; - -import config from '@/config'; - -const noOp = () => {}; - -@Service() -export class Logger { - private logger: winston.Logger; - - constructor() { - const level = config.getEnv('logs.level'); - - this.logger = winston.createLogger({ - level, - silent: level === 'silent', - }); - - // Change all methods with higher log-level to no-op - for (const levelName of LOG_LEVELS) { - if (this.logger.levels[levelName] > this.logger.levels[level]) { - Object.defineProperty(this, levelName, { value: noOp }); - } - } - - const output = config - .getEnv('logs.output') - .split(',') - .map((line) => line.trim()); - - if (output.includes('console')) { - let format: winston.Logform.Format; - if (level === 'debug') { - format = winston.format.combine( - winston.format.metadata(), - winston.format.timestamp(), - winston.format.colorize({ all: true }), - - winston.format.printf(({ level: logLevel, message, timestamp, metadata }) => { - return `${timestamp} | ${logLevel.padEnd(18)} | ${message}${ - // eslint-disable-next-line @typescript-eslint/no-unsafe-argument - Object.keys(metadata).length ? ` ${JSON.stringify(inspect(metadata))}` : '' - }`; - }), - ); - } else { - format = winston.format.printf(({ message }: { message: string }) => message); - } - - this.logger.add( - new winston.transports.Console({ - format, - }), - ); - } - - if (output.includes('file')) { - const fileLogFormat = winston.format.combine( - winston.format.timestamp(), - winston.format.metadata(), - winston.format.json(), - ); - this.logger.add( - new winston.transports.File({ - filename: config.getEnv('logs.file.location'), - format: fileLogFormat, - maxsize: config.getEnv('logs.file.fileSizeMax') * 1048576, // config * 1mb - maxFiles: config.getEnv('logs.file.fileCountMax'), - }), - ); - } - - LoggerProxy.init(this); - } - - private log(level: (typeof LOG_LEVELS)[number], message: string, meta: object = {}): void { - const callsite = callsites(); - // We are using the third array element as the structure is as follows: - // [0]: this file - // [1]: Should be Logger - // [2]: Should point to the caller. 
- Note: getting line number is useless because at this point - We are in runtime, so it means we are looking at compiled js files - const logDetails = {} as IDataObject; - if (callsite[2] !== undefined) { - logDetails.file = basename(callsite[2].getFileName() || ''); - const functionName = callsite[2].getFunctionName(); - if (functionName) { - logDetails.function = functionName; - } - } - this.logger.log(level, message, { ...meta, ...logDetails }); - } - - // Convenience methods below - - error(message: string, meta: object = {}): void { - this.log('error', message, meta); - } - - warn(message: string, meta: object = {}): void { - this.log('warn', message, meta); - } - - info(message: string, meta: object = {}): void { - this.log('info', message, meta); - } - - debug(message: string, meta: object = {}): void { - this.log('debug', message, meta); - } -} diff --git a/packages/cli/src/logging/__tests__/logger.service.test.ts b/packages/cli/src/logging/__tests__/logger.service.test.ts new file mode 100644 index 0000000000..d01a709639 --- /dev/null +++ b/packages/cli/src/logging/__tests__/logger.service.test.ts @@ -0,0 +1,152 @@ +import type { GlobalConfig } from '@n8n/config'; +import { mock } from 'jest-mock-extended'; +import type { InstanceSettings } from 'n8n-core'; + +import { Logger } from '@/logging/logger.service'; + +describe('Logger', () => { + describe('transports', () => { + test('if `console` selected, should set console transport', () => { + const globalConfig = mock<GlobalConfig>({ + logging: { + level: 'info', + outputs: ['console'], + scopes: [], + }, + }); + + const logger = new Logger(globalConfig, mock()); + + const { transports } = logger.getInternalLogger(); + + expect(transports).toHaveLength(1); + + const [transport] = transports; + + expect(transport.constructor.name).toBe('Console'); + }); + + test('if `file` selected, should set file transport', () => { + const globalConfig = mock<GlobalConfig>({ + logging: { + level: 'info', + outputs: ['file'], + scopes: [], + file: { + fileSizeMax: 100, + fileCountMax: 16, + location: 'logs/n8n.log', + }, + }, + }); + + const logger = new Logger(globalConfig, mock({ n8nFolder: '/tmp' })); + + const { transports } = logger.getInternalLogger(); + + expect(transports).toHaveLength(1); + + const [transport] = transports; + + expect(transport.constructor.name).toBe('File'); + }); + }); + + describe('levels', () => { + test('if `error` selected, should enable `error` level', () => { + const globalConfig = mock<GlobalConfig>({ + logging: { + level: 'error', + outputs: ['console'], + scopes: [], + }, + }); + + const logger = new Logger(globalConfig, mock()); + + const internalLogger = logger.getInternalLogger(); + + expect(internalLogger.isErrorEnabled()).toBe(true); + expect(internalLogger.isWarnEnabled()).toBe(false); + expect(internalLogger.isInfoEnabled()).toBe(false); + expect(internalLogger.isDebugEnabled()).toBe(false); + }); + + test('if `warn` selected, should enable `error` and `warn` levels', () => { + const globalConfig = mock<GlobalConfig>({ + logging: { + level: 'warn', + outputs: ['console'], + scopes: [], + }, + }); + + const logger = new Logger(globalConfig, mock()); + + const internalLogger = logger.getInternalLogger(); + + expect(internalLogger.isErrorEnabled()).toBe(true); + expect(internalLogger.isWarnEnabled()).toBe(true); + expect(internalLogger.isInfoEnabled()).toBe(false); + expect(internalLogger.isDebugEnabled()).toBe(false); + }); + + test('if `info` selected, should enable `error`, `warn`, and `info` levels', () => { + const globalConfig = mock<GlobalConfig>({ + logging: { 
+ level: 'info', + outputs: ['console'], + scopes: [], + }, + }); + + const logger = new Logger(globalConfig, mock()); + + const internalLogger = logger.getInternalLogger(); + + expect(internalLogger.isErrorEnabled()).toBe(true); + expect(internalLogger.isWarnEnabled()).toBe(true); + expect(internalLogger.isInfoEnabled()).toBe(true); + expect(internalLogger.isDebugEnabled()).toBe(false); + }); + + test('if `debug` selected, should enable all levels', () => { + const globalConfig = mock<GlobalConfig>({ + logging: { + level: 'debug', + outputs: ['console'], + scopes: [], + }, + }); + + const logger = new Logger(globalConfig, mock()); + + const internalLogger = logger.getInternalLogger(); + + expect(internalLogger.isErrorEnabled()).toBe(true); + expect(internalLogger.isWarnEnabled()).toBe(true); + expect(internalLogger.isInfoEnabled()).toBe(true); + expect(internalLogger.isDebugEnabled()).toBe(true); + }); + + test('if `silent` selected, should disable all levels', () => { + const globalConfig = mock<GlobalConfig>({ + logging: { + level: 'silent', + outputs: ['console'], + scopes: [], + }, + }); + + const logger = new Logger(globalConfig, mock()); + + const internalLogger = logger.getInternalLogger(); + + expect(internalLogger.isErrorEnabled()).toBe(false); + expect(internalLogger.isWarnEnabled()).toBe(false); + expect(internalLogger.isInfoEnabled()).toBe(false); + expect(internalLogger.isDebugEnabled()).toBe(false); + expect(internalLogger.silent).toBe(true); + }); + }); +}); diff --git a/packages/cli/src/logging/constants.ts b/packages/cli/src/logging/constants.ts new file mode 100644 index 0000000000..107327694b --- /dev/null +++ b/packages/cli/src/logging/constants.ts @@ -0,0 +1,3 @@ +export const noOp = () => {}; + +export const LOG_LEVELS = ['error', 'warn', 'info', 'debug', 'silent'] as const; diff --git a/packages/cli/src/logging/logger.service.ts b/packages/cli/src/logging/logger.service.ts new file mode 100644 index 0000000000..8bdb9177de --- /dev/null +++ b/packages/cli/src/logging/logger.service.ts @@ -0,0 +1,220 @@ +import type { LogScope } from '@n8n/config'; +import { GlobalConfig } from '@n8n/config'; +import callsites from 'callsites'; +import type { TransformableInfo } from 'logform'; +import { InstanceSettings } from 'n8n-core'; +import { LoggerProxy, LOG_LEVELS } from 'n8n-workflow'; +import path, { basename } from 'node:path'; +import pc from 'picocolors'; +import { Service } from 'typedi'; +import winston from 'winston'; + +import { inDevelopment, inProduction } from '@/constants'; +import { isObjectLiteral } from '@/utils'; + +import { noOp } from './constants'; +import type { LogLocationMetadata, LogLevel, LogMetadata } from './types'; + +@Service() +export class Logger { + private internalLogger: winston.Logger; + + private readonly level: LogLevel; + + private readonly scopes: Set<LogScope>; + + private get isScopingEnabled() { + return this.scopes.size > 0; + } + + constructor( + private readonly globalConfig: GlobalConfig, + private readonly instanceSettings: InstanceSettings, + ) { + this.level = this.globalConfig.logging.level; + + const isSilent = this.level === 'silent'; + + this.internalLogger = winston.createLogger({ + level: this.level, + silent: isSilent, + }); + + if (!isSilent) { + this.setLevel(); + + const { outputs, scopes } = this.globalConfig.logging; + + if (outputs.includes('console')) this.setConsoleTransport(); + if (outputs.includes('file')) this.setFileTransport(); + + this.scopes = new Set(scopes); + } + + LoggerProxy.init(this); + } + + private setInternalLogger(internalLogger: 
winston.Logger) { + this.internalLogger = internalLogger; + } + + withScope(scope: LogScope) { + const scopedLogger = new Logger(this.globalConfig, this.instanceSettings); + const childLogger = this.internalLogger.child({ scope }); + + scopedLogger.setInternalLogger(childLogger); + + return scopedLogger; + } + + private log(level: LogLevel, message: string, metadata: LogMetadata) { + const location: LogLocationMetadata = {}; + + const caller = callsites().at(2); // zeroth and first are this file, second is caller + + if (caller !== undefined) { + location.file = basename(caller.getFileName() ?? ''); + const fnName = caller.getFunctionName(); + if (fnName) location.function = fnName; + } + + this.internalLogger.log(level, message, { ...metadata, ...location }); + } + + private setLevel() { + const { levels } = this.internalLogger; + + for (const logLevel of LOG_LEVELS) { + if (levels[logLevel] > levels[this.level]) { + // numerically higher (less severe) log levels become no-op + // to prevent overhead from `callsites` calls + Object.defineProperty(this, logLevel, { value: noOp }); + } + } + } + + private setConsoleTransport() { + const format = + this.level === 'debug' && inDevelopment + ? this.debugDevConsoleFormat() + : this.level === 'debug' && inProduction + ? this.debugProdConsoleFormat() + : winston.format.printf(({ message }: { message: string }) => message); + + this.internalLogger.add(new winston.transports.Console({ format })); + } + + private scopeFilter() { + return winston.format((info: TransformableInfo & { metadata: LogMetadata }) => { + const shouldIncludeScope = info.metadata.scope && this.scopes.has(info.metadata.scope); + + if (this.isScopingEnabled && !shouldIncludeScope) return false; + + return info; + })(); + } + + private debugDevConsoleFormat() { + return winston.format.combine( + winston.format.metadata(), + winston.format.timestamp({ format: () => this.devTsFormat() }), + winston.format.colorize({ all: true }), + this.scopeFilter(), + winston.format.printf(({ level: _level, message, timestamp, metadata: _metadata }) => { + const SEPARATOR = ' '.repeat(3); + const LOG_LEVEL_COLUMN_WIDTH = 15; // 5 columns + ANSI color codes + const level = _level.toLowerCase().padEnd(LOG_LEVEL_COLUMN_WIDTH, ' '); + const metadata = this.toPrintable(_metadata); + return [timestamp, level, message + ' ' + pc.dim(metadata)].join(SEPARATOR); + }), + ); + } + + private debugProdConsoleFormat() { + return winston.format.combine( + winston.format.metadata(), + winston.format.timestamp(), + this.scopeFilter(), + winston.format.printf(({ level, message, timestamp, metadata }) => { + const _metadata = this.toPrintable(metadata); + return `${timestamp} | ${level.padEnd(5)} | ${message}${_metadata ? ' ' + _metadata : ''}`; + }), + ); + } + + private devTsFormat() { + const now = new Date(); + const pad = (num: number, digits: number = 2) => num.toString().padStart(digits, '0'); + const hours = pad(now.getHours()); + const minutes = pad(now.getMinutes()); + const seconds = pad(now.getSeconds()); + const milliseconds = pad(now.getMilliseconds(), 3); + return `${hours}:${minutes}:${seconds}.${milliseconds}`; + } + + private toPrintable(metadata: unknown) { + if (isObjectLiteral(metadata) && Object.keys(metadata).length > 0) { + return inProduction + ? 
JSON.stringify(metadata) + : JSON.stringify(metadata) .replace(/{"/g, '{ "') .replace(/,"/g, ', "') .replace(/:/g, ': ') .replace(/}/g, ' }'); // spacing for readability } + return ''; + } + + private setFileTransport() { + const format = winston.format.combine( + winston.format.timestamp(), + winston.format.metadata(), + winston.format.json(), + ); + + const filename = path.join( + this.instanceSettings.n8nFolder, + this.globalConfig.logging.file.location, + ); + + const { fileSizeMax, fileCountMax } = this.globalConfig.logging.file; + + this.internalLogger.add( + new winston.transports.File({ + filename, + format, + maxsize: fileSizeMax * 1_048_576, // config * 1 MiB in bytes + maxFiles: fileCountMax, + }), + ); + } + + // #region Convenience methods + + error(message: string, metadata: LogMetadata = {}) { + this.log('error', message, metadata); + } + + warn(message: string, metadata: LogMetadata = {}) { + this.log('warn', message, metadata); + } + + info(message: string, metadata: LogMetadata = {}) { + this.log('info', message, metadata); + } + + debug(message: string, metadata: LogMetadata = {}) { + this.log('debug', message, metadata); + } + + // #endregion + + // #region For testing only + + getInternalLogger() { + return this.internalLogger; + } + + // #endregion +} diff --git a/packages/cli/src/logging/types.ts b/packages/cli/src/logging/types.ts new file mode 100644 index 0000000000..b6022c0bf6 --- /dev/null +++ b/packages/cli/src/logging/types.ts @@ -0,0 +1,14 @@ +import type { LogScope } from '@n8n/config'; + +import type { LOG_LEVELS } from './constants'; + +export type LogLevel = (typeof LOG_LEVELS)[number]; + +export type LogMetadata = { + [key: string]: unknown; + scope?: LogScope; + file?: string; + function?: string; +}; + +export type LogLocationMetadata = Pick<LogMetadata, 'file' | 'function'>;
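As a usage note for the scoped logging introduced above, a minimal sketch follows. Only `withScope` and the `logging.scopes` filter are defined in this diff; the `scaling` scope value and the DI call are illustrative assumptions:

    // Sketch: a child logger that tags every entry with a scope (assumed scope name).
    const scoped = Container.get(Logger).withScope('scaling');
    scoped.debug('Job enqueued', { jobId: '123' });
    // scopeFilter() then drops entries whose metadata.scope is not listed in
    // logging.scopes, whenever that list is non-empty.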
diff --git a/packages/cli/src/posthog/__tests__/posthog.test.ts b/packages/cli/src/posthog/__tests__/posthog.test.ts index f2869afcbf..5c8fe282bf 100644 --- a/packages/cli/src/posthog/__tests__/posthog.test.ts +++ b/packages/cli/src/posthog/__tests__/posthog.test.ts @@ -1,3 +1,5 @@ +import type { GlobalConfig } from '@n8n/config'; +import { mock } from 'jest-mock-extended'; import { InstanceSettings } from 'n8n-core'; import { PostHog } from 'posthog-node'; @@ -15,6 +17,8 @@ describe('PostHog', () => { const instanceSettings = mockInstance(InstanceSettings, { instanceId }); + const globalConfig = mock<GlobalConfig>({ logging: { level: 'debug' } }); + beforeAll(() => { config.set('diagnostics.config.posthog.apiKey', apiKey); config.set('diagnostics.config.posthog.apiHost', apiHost); @@ -26,7 +30,7 @@ }); it('inits PostHog correctly', async () => { - const ph = new PostHogClient(instanceSettings); + const ph = new PostHogClient(instanceSettings, globalConfig); await ph.init(); expect(PostHog.prototype.constructor).toHaveBeenCalledWith(apiKey, { host: apiHost }); @@ -35,7 +39,7 @@ it('does not initialize or track if diagnostics are not enabled', async () => { config.set('diagnostics.enabled', false); - const ph = new PostHogClient(instanceSettings); + const ph = new PostHogClient(instanceSettings, globalConfig); await ph.init(); ph.track({ @@ -55,7 +59,7 @@ test: true, }; - const ph = new PostHogClient(instanceSettings); + const ph = new PostHogClient(instanceSettings, globalConfig); await ph.init(); ph.track({ @@ -75,7 +79,7 @@ it('gets feature flags', async () => { const createdAt = new Date(); - const ph = new PostHogClient(instanceSettings); + const ph = new PostHogClient(instanceSettings, globalConfig); await ph.init(); await ph.getFeatureFlags({ diff --git a/packages/cli/src/posthog/index.ts b/packages/cli/src/posthog/index.ts index 08a501f0fc..8dec9755b3 100644 --- a/packages/cli/src/posthog/index.ts +++ b/packages/cli/src/posthog/index.ts @@ -1,3 +1,4 @@ +import { GlobalConfig } from '@n8n/config'; import { InstanceSettings } from 'n8n-core'; import type { FeatureFlags, ITelemetryTrackProperties } from 'n8n-workflow'; import type { PostHog } from 'posthog-node'; @@ -10,7 +11,10 @@ import type { PublicUser } from '@/interfaces'; export class PostHogClient { private postHog?: PostHog; - constructor(private readonly instanceSettings: InstanceSettings) {} + constructor( + private readonly instanceSettings: InstanceSettings, + private readonly globalConfig: GlobalConfig, + ) {} async init() { const enabled = config.getEnv('diagnostics.enabled'); @@ -23,7 +27,7 @@ export class PostHogClient { host: config.getEnv('diagnostics.config.posthog.apiHost'), }); - const logLevel = config.getEnv('logs.level'); + const logLevel = this.globalConfig.logging.level; if (logLevel === 'debug') { this.postHog.debug(true); } diff --git a/packages/cli/src/public-api/index.ts b/packages/cli/src/public-api/index.ts index c240a3efa3..1264f57496 100644 --- a/packages/cli/src/public-api/index.ts +++ b/packages/cli/src/public-api/index.ts @@ -10,10 +10,10 @@ import { Container } from 'typedi'; import validator from 'validator'; import YAML from 'yamljs'; -import { UserRepository } from '@/databases/repositories/user.repository'; import { EventService } from '@/events/event.service'; import { License } from '@/license'; import type { AuthenticatedRequest } from '@/requests'; +import { PublicApiKeyService } from '@/services/public-api-key.service'; import { UrlService } from '@/services/url.service'; async function createApiRouter( @@ -90,10 +90,9 @@ async function createApiRouter( _scopes: unknown, schema: OpenAPIV3.ApiKeySecurityScheme, ): Promise<boolean> => { - const apiKey = req.headers[schema.name.toLowerCase()] as string; - const user = await Container.get(UserRepository).findOne({ - where: { apiKey }, - }); + const providedApiKey = req.headers[schema.name.toLowerCase()] as string; + + const user = await Container.get(PublicApiKeyService).getUserForApiKey(providedApiKey); if (!user) return false; diff --git a/packages/cli/src/public-api/types.ts b/packages/cli/src/public-api/types.ts index b69acff8d9..e2d22eac2c 100644 --- a/packages/cli/src/public-api/types.ts +++ b/packages/cli/src/public-api/types.ts @@ -84,11 +84,7 @@ export declare namespace WorkflowRequest { type Activate = Get; type GetTags = Get; type UpdateTags = AuthenticatedRequest<{ id: string }, {}, TagEntity[]>; - type Transfer = AuthenticatedRequest< - { workflowId: string }, - {}, - { destinationProjectId: string } - >; + type Transfer = AuthenticatedRequest<{ id: string }, {}, { destinationProjectId: string }>; } export declare namespace UserRequest { diff --git a/packages/cli/src/public-api/v1/handlers/workflows/workflows.handler.ts b/packages/cli/src/public-api/v1/handlers/workflows/workflows.handler.ts index f46b177c61..b0956a15c1 100644 --- a/packages/cli/src/public-api/v1/handlers/workflows/workflows.handler.ts +++ b/packages/cli/src/public-api/v1/handlers/workflows/workflows.handler.ts @@ -73,11 +73,13 @@ export = { transferWorkflow: [ projectScope('workflow:move', 'workflow'), async (req: WorkflowRequest.Transfer, res: express.Response) => { + 
const { id: workflowId } = req.params; + const body = z.object({ destinationProjectId: z.string() }).parse(req.body); await Container.get(EnterpriseWorkflowService).transferOne( req.user, - req.params.workflowId, + workflowId, body.destinationProjectId, ); diff --git a/packages/cli/src/push/__tests__/websocket.push.test.ts b/packages/cli/src/push/__tests__/websocket.push.test.ts index f62038c6b3..209f91b17e 100644 --- a/packages/cli/src/push/__tests__/websocket.push.test.ts +++ b/packages/cli/src/push/__tests__/websocket.push.test.ts @@ -4,7 +4,7 @@ import { Container } from 'typedi'; import type WebSocket from 'ws'; import type { User } from '@/databases/entities/user'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { WebSocketPush } from '@/push/websocket.push'; import { mockInstance } from '@test/mocking'; diff --git a/packages/cli/src/push/abstract.push.ts b/packages/cli/src/push/abstract.push.ts index f3ae6606ed..24cafa8121 100644 --- a/packages/cli/src/push/abstract.push.ts +++ b/packages/cli/src/push/abstract.push.ts @@ -1,8 +1,9 @@ import type { PushPayload, PushType } from '@n8n/api-types'; import { assert, jsonStringify } from 'n8n-workflow'; +import { Service } from 'typedi'; import type { User } from '@/databases/entities/user'; -import type { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import type { OnPushMessage } from '@/push/types'; import { TypedEmitter } from '@/typed-emitter'; @@ -16,6 +17,7 @@ export interface AbstractPushEvents { * * @emits message when a message is received from a client */ +@Service() export abstract class AbstractPush<Connection> extends TypedEmitter<AbstractPushEvents> { protected connections: Record<string, Connection> = {}; @@ -23,9 +25,12 @@ export abstract class AbstractPush<Connection> extends TypedEmitter<AbstractPushEvents> setInterval(() => this.pingAll(), 60 * 1000); } protected add(pushRef: string, userId: User['id'], connection: Connection) { @@ -75,6 +80,12 @@ export abstract class AbstractPush<Connection> extends TypedEmitter<AbstractPushEvents> sendToAll<Type extends PushType>(type: Type, data: PushPayload<Type>) { this.sendTo(type, data, Object.keys(this.connections)); }
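The heartbeat responsibility moves into the base class here: `AbstractPush` schedules `pingAll()` every 60 seconds and each transport implements only its own `ping()` (see `SSEPush` below and `WebSocketPush` further down). A sketch of the assumed dispatch, since the `pingAll` body itself is not shown in this hunk:

    // Assumed shape of the base-class loop; only ping() is transport-specific.
    private pingAll() {
      for (const pushRef in this.connections) {
        this.ping(this.connections[pushRef]);
      }
    }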
diff --git a/packages/cli/src/push/sse.push.ts b/packages/cli/src/push/sse.push.ts index 96af003b4b..04e39d6d79 100644 --- a/packages/cli/src/push/sse.push.ts +++ b/packages/cli/src/push/sse.push.ts @@ -1,8 +1,6 @@ import { Service } from 'typedi'; import type { User } from '@/databases/entities/user'; -import { Logger } from '@/logger'; -import SSEChannel from 'sse-channel'; import { AbstractPush } from './abstract.push'; import type { PushRequest, PushResponse } from './types'; @@ -11,29 +9,41 @@ type Connection = { req: PushRequest; res: PushResponse }; @Service() export class SSEPush extends AbstractPush<Connection> { - readonly channel = new SSEChannel(); - - readonly connections: Record<string, Connection> = {}; - - constructor(logger: Logger) { - super(logger); - - this.channel.on('disconnect', (_, { req }) => { - this.remove(req?.query?.pushRef); - }); - } - add(pushRef: string, userId: User['id'], connection: Connection) { + const { req, res } = connection; + + // Initialize the connection + req.socket.setTimeout(0); + req.socket.setNoDelay(true); + req.socket.setKeepAlive(true); + res.setHeader('Content-Type', 'text/event-stream; charset=UTF-8'); + res.setHeader('Cache-Control', 'no-cache'); + res.setHeader('Connection', 'keep-alive'); + res.writeHead(200); + res.write(':ok\n\n'); + res.flush(); + super.add(pushRef, userId, connection); + + // When the client disconnects, remove the client + const removeClient = () => this.remove(pushRef); + req.once('end', removeClient); + req.once('close', removeClient); + res.once('finish', removeClient); } protected close({ res }: Connection) { res.end(); - this.channel.removeClient(res); } protected sendToOneConnection(connection: Connection, data: string) { - this.channel.send(data, [connection.res]); + const { res } = connection; + res.write('data: ' + data + '\n\n'); + res.flush(); + } + + protected ping({ res }: Connection) { + res.write(':ping\n\n'); + res.flush(); } } diff --git a/packages/cli/src/push/types.ts b/packages/cli/src/push/types.ts index db9121eecc..b0db44ba1a 100644 --- a/packages/cli/src/push/types.ts +++ b/packages/cli/src/push/types.ts @@ -11,7 +11,15 @@ export type PushRequest = AuthenticatedRequest<{}, {}, {}, { pushRef: string }>; export type SSEPushRequest = PushRequest & { ws: undefined }; export type WebSocketPushRequest = PushRequest & { ws: WebSocket }; -export type PushResponse = Response & { req: PushRequest }; +export type PushResponse = Response & { + req: PushRequest; + /** + * `flush()` is defined in the compression middleware. + * This is necessary because the compression middleware sometimes waits + * for a certain amount of data before sending the data to the client + */ + flush: () => void; +}; export interface OnPushMessage { pushRef: string; diff --git a/packages/cli/src/push/websocket.push.ts b/packages/cli/src/push/websocket.push.ts index 013663cab6..a2ea39c500 100644 --- a/packages/cli/src/push/websocket.push.ts +++ b/packages/cli/src/push/websocket.push.ts @@ -3,7 +3,6 @@ import { Service } from 'typedi'; import type WebSocket from 'ws'; import type { User } from '@/databases/entities/user'; -import { Logger } from '@/logger'; import { AbstractPush } from './abstract.push'; @@ -13,13 +12,6 @@ function heartbeat(this: WebSocket) { @Service() export class WebSocketPush extends AbstractPush<WebSocket> { - constructor(logger: Logger) { - super(logger); - - // Ping all connected clients every 60 seconds - setInterval(() => this.pingAll(), 60 * 1000); - } - add(pushRef: string, userId: User['id'], connection: WebSocket) { connection.isAlive = true; connection.on('pong', heartbeat); @@ -67,17 +59,12 @@ export class WebSocketPush extends AbstractPush<WebSocket> { connection.send(data); } - private pingAll() { - for (const pushRef in this.connections) { - const connection = this.connections[pushRef]; - // If a connection did not respond with a `PONG` in the last 60 seconds, disconnect - if (!connection.isAlive) { - delete this.connections[pushRef]; - return connection.terminate(); - } - - connection.isAlive = false; - connection.ping(); + protected ping(connection: WebSocket): void { + // If a connection did not respond with a `PONG` in the last 60 seconds, disconnect + if (!connection.isAlive) { + return connection.terminate(); } + connection.isAlive = false; + connection.ping(); } }
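The `flush()` member added to `PushResponse` above exists because the compression middleware buffers response data, while server-sent events need each frame to leave the process immediately. The pattern, exactly as `SSEPush` uses it (sketch; `payload` stands in for the serialized message):

    res.setHeader('Content-Type', 'text/event-stream; charset=UTF-8');
    res.write('data: ' + payload + '\n\n'); // one SSE frame
    res.flush(); // provided by the compression middleware; forces the buffer out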
diff --git a/packages/cli/src/requests.ts b/packages/cli/src/requests.ts index 5afe97f31a..e25a244f5f 100644 --- a/packages/cli/src/requests.ts +++ b/packages/cli/src/requests.ts @@ -180,6 +180,14 @@ export declare namespace CredentialRequest { >; } +// ---------------------------------- +// /api-keys +// ---------------------------------- + +export declare namespace ApiKeysRequest { + export type DeleteAPIKey = AuthenticatedRequest<{ id: string }>; +} + // ---------------------------------- // /me // ---------------------------------- @@ -578,5 +586,6 @@ export declare namespace AiAssistantRequest { type Chat = AuthenticatedRequest<{}, {}, AiAssistantSDK.ChatRequestPayload>; type SuggestionPayload = { sessionId: string; suggestionId: string }; - type ApplySuggestion = AuthenticatedRequest<{}, {}, SuggestionPayload>; + type ApplySuggestionPayload = AuthenticatedRequest<{}, {}, SuggestionPayload>; + type AskAiPayload = AuthenticatedRequest<{}, {}, AiAssistantSDK.AskAiRequestPayload>; } diff --git a/packages/cli/src/response-helper.ts b/packages/cli/src/response-helper.ts index c9f7270d5f..0dff1625cb 100644 --- a/packages/cli/src/response-helper.ts +++ b/packages/cli/src/response-helper.ts @@ -10,9 +10,9 @@ import picocolors from 'picocolors'; import Container from 'typedi'; import { inDevelopment } from '@/constants'; +import { Logger } from '@/logging/logger.service'; import { ResponseError } from './errors/response-errors/abstract/response.error'; -import { Logger } from './logger'; export function sendSuccessResponse( res: Response, diff --git a/packages/cli/src/runners/__tests__/task-broker.test.ts b/packages/cli/src/runners/__tests__/task-broker.test.ts new file mode 100644 index 0000000000..f5b91a3f2c --- /dev/null +++ b/packages/cli/src/runners/__tests__/task-broker.test.ts @@ -0,0 +1,504 @@ +import { mock } from 'jest-mock-extended'; + +import { TaskRejectError } from '../errors'; +import type { RunnerMessage, TaskResultData } from '../runner-types'; +import { TaskBroker } from '../task-broker.service'; +import type { TaskOffer, TaskRequest, TaskRunner } from '../task-broker.service'; + +describe('TaskBroker', () => { + let taskBroker: TaskBroker; + + beforeEach(() => { + taskBroker = new TaskBroker(mock()); + jest.restoreAllMocks(); + }); + + describe('expireTasks', () => { + it('should remove expired task offers and keep valid task offers', () => { + const now = process.hrtime.bigint(); + + const validOffer: TaskOffer = { + offerId: 'valid', + runnerId: 'runner1', + taskType: 'taskType1', + validFor: 1000, + validUntil: now + BigInt(1000 * 1_000_000), // 1 second in the future + }; + + const expiredOffer1: TaskOffer = { + offerId: 'expired1', + runnerId: 'runner2', + taskType: 'taskType1', + validFor: 1000, + validUntil: now - BigInt(1000 * 1_000_000), // 1 second in the past + }; + + const expiredOffer2: TaskOffer = { + offerId: 'expired2', + runnerId: 'runner3', + taskType: 'taskType1', + validFor: 2000, + validUntil: now - BigInt(2000 * 1_000_000), // 2 seconds in the past + }; + + taskBroker.setPendingTaskOffers([validOffer, expiredOffer1, expiredOffer2]); + + taskBroker.expireTasks(); + + const offers = taskBroker.getPendingTaskOffers(); + + expect(offers).toHaveLength(1); + expect(offers[0]).toEqual(validOffer); + }); + }); + + describe('registerRunner', () => { + it('should add a runner to known runners', () => { + const runnerId = 'runner1'; + const runner = mock<TaskRunner>({ id: runnerId }); + const messageCallback = jest.fn(); + + taskBroker.registerRunner(runner, messageCallback); + + const knownRunners = taskBroker.getKnownRunners(); + const runnerIds = [...knownRunners.keys()]; + + expect(runnerIds).toHaveLength(1); + expect(runnerIds[0]).toEqual(runnerId); + + expect(knownRunners.get(runnerId)?.runner).toEqual(runner); + expect(knownRunners.get(runnerId)?.messageCallback).toEqual(messageCallback); + }); + }); + + describe('registerRequester', () => { + it('should add a requester to known requesters', () => { + const requesterId = 'requester1'; + const messageCallback = jest.fn(); + + taskBroker.registerRequester(requesterId, messageCallback); + + const knownRequesters = taskBroker.getKnownRequesters(); + const 
requesterIds = [...knownRequesters.keys()]; + + expect(requesterIds).toHaveLength(1); + expect(requesterIds[0]).toEqual(requesterId); + + expect(knownRequesters.get(requesterId)).toEqual(messageCallback); + }); + }); + + describe('deregisterRunner', () => { + it('should remove a runner from known runners', () => { + const runnerId = 'runner1'; + const runner = mock<TaskRunner>({ id: runnerId }); + const messageCallback = jest.fn(); + + taskBroker.registerRunner(runner, messageCallback); + taskBroker.deregisterRunner(runnerId); + + const knownRunners = taskBroker.getKnownRunners(); + const runnerIds = Object.keys(knownRunners); + + expect(runnerIds).toHaveLength(0); + }); + }); + + describe('deregisterRequester', () => { + it('should remove a requester from known requesters', () => { + const requesterId = 'requester1'; + const messageCallback = jest.fn(); + + taskBroker.registerRequester(requesterId, messageCallback); + taskBroker.deregisterRequester(requesterId); + + const knownRequesters = taskBroker.getKnownRequesters(); + const requesterIds = Object.keys(knownRequesters); + + expect(requesterIds).toHaveLength(0); + }); + }); + + describe('taskRequested', () => { + it('should match a pending offer to an incoming request', async () => { + const now = process.hrtime.bigint(); + + const offer: TaskOffer = { + offerId: 'offer1', + runnerId: 'runner1', + taskType: 'taskType1', + validFor: 1000, + validUntil: now + BigInt(1000 * 1_000_000), + }; + + taskBroker.setPendingTaskOffers([offer]); + + const request: TaskRequest = { + requestId: 'request1', + requesterId: 'requester1', + taskType: 'taskType1', + }; + + jest.spyOn(taskBroker, 'acceptOffer').mockResolvedValue(); // allow Jest to exit cleanly + + taskBroker.taskRequested(request); + + expect(taskBroker.acceptOffer).toHaveBeenCalled(); + expect(taskBroker.getPendingTaskOffers()).toHaveLength(0); + }); + }); + + describe('taskOffered', () => { + it('should match a pending request to an incoming offer', () => { + const now = process.hrtime.bigint(); + + const request: TaskRequest = { + requestId: 'request1', + requesterId: 'requester1', + taskType: 'taskType1', + acceptInProgress: false, + }; + + taskBroker.setPendingTaskRequests([request]); + + const offer: TaskOffer = { + offerId: 'offer1', + runnerId: 'runner1', + taskType: 'taskType1', + validFor: 1000, + validUntil: now + BigInt(1000 * 1_000_000), + }; + + jest.spyOn(taskBroker, 'acceptOffer').mockResolvedValue(); // allow Jest to exit cleanly + + taskBroker.taskOffered(offer); + + expect(taskBroker.acceptOffer).toHaveBeenCalled(); + expect(taskBroker.getPendingTaskOffers()).toHaveLength(0); + }); + }); + + describe('settleTasks', () => { + it('should match task offers with task requests by task type', () => { + const now = process.hrtime.bigint(); + + const offer1: TaskOffer = { + offerId: 'offer1', + runnerId: 'runner1', + taskType: 'taskType1', + validFor: 1000, + validUntil: now + BigInt(1000 * 1_000_000), + }; + + const offer2: TaskOffer = { + offerId: 'offer2', + runnerId: 'runner2', + taskType: 'taskType2', + validFor: 1000, + validUntil: now + BigInt(1000 * 1_000_000), + }; + + const request1: TaskRequest = { + requestId: 'request1', + requesterId: 'requester1', + taskType: 'taskType1', + acceptInProgress: false, + }; + + const request2: TaskRequest = { + requestId: 'request2', + requesterId: 'requester2', + taskType: 'taskType2', + acceptInProgress: false, + }; + + const request3: TaskRequest = { + requestId: 'request3', + requesterId: 'requester3', + taskType: 'taskType3', // will have 
no match + acceptInProgress: false, + }; + + taskBroker.setPendingTaskOffers([offer1, offer2]); + taskBroker.setPendingTaskRequests([request1, request2, request3]); + + const acceptOfferSpy = jest.spyOn(taskBroker, 'acceptOffer').mockResolvedValue(); + + taskBroker.settleTasks(); + + expect(acceptOfferSpy).toHaveBeenCalledTimes(2); + expect(acceptOfferSpy).toHaveBeenCalledWith(offer1, request1); + expect(acceptOfferSpy).toHaveBeenCalledWith(offer2, request2); + + const remainingOffers = taskBroker.getPendingTaskOffers(); + expect(remainingOffers).toHaveLength(0); + }); + + it('should not match a request whose acceptance is in progress', () => { + const now = process.hrtime.bigint(); + + const offer: TaskOffer = { + offerId: 'offer1', + runnerId: 'runner1', + taskType: 'taskType1', + validFor: 1000, + validUntil: now + BigInt(1000 * 1_000_000), + }; + + const request: TaskRequest = { + requestId: 'request1', + requesterId: 'requester1', + taskType: 'taskType1', + acceptInProgress: true, + }; + + taskBroker.setPendingTaskOffers([offer]); + taskBroker.setPendingTaskRequests([request]); + + const acceptOfferSpy = jest.spyOn(taskBroker, 'acceptOffer').mockResolvedValue(); + + taskBroker.settleTasks(); + + expect(acceptOfferSpy).not.toHaveBeenCalled(); + + const remainingOffers = taskBroker.getPendingTaskOffers(); + expect(remainingOffers).toHaveLength(1); + expect(remainingOffers[0]).toEqual(offer); + + const remainingRequests = taskBroker.getPendingTaskRequests(); + expect(remainingRequests).toHaveLength(1); + expect(remainingRequests[0]).toEqual(request); + }); + + it('should expire tasks before settling', () => { + const now = process.hrtime.bigint(); + + const validOffer: TaskOffer = { + offerId: 'valid', + runnerId: 'runner1', + taskType: 'taskType1', + validFor: 1000, + validUntil: now + BigInt(1000 * 1_000_000), // 1 second in the future + }; + + const expiredOffer: TaskOffer = { + offerId: 'expired', + runnerId: 'runner2', + taskType: 'taskType2', // will be removed before matching + validFor: 1000, + validUntil: now - BigInt(1000 * 1_000_000), // 1 second in the past + }; + + const request1: TaskRequest = { + requestId: 'request1', + requesterId: 'requester1', + taskType: 'taskType1', + acceptInProgress: false, + }; + + const request2: TaskRequest = { + requestId: 'request2', + requesterId: 'requester2', + taskType: 'taskType2', + acceptInProgress: false, + }; + + taskBroker.setPendingTaskOffers([validOffer, expiredOffer]); + taskBroker.setPendingTaskRequests([request1, request2]); + + const acceptOfferSpy = jest.spyOn(taskBroker, 'acceptOffer').mockResolvedValue(); + + taskBroker.settleTasks(); + + expect(acceptOfferSpy).toHaveBeenCalledTimes(1); + expect(acceptOfferSpy).toHaveBeenCalledWith(validOffer, request1); + + const remainingOffers = taskBroker.getPendingTaskOffers(); + expect(remainingOffers).toHaveLength(0); + }); + }); + + describe('onRunnerMessage', () => { + it('should handle `runner:taskaccepted` message', async () => { + const runnerId = 'runner1'; + const taskId = 'task1'; + + const message: RunnerMessage.ToN8n.TaskAccepted = { + type: 'runner:taskaccepted', + taskId, + }; + + const accept = jest.fn(); + const reject = jest.fn(); + + taskBroker.setRunnerAcceptRejects({ [taskId]: { accept, reject } }); + taskBroker.registerRunner(mock({ id: runnerId }), jest.fn()); + + await taskBroker.onRunnerMessage(runnerId, message); + + const runnerAcceptRejects = taskBroker.getRunnerAcceptRejects(); + + expect(accept).toHaveBeenCalled(); + expect(reject).not.toHaveBeenCalled(); + 
expect(runnerAcceptRejects.get(taskId)).toBeUndefined(); + }); + + it('should handle `runner:taskrejected` message', async () => { + const runnerId = 'runner1'; + const taskId = 'task1'; + const rejectionReason = 'Task execution failed'; + + const message: RunnerMessage.ToN8n.TaskRejected = { + type: 'runner:taskrejected', + taskId, + reason: rejectionReason, + }; + + const accept = jest.fn(); + const reject = jest.fn(); + + taskBroker.setRunnerAcceptRejects({ [taskId]: { accept, reject } }); + taskBroker.registerRunner(mock({ id: runnerId }), jest.fn()); + + await taskBroker.onRunnerMessage(runnerId, message); + + const runnerAcceptRejects = taskBroker.getRunnerAcceptRejects(); + + expect(accept).not.toHaveBeenCalled(); + expect(reject).toHaveBeenCalledWith(new TaskRejectError(rejectionReason)); + expect(runnerAcceptRejects.get(taskId)).toBeUndefined(); + }); + + it('should handle `runner:taskdone` message', async () => { + const runnerId = 'runner1'; + const taskId = 'task1'; + const requesterId = 'requester1'; + const data = mock<TaskResultData>(); + + const message: RunnerMessage.ToN8n.TaskDone = { + type: 'runner:taskdone', + taskId, + data, + }; + + const requesterMessageCallback = jest.fn(); + + taskBroker.registerRunner(mock({ id: runnerId }), jest.fn()); + taskBroker.setTasks({ + [taskId]: { id: taskId, runnerId, requesterId, taskType: 'test' }, + }); + taskBroker.registerRequester(requesterId, requesterMessageCallback); + + await taskBroker.onRunnerMessage(runnerId, message); + + expect(requesterMessageCallback).toHaveBeenCalledWith({ + type: 'broker:taskdone', + taskId, + data, + }); + + expect(taskBroker.getTasks().get(taskId)).toBeUndefined(); + }); + + it('should handle `runner:taskerror` message', async () => { + const runnerId = 'runner1'; + const taskId = 'task1'; + const requesterId = 'requester1'; + const errorMessage = 'Task execution failed'; + + const message: RunnerMessage.ToN8n.TaskError = { + type: 'runner:taskerror', + taskId, + error: errorMessage, + }; + + const requesterMessageCallback = jest.fn(); + + taskBroker.registerRunner(mock({ id: runnerId }), jest.fn()); + taskBroker.setTasks({ + [taskId]: { id: taskId, runnerId, requesterId, taskType: 'test' }, + }); + taskBroker.registerRequester(requesterId, requesterMessageCallback); + + await taskBroker.onRunnerMessage(runnerId, message); + + expect(requesterMessageCallback).toHaveBeenCalledWith({ + type: 'broker:taskerror', + taskId, + error: errorMessage, + }); + + expect(taskBroker.getTasks().get(taskId)).toBeUndefined(); + }); + + it('should handle `runner:taskdatarequest` message', async () => { + const runnerId = 'runner1'; + const taskId = 'task1'; + const requesterId = 'requester1'; + const requestId = 'request1'; + const requestType = 'input'; + const param = 'test_param'; + + const message: RunnerMessage.ToN8n.TaskDataRequest = { + type: 'runner:taskdatarequest', + taskId, + requestId, + requestType, + param, + }; + + const requesterMessageCallback = jest.fn(); + + taskBroker.registerRunner(mock({ id: runnerId }), jest.fn()); + taskBroker.setTasks({ + [taskId]: { id: taskId, runnerId, requesterId, taskType: 'test' }, + }); + taskBroker.registerRequester(requesterId, requesterMessageCallback); + + await taskBroker.onRunnerMessage(runnerId, message); + + expect(requesterMessageCallback).toHaveBeenCalledWith({ + type: 'broker:taskdatarequest', + taskId, + requestId, + requestType, + param, + }); + }); + + it('should handle `runner:rpc` message', async () => { + const runnerId = 'runner1'; + const taskId = 'task1'; + const requesterId = 'requester1'; + const callId = 'call1'; + const rpcName = 'helpers.httpRequestWithAuthentication'; + const rpcParams = ['param1', 'param2']; + + const message: RunnerMessage.ToN8n.RPC = { + type: 'runner:rpc', + taskId, + callId, + name: rpcName, + params: rpcParams, + }; + + const requesterMessageCallback = jest.fn(); + + taskBroker.registerRunner(mock({ id: runnerId }), jest.fn()); + taskBroker.setTasks({ + [taskId]: { id: taskId, runnerId, requesterId, taskType: 'test' }, + }); + taskBroker.registerRequester(requesterId, requesterMessageCallback); + + await taskBroker.onRunnerMessage(runnerId, message); + + expect(requesterMessageCallback).toHaveBeenCalledWith({ + type: 'broker:rpc', + taskId, + callId, + name: rpcName, + params: rpcParams, + }); + }); + }); +});
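Taken together, these tests pin down the broker's matching rule. Roughly, as a sketch inferred from the assertions (not the actual `settleTasks` body, which is not part of this diff):

    // Inferred behavior: expire stale offers first, then pair offers with
    // requests by taskType, skipping requests already being accepted.
    for (const request of pendingRequests) {
      if (request.acceptInProgress) continue;
      const offer = pendingOffers.find((o) => o.taskType === request.taskType);
      if (offer) void this.acceptOffer(offer, request);
    }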
diff --git a/packages/cli/src/runners/__tests__/task-runner-process.test.ts b/packages/cli/src/runners/__tests__/task-runner-process.test.ts new file mode 100644 index 0000000000..b2ad678ee1 --- /dev/null +++ b/packages/cli/src/runners/__tests__/task-runner-process.test.ts @@ -0,0 +1,48 @@ +import { GlobalConfig } from '@n8n/config'; +import { mock } from 'jest-mock-extended'; +import type { ChildProcess, SpawnOptions } from 'node:child_process'; + +import { mockInstance } from '../../../test/shared/mocking'; +import type { TaskRunnerAuthService } from '../auth/task-runner-auth.service'; +import { TaskRunnerProcess } from '../task-runner-process'; + +const spawnMock = jest.fn(() => + mock<ChildProcess>({ + stdout: { + pipe: jest.fn(), + }, + stderr: { + pipe: jest.fn(), + }, + }), +); +require('child_process').spawn = spawnMock; + +describe('TaskRunnerProcess', () => { + const globalConfig = mockInstance(GlobalConfig); + const authService = mock<TaskRunnerAuthService>(); + const taskRunnerProcess = new TaskRunnerProcess(globalConfig, authService); + + afterEach(async () => { + spawnMock.mockClear(); + }); + + describe('start', () => { + it('should propagate NODE_FUNCTION_ALLOW_BUILTIN and NODE_FUNCTION_ALLOW_EXTERNAL from env', async () => { + jest.spyOn(authService, 'createGrantToken').mockResolvedValue('grantToken'); + process.env.NODE_FUNCTION_ALLOW_BUILTIN = '*'; + process.env.NODE_FUNCTION_ALLOW_EXTERNAL = '*'; + + await taskRunnerProcess.start(); + + // @ts-expect-error The type is not correct + const options = spawnMock.mock.calls[0][2] as SpawnOptions; + expect(options.env).toEqual( + expect.objectContaining({ + NODE_FUNCTION_ALLOW_BUILTIN: '*', + NODE_FUNCTION_ALLOW_EXTERNAL: '*', + }), + ); + }); + }); +}); diff --git a/packages/cli/src/runners/auth/__tests__/task-runner-auth.controller.test.ts b/packages/cli/src/runners/auth/__tests__/task-runner-auth.controller.test.ts new file mode 100644 index 0000000000..7d43f91458 --- /dev/null +++ b/packages/cli/src/runners/auth/__tests__/task-runner-auth.controller.test.ts @@ -0,0 +1,115 @@ +import { GlobalConfig } from '@n8n/config'; +import type { NextFunction, Response } from 'express'; +import { mock } from 'jest-mock-extended'; + +import { CacheService } from '@/services/cache/cache.service'; +import { mockInstance } from '@test/mocking'; + +import { BadRequestError } from '../../../errors/response-errors/bad-request.error'; +import { ForbiddenError } from '../../../errors/response-errors/forbidden.error'; +import type { AuthlessRequest } from '../../../requests'; +import type { TaskRunnerServerInitRequest } from '../../runner-types'; +import { TaskRunnerAuthController } from '../task-runner-auth.controller'; +import { TaskRunnerAuthService } from '../task-runner-auth.service'; + 
+describe('TaskRunnerAuthController', () => { + const globalConfig = mockInstance(GlobalConfig, { + cache: { + backend: 'memory', + memory: { + maxSize: 1024, + ttl: 9999, + }, + }, + taskRunners: { + authToken: 'random-secret', + }, + }); + const TTL = 100; + const cacheService = new CacheService(globalConfig); + const authService = new TaskRunnerAuthService(globalConfig, cacheService, TTL); + const authController = new TaskRunnerAuthController(authService); + + const createMockGrantTokenReq = (token?: string) => + ({ + body: { + token, + }, + }) as unknown as AuthlessRequest; + + beforeEach(() => { + jest.clearAllMocks(); + }); + + describe('createGrantToken', () => { + it('should throw BadRequestError when auth token is missing', async () => { + const req = createMockGrantTokenReq(); + + // Act + await expect(authController.createGrantToken(req)).rejects.toThrowError(BadRequestError); + }); + + it('should throw ForbiddenError when auth token is invalid', async () => { + const req = createMockGrantTokenReq('invalid'); + + // Act + await expect(authController.createGrantToken(req)).rejects.toThrowError(ForbiddenError); + }); + + it('should return grant token when auth token is valid', async () => { + const req = createMockGrantTokenReq('random-secret'); + + // Act + await expect(authController.createGrantToken(req)).resolves.toStrictEqual({ + token: expect.any(String), + }); + }); + }); + + describe('authMiddleware', () => { + const res = mock<Response>(); + const next = jest.fn() as NextFunction; + + const createMockReqWithToken = (token?: string) => + mock<TaskRunnerServerInitRequest>({ + headers: { + authorization: `Bearer ${token}`, + }, + }); + + beforeEach(() => { + res.status.mockReturnThis(); + }); + + it('should respond with 401 when grant token is missing', async () => { + const req = mock<TaskRunnerServerInitRequest>({}); + + await authController.authMiddleware(req, res, next); + + expect(next).not.toHaveBeenCalled(); + expect(res.status).toHaveBeenCalledWith(401); + expect(res.json).toHaveBeenCalledWith({ code: 401, message: 'Unauthorized' }); + }); + + it('should respond with 403 when grant token is invalid', async () => { + const req = createMockReqWithToken('invalid'); + + await authController.authMiddleware(req, res, next); + + expect(next).not.toHaveBeenCalled(); + expect(res.status).toHaveBeenCalledWith(403); + expect(res.json).toHaveBeenCalledWith({ code: 403, message: 'Forbidden' }); + }); + + it('should call next() when grant token is valid', async () => { + const { token: validToken } = await authController.createGrantToken( + createMockGrantTokenReq('random-secret'), + ); + + await authController.authMiddleware(createMockReqWithToken(validToken), res, next); + + expect(next).toHaveBeenCalled(); + expect(res.status).not.toHaveBeenCalled(); + }); + }); +}); diff --git a/packages/cli/src/runners/auth/__tests__/task-runner-auth.service.test.ts b/packages/cli/src/runners/auth/__tests__/task-runner-auth.service.test.ts new file mode 100644 index 0000000000..a1321945e3 --- /dev/null +++ b/packages/cli/src/runners/auth/__tests__/task-runner-auth.service.test.ts @@ -0,0 +1,95 @@ +import { GlobalConfig } from '@n8n/config'; +import { sleep } from 'n8n-workflow'; + +import config from '@/config'; +import { CacheService } from '@/services/cache/cache.service'; +import { retryUntil } from '@test-integration/retry-until'; + +import { mockInstance } from '../../../../test/shared/mocking'; +import { TaskRunnerAuthService } from '../task-runner-auth.service'; + +describe('TaskRunnerAuthService', () => { + config.set('taskRunners.authToken', 
'random-secret'); + + const globalConfig = mockInstance(GlobalConfig, { + cache: { + backend: 'memory', + memory: { + maxSize: 1024, + ttl: 9999, + }, + }, + taskRunners: { + authToken: 'random-secret', + }, + }); + const TTL = 100; + const cacheService = new CacheService(globalConfig); + const authService = new TaskRunnerAuthService(globalConfig, cacheService, TTL); + + beforeEach(() => { + jest.clearAllMocks(); + }); + + describe('isValidAuthToken', () => { + it('should be valid for the configured token', () => { + expect(authService.isValidAuthToken('random-secret')).toBe(true); + }); + + it('should be invalid for anything else', () => { + expect(authService.isValidAuthToken('!random-secret')).toBe(false); + }); + }); + + describe('createGrantToken', () => { + it('should generate a random token', async () => { + expect(typeof (await authService.createGrantToken())).toBe('string'); + }); + + it('should store the generated token in cache', async () => { + // Arrange + const cacheSetSpy = jest.spyOn(cacheService, 'set'); + + // Act + const token = await authService.createGrantToken(); + + // Assert + expect(cacheSetSpy).toHaveBeenCalledWith(`grant-token:${token}`, '1', TTL); + }); + }); + + describe('tryConsumeGrantToken', () => { + it('should return false for an invalid grant token', async () => { + expect(await authService.tryConsumeGrantToken('random-secret')).toBe(false); + }); + + it('should return true for a valid grant token', async () => { + // Arrange + const grantToken = await authService.createGrantToken(); + + // Act + expect(await authService.tryConsumeGrantToken(grantToken)).toBe(true); + }); + + it('should return false for an already used grant token', async () => { + // Arrange + const grantToken = await authService.createGrantToken(); + + // Act + expect(await authService.tryConsumeGrantToken(grantToken)).toBe(true); + expect(await authService.tryConsumeGrantToken(grantToken)).toBe(false); + }); + + it('should return false for an expired grant token', async () => { + // Arrange + const grantToken = await authService.createGrantToken(); + + // Act + await sleep(TTL + 1); + + await retryUntil(async () => + expect(await authService.tryConsumeGrantToken(grantToken)).toBe(false), + ); + }); + }); +}); diff --git a/packages/cli/src/runners/auth/task-runner-auth.controller.ts b/packages/cli/src/runners/auth/task-runner-auth.controller.ts new file mode 100644 index 0000000000..a117dfca0d --- /dev/null +++ b/packages/cli/src/runners/auth/task-runner-auth.controller.ts @@ -0,0 +1,62 @@ +import type { NextFunction, Response } from 'express'; +import { Service } from 'typedi'; + +import type { AuthlessRequest } from '@/requests'; + +import { taskRunnerAuthRequestBodySchema } from './task-runner-auth.schema'; +import { TaskRunnerAuthService } from './task-runner-auth.service'; +import { BadRequestError } from '../../errors/response-errors/bad-request.error'; +import { ForbiddenError } from '../../errors/response-errors/forbidden.error'; +import type { TaskRunnerServerInitRequest } from '../runner-types'; + +/** + * Controller responsible for authenticating Task Runner connections + */ +@Service() +export class TaskRunnerAuthController { + constructor(private readonly taskRunnerAuthService: TaskRunnerAuthService) { + // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment + this.authMiddleware = this.authMiddleware.bind(this); + } + + /** + * Validates the provided auth token and creates and responds with a grant token, + * which can be used to initiate a task runner connection. 
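+ * + * Illustrative exchange (a sketch only; the route itself is registered by the + * task runner server and the path shown here is an assumption): + * POST /runners/auth { "token": "<shared auth token>" } + * -> { "token": "<single-use grant token>" }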
+ */ + async createGrantToken(req: AuthlessRequest) { + const result = await taskRunnerAuthRequestBodySchema.safeParseAsync(req.body); + if (!result.success) { + throw new BadRequestError(result.error.errors[0].code); + } + + const { token: authToken } = result.data; + if (!this.taskRunnerAuthService.isValidAuthToken(authToken)) { + throw new ForbiddenError(); + } + + const grantToken = await this.taskRunnerAuthService.createGrantToken(); + return { + token: grantToken, + }; + } + + /** + * Middleware to authenticate task runner init requests + */ + async authMiddleware(req: TaskRunnerServerInitRequest, res: Response, next: NextFunction) { + const authHeader = req.headers.authorization; + if (typeof authHeader !== 'string' || !authHeader.startsWith('Bearer ')) { + res.status(401).json({ code: 401, message: 'Unauthorized' }); + return; + } + + const grantToken = authHeader.slice('Bearer '.length); + const isConsumed = await this.taskRunnerAuthService.tryConsumeGrantToken(grantToken); + if (!isConsumed) { + res.status(403).json({ code: 403, message: 'Forbidden' }); + return; + } + + next(); + } +} diff --git a/packages/cli/src/runners/auth/task-runner-auth.schema.ts b/packages/cli/src/runners/auth/task-runner-auth.schema.ts new file mode 100644 index 0000000000..c3ab2c17f2 --- /dev/null +++ b/packages/cli/src/runners/auth/task-runner-auth.schema.ts @@ -0,0 +1,5 @@ +import { z } from 'zod'; + +export const taskRunnerAuthRequestBodySchema = z.object({ + token: z.string().min(1), +}); diff --git a/packages/cli/src/runners/auth/task-runner-auth.service.ts b/packages/cli/src/runners/auth/task-runner-auth.service.ts new file mode 100644 index 0000000000..5907cf6678 --- /dev/null +++ b/packages/cli/src/runners/auth/task-runner-auth.service.ts @@ -0,0 +1,56 @@ +import { GlobalConfig } from '@n8n/config'; +import { randomBytes } from 'crypto'; +import { Service } from 'typedi'; + +import { Time } from '@/constants'; +import { CacheService } from '@/services/cache/cache.service'; + +const GRANT_TOKEN_TTL = 15 * Time.seconds.toMilliseconds; + +@Service() +export class TaskRunnerAuthService { + constructor( + private readonly globalConfig: GlobalConfig, + private readonly cacheService: CacheService, + // For unit testing purposes + private readonly grantTokenTtl = GRANT_TOKEN_TTL, + ) {} + + isValidAuthToken(token: string) { + return token === this.globalConfig.taskRunners.authToken; + } + + /** + * @returns grant token that can be used to establish a task runner connection + */ + async createGrantToken() { + const grantToken = this.generateGrantToken(); + + const key = this.cacheKeyForGrantToken(grantToken); + await this.cacheService.set(key, '1', this.grantTokenTtl); + + return grantToken; + } + + /** + * Checks if the given `grantToken` is a valid token and marks it as + * used. 
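+ * + * Single-use semantics, as exercised by the tests above (sketch): + * await authService.tryConsumeGrantToken(token); // true on first use + * await authService.tryConsumeGrantToken(token); // false on any repeat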
+ */ + async tryConsumeGrantToken(grantToken: string) { + const key = this.cacheKeyForGrantToken(grantToken); + const consumed = await this.cacheService.get(key); + // Not found from cache --> Invalid token + if (consumed === undefined) return false; + + await this.cacheService.delete(key); + return true; + } + + private generateGrantToken() { + return randomBytes(32).toString('hex'); + } + + private cacheKeyForGrantToken(grantToken: string) { + return `grant-token:${grantToken}`; + } +} diff --git a/packages/cli/src/runners/errors.ts b/packages/cli/src/runners/errors.ts new file mode 100644 index 0000000000..cc53e18fd4 --- /dev/null +++ b/packages/cli/src/runners/errors.ts @@ -0,0 +1,9 @@ +import { ApplicationError } from 'n8n-workflow'; + +export class TaskRejectError extends ApplicationError { + constructor(public reason: string) { + super(`Task rejected with reason: ${reason}`, { level: 'info' }); + } +} + +export class TaskError extends ApplicationError {} diff --git a/packages/cli/src/runners/runner-types.ts b/packages/cli/src/runners/runner-types.ts new file mode 100644 index 0000000000..f615754e02 --- /dev/null +++ b/packages/cli/src/runners/runner-types.ts @@ -0,0 +1,243 @@ +import type { Response } from 'express'; +import type { INodeExecutionData } from 'n8n-workflow'; +import type WebSocket from 'ws'; + +import type { TaskRunner } from './task-broker.service'; +import type { AuthlessRequest } from '../requests'; + +export type DataRequestType = 'input' | 'node' | 'all'; + +export interface TaskResultData { + result: INodeExecutionData[]; + customData?: Record; +} + +export interface TaskRunnerServerInitRequest + extends AuthlessRequest<{}, {}, {}, { id: TaskRunner['id']; token?: string }> { + ws: WebSocket; +} + +export type TaskRunnerServerInitResponse = Response & { req: TaskRunnerServerInitRequest }; + +export namespace N8nMessage { + export namespace ToRunner { + export interface InfoRequest { + type: 'broker:inforequest'; + } + + export interface RunnerRegistered { + type: 'broker:runnerregistered'; + } + + export interface TaskOfferAccept { + type: 'broker:taskofferaccept'; + taskId: string; + offerId: string; + } + + export interface TaskCancel { + type: 'broker:taskcancel'; + taskId: string; + reason: string; + } + + export interface TaskSettings { + type: 'broker:tasksettings'; + taskId: string; + settings: unknown; + } + + export interface RPCResponse { + type: 'broker:rpcresponse'; + callId: string; + taskId: string; + status: 'success' | 'error'; + data: unknown; + } + + export interface TaskDataResponse { + type: 'broker:taskdataresponse'; + taskId: string; + requestId: string; + data: unknown; + } + + export type All = + | InfoRequest + | TaskOfferAccept + | TaskCancel + | TaskSettings + | RunnerRegistered + | RPCResponse + | TaskDataResponse; + } + + export namespace ToRequester { + export interface TaskReady { + type: 'broker:taskready'; + requestId: string; + taskId: string; + } + + export interface TaskDone { + type: 'broker:taskdone'; + taskId: string; + data: TaskResultData; + } + + export interface TaskError { + type: 'broker:taskerror'; + taskId: string; + error: unknown; + } + + export interface TaskDataRequest { + type: 'broker:taskdatarequest'; + taskId: string; + requestId: string; + requestType: DataRequestType; + param?: string; + } + + export interface RPC { + type: 'broker:rpc'; + callId: string; + taskId: string; + name: (typeof RPC_ALLOW_LIST)[number]; + params: unknown[]; + } + + export type All = TaskReady | TaskDone | TaskError | 
TaskDataRequest | RPC; + } +} + +export namespace RequesterMessage { + export namespace ToN8n { + export interface TaskSettings { + type: 'requester:tasksettings'; + taskId: string; + settings: unknown; + } + + export interface TaskCancel { + type: 'requester:taskcancel'; + taskId: string; + reason: string; + } + + export interface TaskDataResponse { + type: 'requester:taskdataresponse'; + taskId: string; + requestId: string; + data: unknown; + } + + export interface RPCResponse { + type: 'requester:rpcresponse'; + taskId: string; + callId: string; + status: 'success' | 'error'; + data: unknown; + } + + export interface TaskRequest { + type: 'requester:taskrequest'; + requestId: string; + taskType: string; + } + + export type All = TaskSettings | TaskCancel | RPCResponse | TaskDataResponse | TaskRequest; + } +} + +export namespace RunnerMessage { + export namespace ToN8n { + export interface Info { + type: 'runner:info'; + name: string; + types: string[]; + } + + export interface TaskAccepted { + type: 'runner:taskaccepted'; + taskId: string; + } + + export interface TaskRejected { + type: 'runner:taskrejected'; + taskId: string; + reason: string; + } + + export interface TaskDone { + type: 'runner:taskdone'; + taskId: string; + data: TaskResultData; + } + + export interface TaskError { + type: 'runner:taskerror'; + taskId: string; + error: unknown; + } + + export interface TaskOffer { + type: 'runner:taskoffer'; + offerId: string; + taskType: string; + validFor: number; + } + + export interface TaskDataRequest { + type: 'runner:taskdatarequest'; + taskId: string; + requestId: string; + requestType: DataRequestType; + param?: string; + } + + export interface RPC { + type: 'runner:rpc'; + callId: string; + taskId: string; + name: (typeof RPC_ALLOW_LIST)[number]; + params: unknown[]; + } + + export type All = + | Info + | TaskDone + | TaskError + | TaskAccepted + | TaskRejected + | TaskOffer + | RPC + | TaskDataRequest; + } +} + +export const RPC_ALLOW_LIST = [ + 'logNodeOutput', + 'helpers.httpRequestWithAuthentication', + 'helpers.requestWithAuthenticationPaginated', + // "helpers.normalizeItems" + // "helpers.constructExecutionMetaData" + // "helpers.assertBinaryData" + 'helpers.getBinaryDataBuffer', + // "helpers.copyInputItems" + // "helpers.returnJsonArray" + 'helpers.getSSHClient', + 'helpers.createReadStream', + // "helpers.getStoragePath" + 'helpers.writeContentToFile', + 'helpers.prepareBinaryData', + 'helpers.setBinaryDataBuffer', + 'helpers.copyBinaryFile', + 'helpers.binaryToBuffer', + // "helpers.binaryToString" + // "helpers.getBinaryPath" + 'helpers.getBinaryStream', + 'helpers.getBinaryMetadata', + 'helpers.createDeferredPromise', + 'helpers.httpRequest', +] as const; diff --git a/packages/cli/src/runners/runner-ws-server.ts b/packages/cli/src/runners/runner-ws-server.ts new file mode 100644 index 0000000000..ef9e52f5f5 --- /dev/null +++ b/packages/cli/src/runners/runner-ws-server.ts @@ -0,0 +1,189 @@ +import { GlobalConfig } from '@n8n/config'; +import type { Application } from 'express'; +import { ServerResponse, type Server } from 'http'; +import { ApplicationError } from 'n8n-workflow'; +import type { Socket } from 'net'; +import Container, { Service } from 'typedi'; +import { parse as parseUrl } from 'url'; +import { Server as WSServer } from 'ws'; +import type WebSocket from 'ws'; + +import { Logger } from '@/logging/logger.service'; +import { send } from '@/response-helper'; +import { TaskRunnerAuthController } from '@/runners/auth/task-runner-auth.controller'; + 
+import type {
+	RunnerMessage,
+	N8nMessage,
+	TaskRunnerServerInitRequest,
+	TaskRunnerServerInitResponse,
+} from './runner-types';
+import { TaskBroker, type MessageCallback, type TaskRunner } from './task-broker.service';
+
+function heartbeat(this: WebSocket) {
+	this.isAlive = true;
+}
+
+function getEndpointBasePath(restEndpoint: string) {
+	const globalConfig = Container.get(GlobalConfig);
+
+	let path = globalConfig.taskRunners.path;
+	if (path.startsWith('/')) {
+		path = path.slice(1);
+	}
+	if (path.endsWith('/')) {
+		path = path.slice(0, -1);
+	}
+
+	return `/${restEndpoint}/${path}`;
+}
+
+function getWsEndpoint(restEndpoint: string) {
+	return `${getEndpointBasePath(restEndpoint)}/_ws`;
+}
+
+@Service()
+export class TaskRunnerService {
+	runnerConnections: Map<TaskRunner['id'], WebSocket> = new Map();
+
+	constructor(
+		private readonly logger: Logger,
+		private readonly taskBroker: TaskBroker,
+	) {}
+
+	sendMessage(id: TaskRunner['id'], message: N8nMessage.ToRunner.All) {
+		this.runnerConnections.get(id)?.send(JSON.stringify(message));
+	}
+
+	add(id: TaskRunner['id'], connection: WebSocket) {
+		connection.isAlive = true;
+		connection.on('pong', heartbeat);
+
+		let isConnected = false;
+
+		const onMessage = (data: WebSocket.RawData) => {
+			try {
+				const buffer = Array.isArray(data) ? Buffer.concat(data) : Buffer.from(data);
+
+				const message: RunnerMessage.ToN8n.All = JSON.parse(
+					buffer.toString('utf8'),
+				) as RunnerMessage.ToN8n.All;
+
+				if (!isConnected && message.type !== 'runner:info') {
+					return;
+				} else if (!isConnected && message.type === 'runner:info') {
+					this.removeConnection(id);
+					isConnected = true;
+
+					this.runnerConnections.set(id, connection);
+
+					this.taskBroker.registerRunner(
+						{
+							id,
+							taskTypes: message.types,
+							lastSeen: new Date(),
+							name: message.name,
+						},
+						this.sendMessage.bind(this, id) as MessageCallback,
+					);
+
+					this.sendMessage(id, { type: 'broker:runnerregistered' });
+
+					this.logger.info(`Runner "${message.name}" (${id}) has been registered`);
+					return;
+				}
+
+				void this.taskBroker.onRunnerMessage(id, message);
+			} catch (error) {
+				this.logger.error(`Couldn't parse message from runner "${id}"`, {
+					error: error as unknown,
+					id,
+					data,
+				});
+			}
+		};
+
+		// Make sure to remove the session if the connection is closed
+		connection.once('close', () => {
+			connection.off('pong', heartbeat);
+			connection.off('message', onMessage);
+			this.removeConnection(id);
+		});
+
+		connection.on('message', onMessage);
+		connection.send(
+			JSON.stringify({ type: 'broker:inforequest' } as N8nMessage.ToRunner.InfoRequest),
+		);
+	}
+
+	removeConnection(id: TaskRunner['id']) {
+		const connection = this.runnerConnections.get(id);
+		if (connection) {
+			this.taskBroker.deregisterRunner(id);
+			connection.close();
+			this.runnerConnections.delete(id);
+		}
+	}
+
+	handleRequest(req: TaskRunnerServerInitRequest, _res: TaskRunnerServerInitResponse) {
+		this.add(req.query.id, req.ws);
+	}
+}
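The `add` handler above implements the registration handshake: the server opens with `broker:inforequest`, ignores everything until the runner answers with `runner:info`, then registers the runner with the broker and confirms with `broker:runnerregistered`. As a reading aid, here is a minimal sketch of the same handshake from the runner's side; the connection URL, runner id, and task type are made-up placeholders, and the real runner lives in the separate `@n8n/task-runner` package, which is not part of this diff.

```ts
import WebSocket from 'ws';

// Hypothetical runner-side client for the handshake handled by TaskRunnerService.add().
// Assumes a grant token was already obtained from the `/auth` endpoint.
const ws = new WebSocket('ws://127.0.0.1:5678/rest/runners/_ws?id=runner-1', {
	headers: { authorization: `Bearer ${process.env.N8N_RUNNERS_GRANT_TOKEN ?? ''}` },
});

ws.on('message', (raw) => {
	const message = JSON.parse(String(raw)) as { type: string };

	if (message.type === 'broker:inforequest') {
		// Until `runner:info` is sent, every other message type is ignored by the server
		ws.send(JSON.stringify({ type: 'runner:info', name: 'Example Runner', types: ['javascript'] }));
	} else if (message.type === 'broker:runnerregistered') {
		// Only now may the runner start offering to pick up work
		ws.send(
			JSON.stringify({
				type: 'runner:taskoffer',
				offerId: 'offer-1',
				taskType: 'javascript',
				validFor: 5000, // ms
			}),
		);
	}
});
```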
+
+// Checks for upgrade requests on the runners path and upgrades the connection,
+// then passes the request back to the app to handle the routing
+export const setupRunnerServer = (restEndpoint: string, server: Server, app: Application) => {
+	const globalConfig = Container.get(GlobalConfig);
+	const { authToken } = globalConfig.taskRunners;
+
+	if (!authToken) {
+		throw new ApplicationError(
+			'Authentication token must be configured when task runners are enabled. Use the N8N_RUNNERS_AUTH_TOKEN environment variable to set it.',
+		);
+	}
+
+	const endpoint = getWsEndpoint(restEndpoint);
+	const wsServer = new WSServer({ noServer: true });
+	server.on('upgrade', (request: TaskRunnerServerInitRequest, socket: Socket, head) => {
+		if (parseUrl(request.url).pathname !== endpoint) {
+			// We can't close the connection here, since the Push connections
+			// use the same HTTP server and this upgrade handler gets
+			// triggered for both
+			return;
+		}
+
+		wsServer.handleUpgrade(request, socket, head, (ws) => {
+			request.ws = ws;
+
+			const response = new ServerResponse(request);
+			response.writeHead = (statusCode) => {
+				if (statusCode > 200) ws.close();
+				return response;
+			};
+
+			// @ts-expect-error Hidden API?
+			// eslint-disable-next-line @typescript-eslint/no-unsafe-call
+			app.handle(request, response);
+		});
+	});
+};
+
+export const setupRunnerHandler = (restEndpoint: string, app: Application) => {
+	const wsEndpoint = getWsEndpoint(restEndpoint);
+	const authEndpoint = `${getEndpointBasePath(restEndpoint)}/auth`;
+
+	const taskRunnerAuthController = Container.get(TaskRunnerAuthController);
+	const taskRunnerService = Container.get(TaskRunnerService);
+	app.use(
+		wsEndpoint,
+		// eslint-disable-next-line @typescript-eslint/unbound-method
+		taskRunnerAuthController.authMiddleware,
+		(req: TaskRunnerServerInitRequest, res: TaskRunnerServerInitResponse) =>
+			taskRunnerService.handleRequest(req, res),
+	);
+
+	app.post(
+		authEndpoint,
+		send(async (req) => await taskRunnerAuthController.createGrantToken(req)),
+	);
+};
diff --git a/packages/cli/src/runners/task-broker.service.ts b/packages/cli/src/runners/task-broker.service.ts
new file mode 100644
index 0000000000..829910b468
--- /dev/null
+++ b/packages/cli/src/runners/task-broker.service.ts
@@ -0,0 +1,553 @@
+import { ApplicationError } from 'n8n-workflow';
+import { nanoid } from 'nanoid';
+import { Service } from 'typedi';
+
+import { Logger } from '@/logging/logger.service';
+
+import { TaskRejectError } from './errors';
+import type { N8nMessage, RunnerMessage, RequesterMessage, TaskResultData } from './runner-types';
+
+export interface TaskRunner {
+	id: string;
+	name?: string;
+	taskTypes: string[];
+	lastSeen: Date;
+}
+
+export interface Task {
+	id: string;
+	runnerId: TaskRunner['id'];
+	requesterId: string;
+	taskType: string;
+}
+
+export interface TaskOffer {
+	offerId: string;
+	runnerId: TaskRunner['id'];
+	taskType: string;
+	validFor: number;
+	validUntil: bigint;
+}
+
+export interface TaskRequest {
+	requestId: string;
+	requesterId: string;
+	taskType: string;
+
+	acceptInProgress?: boolean;
+}
+
+export type MessageCallback = (message: N8nMessage.ToRunner.All) => Promise<void> | void;
+export type RequesterMessageCallback = (
+	message: N8nMessage.ToRequester.All,
+) => Promise<void> | void;
+
+type RunnerAcceptCallback = () => void;
+type RequesterAcceptCallback = (settings: RequesterMessage.ToN8n.TaskSettings['settings']) => void;
+type TaskRejectCallback = (reason: TaskRejectError) => void;
+
+@Service()
+export class TaskBroker {
+	private knownRunners: Map<
+		TaskRunner['id'],
+		{ runner: TaskRunner; messageCallback: MessageCallback }
+	> = new Map();
+
+	private requesters: Map<string, RequesterMessageCallback> = new Map();
+
+	private tasks: Map<Task['id'], Task> = new Map();
+
+	private runnerAcceptRejects: Map<
+		Task['id'],
+		{ accept: RunnerAcceptCallback; reject: TaskRejectCallback }
+	> = new Map();
+
+	private requesterAcceptRejects: Map<
+		Task['id'],
+		{ accept: RequesterAcceptCallback; reject: TaskRejectCallback }
+	> =
new Map(); + + private pendingTaskOffers: TaskOffer[] = []; + + private pendingTaskRequests: TaskRequest[] = []; + + constructor(private readonly logger: Logger) {} + + expireTasks() { + const now = process.hrtime.bigint(); + const invalidOffers: number[] = []; + for (let i = 0; i < this.pendingTaskOffers.length; i++) { + if (this.pendingTaskOffers[i].validUntil < now) { + invalidOffers.push(i); + } + } + + // We reverse the list so the later indexes are valid after deleting earlier ones + invalidOffers.reverse().forEach((i) => this.pendingTaskOffers.splice(i, 1)); + } + + registerRunner(runner: TaskRunner, messageCallback: MessageCallback) { + this.knownRunners.set(runner.id, { runner, messageCallback }); + } + + deregisterRunner(runnerId: string) { + this.knownRunners.delete(runnerId); + } + + registerRequester(requesterId: string, messageCallback: RequesterMessageCallback) { + this.requesters.set(requesterId, messageCallback); + } + + deregisterRequester(requesterId: string) { + this.requesters.delete(requesterId); + } + + private async messageRunner(runnerId: TaskRunner['id'], message: N8nMessage.ToRunner.All) { + await this.knownRunners.get(runnerId)?.messageCallback(message); + } + + private async messageRequester(requesterId: string, message: N8nMessage.ToRequester.All) { + await this.requesters.get(requesterId)?.(message); + } + + async onRunnerMessage(runnerId: TaskRunner['id'], message: RunnerMessage.ToN8n.All) { + const runner = this.knownRunners.get(runnerId); + if (!runner) { + return; + } + switch (message.type) { + case 'runner:taskaccepted': + this.handleRunnerAccept(message.taskId); + break; + case 'runner:taskrejected': + this.handleRunnerReject(message.taskId, message.reason); + break; + case 'runner:taskoffer': + this.taskOffered({ + runnerId, + taskType: message.taskType, + offerId: message.offerId, + validFor: message.validFor, + validUntil: process.hrtime.bigint() + BigInt(message.validFor * 1_000_000), + }); + break; + case 'runner:taskdone': + await this.taskDoneHandler(message.taskId, message.data); + break; + case 'runner:taskerror': + await this.taskErrorHandler(message.taskId, message.error); + break; + case 'runner:taskdatarequest': + await this.handleDataRequest( + message.taskId, + message.requestId, + message.requestType, + message.param, + ); + break; + + case 'runner:rpc': + await this.handleRpcRequest(message.taskId, message.callId, message.name, message.params); + break; + // Already handled + case 'runner:info': + break; + } + } + + async handleRpcRequest( + taskId: Task['id'], + callId: string, + name: RunnerMessage.ToN8n.RPC['name'], + params: unknown[], + ) { + const task = this.tasks.get(taskId); + if (!task) { + return; + } + await this.messageRequester(task.requesterId, { + type: 'broker:rpc', + taskId, + callId, + name, + params, + }); + } + + handleRunnerAccept(taskId: Task['id']) { + const acceptReject = this.runnerAcceptRejects.get(taskId); + if (acceptReject) { + acceptReject.accept(); + this.runnerAcceptRejects.delete(taskId); + } + } + + handleRunnerReject(taskId: Task['id'], reason: string) { + const acceptReject = this.runnerAcceptRejects.get(taskId); + if (acceptReject) { + acceptReject.reject(new TaskRejectError(reason)); + this.runnerAcceptRejects.delete(taskId); + } + } + + async handleDataRequest( + taskId: Task['id'], + requestId: RunnerMessage.ToN8n.TaskDataRequest['requestId'], + requestType: RunnerMessage.ToN8n.TaskDataRequest['requestType'], + param?: string, + ) { + const task = this.tasks.get(taskId); + if (!task) { + return; 
+ } + await this.messageRequester(task.requesterId, { + type: 'broker:taskdatarequest', + taskId, + requestId, + requestType, + param, + }); + } + + async handleResponse( + taskId: Task['id'], + requestId: RunnerMessage.ToN8n.TaskDataRequest['requestId'], + data: unknown, + ) { + const task = this.tasks.get(taskId); + if (!task) { + return; + } + await this.messageRunner(task.requesterId, { + type: 'broker:taskdataresponse', + taskId, + requestId, + data, + }); + } + + async onRequesterMessage(requesterId: string, message: RequesterMessage.ToN8n.All) { + switch (message.type) { + case 'requester:tasksettings': + this.handleRequesterAccept(message.taskId, message.settings); + break; + case 'requester:taskcancel': + await this.cancelTask(message.taskId, message.reason); + break; + case 'requester:taskrequest': + this.taskRequested({ + taskType: message.taskType, + requestId: message.requestId, + requesterId, + }); + break; + case 'requester:taskdataresponse': + await this.handleRequesterDataResponse(message.taskId, message.requestId, message.data); + break; + case 'requester:rpcresponse': + await this.handleRequesterRpcResponse( + message.taskId, + message.callId, + message.status, + message.data, + ); + break; + } + } + + async handleRequesterRpcResponse( + taskId: string, + callId: string, + status: RequesterMessage.ToN8n.RPCResponse['status'], + data: unknown, + ) { + const runner = await this.getRunnerOrFailTask(taskId); + await this.messageRunner(runner.id, { + type: 'broker:rpcresponse', + taskId, + callId, + status, + data, + }); + } + + async handleRequesterDataResponse(taskId: Task['id'], requestId: string, data: unknown) { + const runner = await this.getRunnerOrFailTask(taskId); + + await this.messageRunner(runner.id, { + type: 'broker:taskdataresponse', + taskId, + requestId, + data, + }); + } + + handleRequesterAccept( + taskId: Task['id'], + settings: RequesterMessage.ToN8n.TaskSettings['settings'], + ) { + const acceptReject = this.requesterAcceptRejects.get(taskId); + if (acceptReject) { + acceptReject.accept(settings); + this.requesterAcceptRejects.delete(taskId); + } + } + + handleRequesterReject(taskId: Task['id'], reason: string) { + const acceptReject = this.requesterAcceptRejects.get(taskId); + if (acceptReject) { + acceptReject.reject(new TaskRejectError(reason)); + this.requesterAcceptRejects.delete(taskId); + } + } + + private async cancelTask(taskId: Task['id'], reason: string) { + const task = this.tasks.get(taskId); + if (!task) { + return; + } + this.tasks.delete(taskId); + + await this.messageRunner(task.runnerId, { + type: 'broker:taskcancel', + taskId, + reason, + }); + } + + private async failTask(taskId: Task['id'], reason: string) { + const task = this.tasks.get(taskId); + if (!task) { + return; + } + this.tasks.delete(taskId); + // TODO: special message type? 
+ await this.messageRequester(task.requesterId, { + type: 'broker:taskerror', + taskId, + error: reason, + }); + } + + private async getRunnerOrFailTask(taskId: Task['id']): Promise { + const task = this.tasks.get(taskId); + if (!task) { + throw new ApplicationError(`Cannot find runner, failed to find task (${taskId})`, { + level: 'error', + }); + } + const runner = this.knownRunners.get(task.runnerId); + if (!runner) { + const reason = `Cannot find runner, failed to find runner (${task.runnerId})`; + await this.failTask(taskId, reason); + throw new ApplicationError(reason, { + level: 'error', + }); + } + return runner.runner; + } + + async sendTaskSettings(taskId: Task['id'], settings: unknown) { + const runner = await this.getRunnerOrFailTask(taskId); + await this.messageRunner(runner.id, { + type: 'broker:tasksettings', + taskId, + settings, + }); + } + + async taskDoneHandler(taskId: Task['id'], data: TaskResultData) { + const task = this.tasks.get(taskId); + if (!task) { + return; + } + await this.requesters.get(task.requesterId)?.({ + type: 'broker:taskdone', + taskId: task.id, + data, + }); + this.tasks.delete(task.id); + } + + async taskErrorHandler(taskId: Task['id'], error: unknown) { + const task = this.tasks.get(taskId); + if (!task) { + return; + } + await this.requesters.get(task.requesterId)?.({ + type: 'broker:taskerror', + taskId: task.id, + error, + }); + this.tasks.delete(task.id); + } + + async acceptOffer(offer: TaskOffer, request: TaskRequest): Promise { + const taskId = nanoid(8); + + try { + const acceptPromise = new Promise((resolve, reject) => { + this.runnerAcceptRejects.set(taskId, { accept: resolve as () => void, reject }); + + // TODO: customisable timeout + setTimeout(() => { + reject('Runner timed out'); + }, 2000); + }); + + await this.messageRunner(offer.runnerId, { + type: 'broker:taskofferaccept', + offerId: offer.offerId, + taskId, + }); + + await acceptPromise; + } catch (e) { + request.acceptInProgress = false; + if (e instanceof TaskRejectError) { + this.logger.info(`Task (${taskId}) rejected by Runner with reason "${e.reason}"`); + return; + } + throw e; + } + + const task: Task = { + id: taskId, + taskType: offer.taskType, + runnerId: offer.runnerId, + requesterId: request.requesterId, + }; + + this.tasks.set(taskId, task); + const requestIndex = this.pendingTaskRequests.findIndex( + (r) => r.requestId === request.requestId, + ); + if (requestIndex === -1) { + this.logger.error( + `Failed to find task request (${request.requestId}) after a task was accepted. 
This shouldn't happen, and might be a race condition.`, + ); + return; + } + this.pendingTaskRequests.splice(requestIndex, 1); + + try { + const acceptPromise = new Promise( + (resolve, reject) => { + this.requesterAcceptRejects.set(taskId, { + accept: resolve as (settings: RequesterMessage.ToN8n.TaskSettings['settings']) => void, + reject, + }); + + // TODO: customisable timeout + setTimeout(() => { + reject('Requester timed out'); + }, 2000); + }, + ); + + await this.messageRequester(request.requesterId, { + type: 'broker:taskready', + requestId: request.requestId, + taskId, + }); + + const settings = await acceptPromise; + await this.sendTaskSettings(task.id, settings); + } catch (e) { + if (e instanceof TaskRejectError) { + await this.cancelTask(task.id, e.reason); + this.logger.info(`Task (${taskId}) rejected by Requester with reason "${e.reason}"`); + return; + } + await this.cancelTask(task.id, 'Unknown reason'); + throw e; + } + } + + // Find matching task offers and requests, then let the runner + // know that an offer has been accepted + // + // *DO NOT MAKE THIS FUNCTION ASYNC* + // This function relies on never yielding. + // If you need to make this function async, you'll need to + // implement some kind of locking for the requests and task + // lists + settleTasks() { + this.expireTasks(); + + for (const request of this.pendingTaskRequests) { + if (request.acceptInProgress) { + continue; + } + const offerIndex = this.pendingTaskOffers.findIndex((o) => o.taskType === request.taskType); + if (offerIndex === -1) { + continue; + } + const offer = this.pendingTaskOffers[offerIndex]; + + request.acceptInProgress = true; + this.pendingTaskOffers.splice(offerIndex, 1); + + void this.acceptOffer(offer, request); + } + } + + taskRequested(request: TaskRequest) { + this.pendingTaskRequests.push(request); + this.settleTasks(); + } + + taskOffered(offer: TaskOffer) { + this.pendingTaskOffers.push(offer); + this.settleTasks(); + } + + /** + * For testing only + */ + + getTasks() { + return this.tasks; + } + + getPendingTaskOffers() { + return this.pendingTaskOffers; + } + + getPendingTaskRequests() { + return this.pendingTaskRequests; + } + + getKnownRunners() { + return this.knownRunners; + } + + getKnownRequesters() { + return this.requesters; + } + + getRunnerAcceptRejects() { + return this.runnerAcceptRejects; + } + + setTasks(tasks: Record) { + this.tasks = new Map(Object.entries(tasks)); + } + + setPendingTaskOffers(pendingTaskOffers: TaskOffer[]) { + this.pendingTaskOffers = pendingTaskOffers; + } + + setPendingTaskRequests(pendingTaskRequests: TaskRequest[]) { + this.pendingTaskRequests = pendingTaskRequests; + } + + setRunnerAcceptRejects( + runnerAcceptRejects: Record< + string, + { accept: RunnerAcceptCallback; reject: TaskRejectCallback } + >, + ) { + this.runnerAcceptRejects = new Map(Object.entries(runnerAcceptRejects)); + } +} diff --git a/packages/cli/src/runners/task-managers/single-main-task-manager.ts b/packages/cli/src/runners/task-managers/single-main-task-manager.ts new file mode 100644 index 0000000000..b5b60df72b --- /dev/null +++ b/packages/cli/src/runners/task-managers/single-main-task-manager.ts @@ -0,0 +1,30 @@ +import Container from 'typedi'; + +import { TaskManager } from './task-manager'; +import type { RequesterMessage } from '../runner-types'; +import type { RequesterMessageCallback } from '../task-broker.service'; +import { TaskBroker } from '../task-broker.service'; + +export class SingleMainTaskManager extends TaskManager { + taskBroker: TaskBroker; + + 
id: string = 'single-main'; + + constructor() { + super(); + this.registerRequester(); + } + + registerRequester() { + this.taskBroker = Container.get(TaskBroker); + + this.taskBroker.registerRequester( + this.id, + this.onMessage.bind(this) as RequesterMessageCallback, + ); + } + + sendMessage(message: RequesterMessage.ToN8n.All) { + void this.taskBroker.onRequesterMessage(this.id, message); + } +} diff --git a/packages/cli/src/runners/task-managers/task-manager.ts b/packages/cli/src/runners/task-managers/task-manager.ts new file mode 100644 index 0000000000..58d8ade906 --- /dev/null +++ b/packages/cli/src/runners/task-managers/task-manager.ts @@ -0,0 +1,413 @@ +import { + type EnvProviderState, + type IExecuteFunctions, + type Workflow, + type IRunExecutionData, + type INodeExecutionData, + type ITaskDataConnections, + type INode, + type WorkflowParameters, + type INodeParameters, + type WorkflowExecuteMode, + type IExecuteData, + type IDataObject, + type IWorkflowExecuteAdditionalData, + type Result, + createResultOk, + createResultError, +} from 'n8n-workflow'; +import { nanoid } from 'nanoid'; + +import { + RPC_ALLOW_LIST, + type TaskResultData, + type N8nMessage, + type RequesterMessage, +} from '../runner-types'; + +export type RequestAccept = (jobId: string) => void; +export type RequestReject = (reason: string) => void; + +export type TaskAccept = (data: TaskResultData) => void; +export type TaskReject = (error: unknown) => void; + +export interface TaskData { + executeFunctions: IExecuteFunctions; + inputData: ITaskDataConnections; + node: INode; + + workflow: Workflow; + runExecutionData: IRunExecutionData; + runIndex: number; + itemIndex: number; + activeNodeName: string; + connectionInputData: INodeExecutionData[]; + siblingParameters: INodeParameters; + mode: WorkflowExecuteMode; + envProviderState: EnvProviderState; + executeData?: IExecuteData; + defaultReturnRunIndex: number; + selfData: IDataObject; + contextNodeName: string; + additionalData: IWorkflowExecuteAdditionalData; +} + +export interface PartialAdditionalData { + executionId?: string; + restartExecutionId?: string; + restApiUrl: string; + instanceBaseUrl: string; + formWaitingBaseUrl: string; + webhookBaseUrl: string; + webhookWaitingBaseUrl: string; + webhookTestBaseUrl: string; + currentNodeParameters?: INodeParameters; + executionTimeoutTimestamp?: number; + userId?: string; + variables: IDataObject; +} + +export interface AllCodeTaskData { + workflow: Omit; + inputData: ITaskDataConnections; + node: INode; + + runExecutionData: IRunExecutionData; + runIndex: number; + itemIndex: number; + activeNodeName: string; + connectionInputData: INodeExecutionData[]; + siblingParameters: INodeParameters; + mode: WorkflowExecuteMode; + envProviderState: EnvProviderState; + executeData?: IExecuteData; + defaultReturnRunIndex: number; + selfData: IDataObject; + contextNodeName: string; + additionalData: PartialAdditionalData; +} + +export interface TaskRequest { + requestId: string; + taskType: string; + settings: unknown; + data: TaskData; +} + +export interface Task { + taskId: string; + settings: unknown; + data: TaskData; +} + +interface ExecuteFunctionObject { + [name: string]: ((...args: unknown[]) => unknown) | ExecuteFunctionObject; +} + +const workflowToParameters = (workflow: Workflow): Omit => { + return { + id: workflow.id, + name: workflow.name, + active: workflow.active, + connections: workflow.connectionsBySourceNode, + nodes: Object.values(workflow.nodes), + pinData: workflow.pinData, + settings: 
workflow.settings,
+		staticData: workflow.staticData,
+	};
+};
+
+export class TaskManager {
+	requestAcceptRejects: Map<string, { accept: RequestAccept; reject: RequestReject }> = new Map();
+
+	taskAcceptRejects: Map<string, { accept: TaskAccept; reject: TaskReject }> = new Map();
+
+	pendingRequests: Map<string, TaskRequest> = new Map();
+
+	tasks: Map<string, Task> = new Map();
+
+	async startTask<TData, TError>(
+		additionalData: IWorkflowExecuteAdditionalData,
+		taskType: string,
+		settings: unknown,
+		executeFunctions: IExecuteFunctions,
+		inputData: ITaskDataConnections,
+		node: INode,
+		workflow: Workflow,
+		runExecutionData: IRunExecutionData,
+		runIndex: number,
+		itemIndex: number,
+		activeNodeName: string,
+		connectionInputData: INodeExecutionData[],
+		siblingParameters: INodeParameters,
+		mode: WorkflowExecuteMode,
+		envProviderState: EnvProviderState,
+		executeData?: IExecuteData,
+		defaultReturnRunIndex = -1,
+		selfData: IDataObject = {},
+		contextNodeName: string = activeNodeName,
+	): Promise<Result<TData, TError>> {
+		const data: TaskData = {
+			workflow,
+			runExecutionData,
+			runIndex,
+			connectionInputData,
+			inputData,
+			node,
+			executeFunctions,
+			itemIndex,
+			siblingParameters,
+			mode,
+			envProviderState,
+			executeData,
+			defaultReturnRunIndex,
+			selfData,
+			contextNodeName,
+			activeNodeName,
+			additionalData,
+		};
+
+		const request: TaskRequest = {
+			requestId: nanoid(),
+			taskType,
+			settings,
+			data,
+		};
+
+		this.pendingRequests.set(request.requestId, request);
+
+		const taskIdPromise = new Promise<string>((resolve, reject) => {
+			this.requestAcceptRejects.set(request.requestId, {
+				accept: resolve,
+				reject,
+			});
+		});
+
+		this.sendMessage({
+			type: 'requester:taskrequest',
+			requestId: request.requestId,
+			taskType,
+		});
+
+		const taskId = await taskIdPromise;
+
+		const task: Task = {
+			taskId,
+			data,
+			settings,
+		};
+		this.tasks.set(task.taskId, task);
+
+		try {
+			const dataPromise = new Promise<TaskResultData>((resolve, reject) => {
+				this.taskAcceptRejects.set(task.taskId, {
+					accept: resolve,
+					reject,
+				});
+			});
+
+			this.sendMessage({
+				type: 'requester:tasksettings',
+				taskId,
+				settings,
+			});
+
+			const resultData = await dataPromise;
+			// Set custom execution data (`$execution.customData`) if sent
+			if (resultData.customData) {
+				Object.entries(resultData.customData).forEach(([k, v]) => {
+					if (!runExecutionData.resultData.metadata) {
+						runExecutionData.resultData.metadata = {};
+					}
+					runExecutionData.resultData.metadata[k] = v;
+				});
+			}
+
+			return createResultOk(resultData.result as TData);
+		} catch (e: unknown) {
+			return createResultError(e as TError);
+		} finally {
+			this.tasks.delete(taskId);
+		}
+	}
+
+	sendMessage(_message: RequesterMessage.ToN8n.All) {}
+
+	onMessage(message: N8nMessage.ToRequester.All) {
+		switch (message.type) {
+			case 'broker:taskready':
+				this.taskReady(message.requestId, message.taskId);
+				break;
+			case 'broker:taskdone':
+				this.taskDone(message.taskId, message.data);
+				break;
+			case 'broker:taskerror':
+				this.taskError(message.taskId, message.error);
+				break;
+			case 'broker:taskdatarequest':
+				this.sendTaskData(message.taskId, message.requestId, message.requestType);
+				break;
+			case 'broker:rpc':
+				void this.handleRpc(message.taskId, message.callId, message.name, message.params);
+				break;
+		}
+	}
+
+	taskReady(requestId: string, taskId: string) {
+		const acceptReject = this.requestAcceptRejects.get(requestId);
+		if (!acceptReject) {
+			this.rejectTask(
+				taskId,
+				'Request ID not found. In multi-main setup, it is possible for one of the mains to have reported ready state already.',
+			);
+			return;
+		}
+
+		acceptReject.accept(taskId);
+		this.requestAcceptRejects.delete(requestId);
+	}
+
+	rejectTask(jobId: string, reason: string) {
+		this.sendMessage({
+			type: 'requester:taskcancel',
+			taskId: jobId,
+			reason,
+		});
+	}
+
+	taskDone(taskId: string, data: TaskResultData) {
+		const acceptReject = this.taskAcceptRejects.get(taskId);
+		if (acceptReject) {
+			acceptReject.accept(data);
+			this.taskAcceptRejects.delete(taskId);
+		}
+	}
+
+	taskError(taskId: string, error: unknown) {
+		const acceptReject = this.taskAcceptRejects.get(taskId);
+		if (acceptReject) {
+			acceptReject.reject(error);
+			this.taskAcceptRejects.delete(taskId);
+		}
+	}
+
+	sendTaskData(
+		taskId: string,
+		requestId: string,
+		requestType: N8nMessage.ToRequester.TaskDataRequest['requestType'],
+	) {
+		const job = this.tasks.get(taskId);
+		if (!job) {
+			// TODO: logging
+			return;
+		}
+		if (requestType === 'all') {
+			const jd = job.data;
+			const ad = jd.additionalData;
+			const data: AllCodeTaskData = {
+				workflow: workflowToParameters(jd.workflow),
+				connectionInputData: jd.connectionInputData,
+				inputData: jd.inputData,
+				itemIndex: jd.itemIndex,
+				activeNodeName: jd.activeNodeName,
+				contextNodeName: jd.contextNodeName,
+				defaultReturnRunIndex: jd.defaultReturnRunIndex,
+				mode: jd.mode,
+				envProviderState: jd.envProviderState,
+				node: jd.node,
+				runExecutionData: jd.runExecutionData,
+				runIndex: jd.runIndex,
+				selfData: jd.selfData,
+				siblingParameters: jd.siblingParameters,
+				executeData: jd.executeData,
+				additionalData: {
+					formWaitingBaseUrl: ad.formWaitingBaseUrl,
+					instanceBaseUrl: ad.instanceBaseUrl,
+					restApiUrl: ad.restApiUrl,
+					variables: ad.variables,
+					webhookBaseUrl: ad.webhookBaseUrl,
+					webhookTestBaseUrl: ad.webhookTestBaseUrl,
+					webhookWaitingBaseUrl: ad.webhookWaitingBaseUrl,
+					currentNodeParameters: ad.currentNodeParameters,
+					executionId: ad.executionId,
+					executionTimeoutTimestamp: ad.executionTimeoutTimestamp,
+					restartExecutionId: ad.restartExecutionId,
+					userId: ad.userId,
+				},
+			};
+			this.sendMessage({
+				type: 'requester:taskdataresponse',
+				taskId,
+				requestId,
+				data,
+			});
+		}
+	}
+
+	async handleRpc(
+		taskId: string,
+		callId: string,
+		name: N8nMessage.ToRequester.RPC['name'],
+		params: unknown[],
+	) {
+		const job = this.tasks.get(taskId);
+		if (!job) {
+			// TODO: logging
+			return;
+		}
+
+		try {
+			if (!RPC_ALLOW_LIST.includes(name)) {
+				this.sendMessage({
+					type: 'requester:rpcresponse',
+					taskId,
+					callId,
+					status: 'error',
+					data: 'Method not allowed',
+				});
+				return;
+			}
+			const splitPath = name.split('.');
+
+			const funcs = job.data.executeFunctions;
+
+			let func: ((...args: unknown[]) => Promise<unknown>) | undefined = undefined;
+			let funcObj: ExecuteFunctionObject[string] | undefined =
+				funcs as unknown as ExecuteFunctionObject;
+			for (const part of splitPath) {
+				funcObj = (funcObj as ExecuteFunctionObject)[part] ?? undefined;
+				if (!funcObj) {
+					break;
+				}
+			}
+			func = funcObj as unknown as (...args: unknown[]) => Promise<unknown>;
+			if (!func) {
+				this.sendMessage({
+					type: 'requester:rpcresponse',
+					taskId,
+					callId,
+					status: 'error',
+					data: 'Could not find method',
+				});
+				return;
+			}
+			const data = (await func.call(funcs, ...params)) as unknown;
+
+			this.sendMessage({
+				type: 'requester:rpcresponse',
+				taskId,
+				callId,
+				status: 'success',
+				data,
+			});
+		} catch (e) {
+			this.sendMessage({
+				type: 'requester:rpcresponse',
+				taskId,
+				callId,
+				status: 'error',
+				data: e,
+			});
+		}
+	}
+}
diff --git a/packages/cli/src/runners/task-runner-process.ts b/packages/cli/src/runners/task-runner-process.ts
new file mode 100644
index 0000000000..9f570fcb38
--- /dev/null
+++ b/packages/cli/src/runners/task-runner-process.ts
@@ -0,0 +1,92 @@
+import { GlobalConfig } from '@n8n/config';
+import * as a from 'node:assert/strict';
+import { spawn } from 'node:child_process';
+import * as process from 'node:process';
+import { Service } from 'typedi';
+
+import { TaskRunnerAuthService } from './auth/task-runner-auth.service';
+import { OnShutdown } from '../decorators/on-shutdown';
+
+type ChildProcess = ReturnType<typeof spawn>;
+
+/**
+ * Manages the JS task runner process as a child process
+ */
+@Service()
+export class TaskRunnerProcess {
+	public get isRunning() {
+		return this.process !== null;
+	}
+
+	/** The process ID of the task runner process */
+	public get pid() {
+		return this.process?.pid;
+	}
+
+	private process: ChildProcess | null = null;
+
+	/** Promise that resolves after the process has exited */
+	private runPromise: Promise<void> | null = null;
+
+	private isShuttingDown = false;
+
+	constructor(
+		private readonly globalConfig: GlobalConfig,
+		private readonly authService: TaskRunnerAuthService,
+	) {}
+
+	async start() {
+		a.ok(!this.process, 'Task Runner Process already running');
+
+		const grantToken = await this.authService.createGrantToken();
+		const startScript = require.resolve('@n8n/task-runner');
+
+		this.process = spawn('node', [startScript], {
+			env: {
+				PATH: process.env.PATH,
+				N8N_RUNNERS_GRANT_TOKEN: grantToken,
+				N8N_RUNNERS_N8N_URI: `127.0.0.1:${this.globalConfig.taskRunners.port}`,
+				NODE_FUNCTION_ALLOW_BUILTIN: process.env.NODE_FUNCTION_ALLOW_BUILTIN,
+				NODE_FUNCTION_ALLOW_EXTERNAL: process.env.NODE_FUNCTION_ALLOW_EXTERNAL,
+			},
+		});
+
+		this.process.stdout?.pipe(process.stdout);
+		this.process.stderr?.pipe(process.stderr);
+
+		this.monitorProcess(this.process);
+	}
+
+	@OnShutdown()
+	async stop() {
+		if (!this.process) {
+			return;
+		}
+
+		this.isShuttingDown = true;
+
+		// TODO: Timeout & force kill
+		this.process.kill();
+		await this.runPromise;
+
+		this.isShuttingDown = false;
+	}
+
+	private monitorProcess(taskRunnerProcess: ChildProcess) {
+		this.runPromise = new Promise((resolve) => {
+			taskRunnerProcess.on('exit', (code) => {
+				this.onProcessExit(code, resolve);
+			});
+		});
+	}
+
+	private onProcessExit(_code: number | null, resolveFn: () => void) {
+		this.process = null;
+		resolveFn();
+
+		// If we are not shutting down, restart the process
+		if (!this.isShuttingDown) {
+			setImmediate(async () => await this.start());
+		}
+	}
+}
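`TaskRunnerProcess` above is a small supervisor: `monitorProcess` captures the exit in `runPromise`, and `onProcessExit` respawns the child unless `stop()` has set the shutdown flag. A condensed, standalone sketch of that restart pattern (with a made-up `worker.js` script standing in for the `@n8n/task-runner` entry point):

```ts
import { spawn, type ChildProcess } from 'node:child_process';

// Minimal restart-on-exit supervisor in the style of TaskRunnerProcess.
class Supervisor {
	private child: ChildProcess | null = null;

	private isShuttingDown = false;

	start(script: string) {
		this.child = spawn('node', [script], { stdio: 'inherit' });
		this.child.on('exit', (code) => {
			this.child = null;
			if (!this.isShuttingDown) {
				// Unexpected exit: respawn on the next tick, like onProcessExit() does
				setImmediate(() => this.start(script));
			} else {
				console.log(`child exited with code ${String(code)}`);
			}
		});
	}

	stop() {
		this.isShuttingDown = true;
		this.child?.kill(); // SIGTERM; the real class still carries a TODO for force-kill on timeout
	}
}

const supervisor = new Supervisor();
supervisor.start('./worker.js'); // hypothetical script path
setTimeout(() => supervisor.stop(), 10_000);
```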
diff --git a/packages/cli/src/runners/task-runner-server.ts b/packages/cli/src/runners/task-runner-server.ts
new file mode 100644
index 0000000000..fc31c100a3
--- /dev/null
+++ b/packages/cli/src/runners/task-runner-server.ts
@@ -0,0 +1,201 @@
+import { GlobalConfig } from '@n8n/config';
+import compression from 'compression';
+import express from 'express';
+import * as a from 'node:assert/strict';
+import { randomBytes } from 'node:crypto';
+import { ServerResponse, type Server, createServer as createHttpServer } from 'node:http';
+import type { AddressInfo, Socket } from 'node:net';
+import { parse as parseUrl } from 'node:url';
+import { Service } from 'typedi';
+import { Server as WSServer } from 'ws';
+
+import { inTest, LOWEST_SHUTDOWN_PRIORITY } from '@/constants';
+import { OnShutdown } from '@/decorators/on-shutdown';
+import { Logger } from '@/logging/logger.service';
+import { bodyParser, rawBodyReader } from '@/middlewares';
+import { send } from '@/response-helper';
+import { TaskRunnerAuthController } from '@/runners/auth/task-runner-auth.controller';
+import type {
+	TaskRunnerServerInitRequest,
+	TaskRunnerServerInitResponse,
+} from '@/runners/runner-types';
+import { TaskRunnerService } from '@/runners/runner-ws-server';
+
+/**
+ * Task Runner HTTP & WS server
+ */
+@Service()
+export class TaskRunnerServer {
+	private server: Server | undefined;
+
+	private wsServer: WSServer | undefined;
+
+	readonly app: express.Application;
+
+	public get port() {
+		return (this.server?.address() as AddressInfo)?.port;
+	}
+
+	private get upgradeEndpoint() {
+		return `${this.getEndpointBasePath()}/_ws`;
+	}
+
+	constructor(
+		private readonly logger: Logger,
+		private readonly globalConfig: GlobalConfig,
+		private readonly taskRunnerAuthController: TaskRunnerAuthController,
+		private readonly taskRunnerService: TaskRunnerService,
+	) {
+		this.app = express();
+		this.app.disable('x-powered-by');
+
+		if (!this.globalConfig.taskRunners.authToken) {
+			// Generate an auth token if one is not set
+			this.globalConfig.taskRunners.authToken = randomBytes(32).toString('hex');
+		}
+	}
+
+	async start(): Promise<void> {
+		await this.setupHttpServer();
+
+		this.setupWsServer();
+
+		if (!inTest) {
+			await this.setupErrorHandlers();
+		}
+
+		this.setupCommonMiddlewares();
+
+		this.configureRoutes();
+	}
+
+	@OnShutdown(LOWEST_SHUTDOWN_PRIORITY)
+	async stop(): Promise<void> {
+		if (this.wsServer) {
+			this.wsServer.close();
+			this.wsServer = undefined;
+		}
+		if (this.server) {
+			await new Promise<void>((resolve) => this.server?.close(() => resolve()));
+			this.server = undefined;
+		}
+	}
+
+	/** Creates an HTTP server and listens to the configured port */
+	private async setupHttpServer() {
+		const { app } = this;
+
+		this.server = createHttpServer(app);
+
+		const {
+			taskRunners: { port, listen_address: address },
+		} = this.globalConfig;
+
+		this.server.on('error', (error: Error & { code: string }) => {
+			if (error.code === 'EADDRINUSE') {
+				this.logger.info(
+					`n8n Task Runner's port ${port} is already in use. Do you have another instance of n8n running already?`,
+				);
+				process.exit(1);
+			}
+		});
+
+		await new Promise<void>((resolve) => {
+			a.ok(this.server);
+			this.server.listen(port, address, () => resolve());
+		});
+
+		this.logger.info(`n8n Task Runner server ready on ${address}, port ${port}`);
+	}
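`setupHttpServer` above wraps `server.listen` in a promise and treats `EADDRINUSE` as a fatal startup error. The same pattern in isolation, as a small sketch with an assumed port and bind address:

```ts
import { createServer } from 'node:http';

// Listen on a port, exiting early with a clear message if it is already taken.
const server = createServer();
const port = 5679; // assumed port for this sketch

server.on('error', (error: NodeJS.ErrnoException) => {
	if (error.code === 'EADDRINUSE') {
		console.error(`Port ${port} is already in use. Is another instance running?`);
		process.exit(1);
	}
});

void (async () => {
	await new Promise<void>((resolve) => server.listen(port, '127.0.0.1', () => resolve()));
	console.log(`listening on 127.0.0.1:${port}`);
})();
```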
+
+	/** Creates WebSocket server for handling upgrade requests */
+	private setupWsServer() {
+		const { authToken } = this.globalConfig.taskRunners;
+		a.ok(authToken);
+		a.ok(this.server);
+
+		this.wsServer = new WSServer({ noServer: true });
+		this.server.on('upgrade', this.handleUpgradeRequest);
+	}
+
+	private async setupErrorHandlers() {
+		const { app } = this;
+
+		// Augment errors sent to Sentry
+		const {
+			Handlers: { requestHandler, errorHandler },
+		} = await import('@sentry/node');
+		app.use(requestHandler());
+		app.use(errorHandler());
+	}
+
+	private setupCommonMiddlewares() {
+		// Compress the response data
+		this.app.use(compression());
+
+		this.app.use(rawBodyReader);
+		this.app.use(bodyParser);
+	}
+
+	private configureRoutes() {
+		this.app.use(
+			this.upgradeEndpoint,
+			// eslint-disable-next-line @typescript-eslint/unbound-method
+			this.taskRunnerAuthController.authMiddleware,
+			(req: TaskRunnerServerInitRequest, res: TaskRunnerServerInitResponse) =>
+				this.taskRunnerService.handleRequest(req, res),
+		);
+
+		const authEndpoint = `${this.getEndpointBasePath()}/auth`;
+		this.app.post(
+			authEndpoint,
+			send(async (req) => await this.taskRunnerAuthController.createGrantToken(req)),
+		);
+	}
+
+	private handleUpgradeRequest = (
+		request: TaskRunnerServerInitRequest,
+		socket: Socket,
+		head: Buffer,
+	) => {
+		if (parseUrl(request.url).pathname !== this.upgradeEndpoint) {
+			socket.write('HTTP/1.1 404 Not Found\r\n\r\n');
+			socket.destroy();
+			return;
+		}
+
+		if (!this.wsServer) {
+			// This might happen if the server is shutting down and we receive an upgrade request
+			socket.write('HTTP/1.1 503 Service Unavailable\r\n\r\n');
+			socket.destroy();
+			return;
+		}
+
+		this.wsServer.handleUpgrade(request, socket, head, (ws) => {
+			request.ws = ws;
+
+			const response = new ServerResponse(request);
+			response.writeHead = (statusCode) => {
+				if (statusCode > 200) ws.close();
+				return response;
+			};
+
+			// @ts-expect-error Delegate the request to the express app. This function is not exposed
+			// eslint-disable-next-line @typescript-eslint/no-unsafe-call
+			this.app.handle(request, response);
+		});
+	};
+
+	/** Returns the normalized base path for the task runner endpoints */
+	private getEndpointBasePath() {
+		let path = this.globalConfig.taskRunners.path;
+		if (!path.startsWith('/')) {
+			path = `/${path}`;
+		}
+		if (path.endsWith('/')) {
+			path = path.slice(0, -1);
+		}
+
+		return path;
+	}
+}
diff --git a/packages/cli/src/scaling/__tests__/publisher.service.test.ts b/packages/cli/src/scaling/__tests__/publisher.service.test.ts
index 311ee0bbb8..05bb52bc6a 100644
--- a/packages/cli/src/scaling/__tests__/publisher.service.test.ts
+++ b/packages/cli/src/scaling/__tests__/publisher.service.test.ts
@@ -3,13 +3,10 @@ import { mock } from 'jest-mock-extended';
 
 import config from '@/config';
 import { generateNanoId } from '@/databases/utils/generators';
-import type {
-	RedisServiceCommandObject,
-	RedisServiceWorkerResponseObject,
-} from '@/scaling/redis/redis-service-commands';
 import type { RedisClientService } from '@/services/redis-client.service';
 
 import { Publisher } from '../pubsub/publisher.service';
+import type { PubSub } from '../pubsub/pubsub.types';
 
 describe('Publisher', () => {
 	let queueModeId: string;
@@ -49,13 +46,13 @@ describe('Publisher', () => {
 	describe('publishCommand', () => {
 		it('should publish command into `n8n.commands` pubsub channel', async () => {
 			const publisher = new Publisher(mock(), redisClientService);
-			const msg = mock<RedisServiceCommandObject>({ command: 'reloadLicense' });
+			const msg = mock<PubSub.Command>({ command: 'reload-license' });
 
 			await publisher.publishCommand(msg);
 
 			expect(client.publish).toHaveBeenCalledWith(
 				'n8n.commands',
-				JSON.stringify({ ...msg, senderId: queueModeId }),
+				JSON.stringify({ ...msg, senderId: queueModeId, selfSend: false, debounce: true }),
 			);
 		});
 	});
@@ -63,8 +60,8 @@ describe('Publisher', () => {
 	describe('publishWorkerResponse', () => {
 		it('should publish worker response into `n8n.worker-response` pubsub channel', async () => {
 			const publisher = new Publisher(mock(), redisClientService);
-			const msg = mock<RedisServiceWorkerResponseObject>({
-				command: 'reloadExternalSecretsProviders',
+			const msg = mock<PubSub.WorkerResponse>({
+				response: 'response-to-get-worker-status',
 			});
 
 			await publisher.publishWorkerResponse(msg);
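The new `pubsub-handler.test.ts` below exercises one idea repeatedly: depending on `instanceSettings.instanceType`, `PubSubHandler` registers a map from kebab-case pubsub event names to handler functions on the event service. A reduced sketch of that registration pattern, with a plain `EventEmitter` standing in for n8n's `EventService` and only a few of the event names:

```ts
import { EventEmitter } from 'node:events';

type PubSubHandlerMap = Record<string, (...args: unknown[]) => void>;

// Register one handler per pubsub event, as PubSubHandler.setupHandlers() does.
function setupHandlers(events: EventEmitter, handlers: PubSubHandlerMap) {
	for (const [eventName, handler] of Object.entries(handlers)) {
		events.on(eventName, handler);
	}
}

const events = new EventEmitter();

setupHandlers(events, {
	'reload-license': () => console.log('reloading license'),
	'restart-event-bus': () => console.log('restarting event bus'),
	// Only worker instances register this one in the real handler map
	'get-worker-status': () => console.log('generating worker status'),
});

events.emit('reload-license'); // -> 'reloading license'
```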
diff --git a/packages/cli/src/scaling/__tests__/pubsub-handler.test.ts b/packages/cli/src/scaling/__tests__/pubsub-handler.test.ts
new file mode 100644
index 0000000000..314ded0b8b
--- /dev/null
+++ b/packages/cli/src/scaling/__tests__/pubsub-handler.test.ts
@@ -0,0 +1,878 @@
+import type { WorkerStatus } from '@n8n/api-types';
+import { mock } from 'jest-mock-extended';
+import type { InstanceSettings } from 'n8n-core';
+import type { Workflow } from 'n8n-workflow';
+
+import type { ActiveWorkflowManager } from '@/active-workflow-manager';
+import type { WorkflowRepository } from '@/databases/repositories/workflow.repository';
+import type { MessageEventBus } from '@/eventbus/message-event-bus/message-event-bus';
+import { EventService } from '@/events/event.service';
+import type { ExternalSecretsManager } from '@/external-secrets/external-secrets-manager.ee';
+import type { IWorkflowDb } from '@/interfaces';
+import type { License } from '@/license';
+import type { Push } from '@/push';
+import type { WebSocketPush } from '@/push/websocket.push';
+import type { CommunityPackagesService } from '@/services/community-packages.service';
+import type { TestWebhooks } from '@/webhooks/test-webhooks';
+
+import type { Publisher } from '../pubsub/publisher.service';
+import { PubSubHandler } from '../pubsub/pubsub-handler';
+import type { WorkerStatusService } from '../worker-status.service';
+
+const flushPromises = async () => await new Promise((resolve) => setImmediate(resolve));
+
+describe('PubSubHandler', () => {
+	const eventService = new EventService();
+	const license = mock<License>();
+	const eventbus = mock<MessageEventBus>();
+	const externalSecretsManager = mock<ExternalSecretsManager>();
+	const communityPackagesService = mock<CommunityPackagesService>();
+	const publisher = mock<Publisher>();
+	const workerStatusService = mock<WorkerStatusService>();
+	const activeWorkflowManager = mock<ActiveWorkflowManager>();
+	const push = mock<Push>();
+	const workflowRepository = mock<WorkflowRepository>();
+	const testWebhooks = mock<TestWebhooks>();
+
+	afterEach(() => {
+		eventService.removeAllListeners();
+	});
+
+	describe('in webhook process', () => {
+		const instanceSettings = mock<InstanceSettings>({ instanceType: 'webhook' });
+
+		it('should set up handlers in webhook process', () => {
+			// @ts-expect-error Spying on private method
+			const setupHandlers = jest.spyOn(PubSubHandler.prototype, 'setupHandlers');
+
+			new PubSubHandler(
+				eventService,
+				instanceSettings,
+				license,
+				eventbus,
+				externalSecretsManager,
+				communityPackagesService,
+				publisher,
+				workerStatusService,
+				activeWorkflowManager,
+				push,
+				workflowRepository,
+				testWebhooks,
+			).init();
+
+			expect(setupHandlers).toHaveBeenCalledWith({
+				'reload-license': expect.any(Function),
+				'restart-event-bus': expect.any(Function),
+				'reload-external-secrets-providers': expect.any(Function),
+				'community-package-install': expect.any(Function),
+				'community-package-update': expect.any(Function),
+				'community-package-uninstall': expect.any(Function),
+			});
+		});
+
+		it('should reload license on `reload-license` event', () => {
+			new PubSubHandler(
+				eventService,
+				instanceSettings,
+				license,
+				eventbus,
+				externalSecretsManager,
+				communityPackagesService,
+				publisher,
+				workerStatusService,
+				activeWorkflowManager,
+				push,
+				workflowRepository,
+				testWebhooks,
+			).init();
+
+			eventService.emit('reload-license');
+
+			expect(license.reload).toHaveBeenCalled();
+		});
+
+		it('should restart event bus on `restart-event-bus` event', () => {
+			new PubSubHandler(
+				eventService,
+				instanceSettings,
+				license,
+				eventbus,
+				externalSecretsManager,
+				communityPackagesService,
+				publisher,
+				workerStatusService,
+				activeWorkflowManager,
+				push,
+				workflowRepository,
+				testWebhooks,
+			).init();
+
+			eventService.emit('restart-event-bus');
+
+			expect(eventbus.restart).toHaveBeenCalled();
+		});
+
+		it('should reload providers on `reload-external-secrets-providers` event', () => {
+			new PubSubHandler(
+				eventService,
+				instanceSettings,
+				license,
+				eventbus,
+				externalSecretsManager,
+				communityPackagesService,
+				publisher,
+				workerStatusService,
+				activeWorkflowManager,
+				push,
+				workflowRepository,
+				testWebhooks,
+			).init();
+
+			eventService.emit('reload-external-secrets-providers');
+
+			expect(externalSecretsManager.reloadAllProviders).toHaveBeenCalled();
+		});
+
+		it('should install community package on `community-package-install` event', () => {
+			new PubSubHandler(
+				eventService,
+				instanceSettings,
+				license,
+				eventbus,
+				externalSecretsManager,
+				communityPackagesService,
+				publisher,
+				workerStatusService,
+				activeWorkflowManager,
+				push,
+				workflowRepository,
+				testWebhooks,
+			).init();
+
+			eventService.emit('community-package-install', {
+				packageName: 'test-package',
+				packageVersion: '1.0.0',
+			});
+
+			expect(communityPackagesService.installOrUpdateNpmPackage).toHaveBeenCalledWith(
+				'test-package',
+				'1.0.0',
+			);
+		});
+
+		it('should update community package
on `community-package-update` event', () => { + new PubSubHandler( + eventService, + instanceSettings, + license, + eventbus, + externalSecretsManager, + communityPackagesService, + publisher, + workerStatusService, + activeWorkflowManager, + push, + workflowRepository, + testWebhooks, + ).init(); + + eventService.emit('community-package-update', { + packageName: 'test-package', + packageVersion: '1.0.0', + }); + + expect(communityPackagesService.installOrUpdateNpmPackage).toHaveBeenCalledWith( + 'test-package', + '1.0.0', + ); + }); + + it('should uninstall community package on `community-package-uninstall` event', () => { + new PubSubHandler( + eventService, + instanceSettings, + license, + eventbus, + externalSecretsManager, + communityPackagesService, + publisher, + workerStatusService, + activeWorkflowManager, + push, + workflowRepository, + testWebhooks, + ).init(); + + eventService.emit('community-package-uninstall', { + packageName: 'test-package', + }); + + expect(communityPackagesService.removeNpmPackage).toHaveBeenCalledWith('test-package'); + }); + }); + + describe('in worker process', () => { + const instanceSettings = mock({ instanceType: 'worker' }); + + it('should set up handlers in worker process', () => { + // @ts-expect-error Spying on private method + const setupHandlersSpy = jest.spyOn(PubSubHandler.prototype, 'setupHandlers'); + + new PubSubHandler( + eventService, + instanceSettings, + license, + eventbus, + externalSecretsManager, + communityPackagesService, + publisher, + workerStatusService, + activeWorkflowManager, + push, + workflowRepository, + testWebhooks, + ).init(); + + expect(setupHandlersSpy).toHaveBeenCalledWith({ + 'reload-license': expect.any(Function), + 'restart-event-bus': expect.any(Function), + 'reload-external-secrets-providers': expect.any(Function), + 'community-package-install': expect.any(Function), + 'community-package-update': expect.any(Function), + 'community-package-uninstall': expect.any(Function), + 'get-worker-status': expect.any(Function), + }); + }); + + it('should reload license on `reload-license` event', () => { + new PubSubHandler( + eventService, + instanceSettings, + license, + eventbus, + externalSecretsManager, + communityPackagesService, + publisher, + workerStatusService, + activeWorkflowManager, + push, + workflowRepository, + testWebhooks, + ).init(); + + eventService.emit('reload-license'); + + expect(license.reload).toHaveBeenCalled(); + }); + + it('should restart event bus on `restart-event-bus` event', () => { + new PubSubHandler( + eventService, + instanceSettings, + license, + eventbus, + externalSecretsManager, + communityPackagesService, + publisher, + workerStatusService, + activeWorkflowManager, + push, + workflowRepository, + testWebhooks, + ).init(); + + eventService.emit('restart-event-bus'); + + expect(eventbus.restart).toHaveBeenCalled(); + }); + + it('should reload providers on `reload-external-secrets-providers` event', () => { + new PubSubHandler( + eventService, + instanceSettings, + license, + eventbus, + externalSecretsManager, + communityPackagesService, + publisher, + workerStatusService, + activeWorkflowManager, + push, + workflowRepository, + testWebhooks, + ).init(); + + eventService.emit('reload-external-secrets-providers'); + + expect(externalSecretsManager.reloadAllProviders).toHaveBeenCalled(); + }); + + it('should install community package on `community-package-install` event', () => { + new PubSubHandler( + eventService, + instanceSettings, + license, + eventbus, + externalSecretsManager, 
+ communityPackagesService, + publisher, + workerStatusService, + activeWorkflowManager, + push, + workflowRepository, + testWebhooks, + ).init(); + + eventService.emit('community-package-install', { + packageName: 'test-package', + packageVersion: '1.0.0', + }); + + expect(communityPackagesService.installOrUpdateNpmPackage).toHaveBeenCalledWith( + 'test-package', + '1.0.0', + ); + }); + + it('should update community package on `community-package-update` event', () => { + new PubSubHandler( + eventService, + instanceSettings, + license, + eventbus, + externalSecretsManager, + communityPackagesService, + publisher, + workerStatusService, + activeWorkflowManager, + push, + workflowRepository, + testWebhooks, + ).init(); + + eventService.emit('community-package-update', { + packageName: 'test-package', + packageVersion: '1.0.0', + }); + + expect(communityPackagesService.installOrUpdateNpmPackage).toHaveBeenCalledWith( + 'test-package', + '1.0.0', + ); + }); + + it('should uninstall community package on `community-package-uninstall` event', () => { + new PubSubHandler( + eventService, + instanceSettings, + license, + eventbus, + externalSecretsManager, + communityPackagesService, + publisher, + workerStatusService, + activeWorkflowManager, + push, + workflowRepository, + testWebhooks, + ).init(); + + eventService.emit('community-package-uninstall', { + packageName: 'test-package', + }); + + expect(communityPackagesService.removeNpmPackage).toHaveBeenCalledWith('test-package'); + }); + + it('should generate status on `get-worker-status` event', () => { + new PubSubHandler( + eventService, + instanceSettings, + license, + eventbus, + externalSecretsManager, + communityPackagesService, + publisher, + workerStatusService, + activeWorkflowManager, + push, + workflowRepository, + testWebhooks, + ).init(); + + eventService.emit('get-worker-status'); + + expect(workerStatusService.generateStatus).toHaveBeenCalled(); + }); + }); + + describe('in main process', () => { + const instanceSettings = mock({ + instanceType: 'main', + isLeader: true, + isFollower: false, + }); + + afterEach(() => { + jest.clearAllMocks(); + }); + + it('should set up command and worker response handlers in main process', () => { + // @ts-expect-error Spying on private method + const setupHandlersSpy = jest.spyOn(PubSubHandler.prototype, 'setupHandlers'); + + new PubSubHandler( + eventService, + instanceSettings, + license, + eventbus, + externalSecretsManager, + communityPackagesService, + publisher, + workerStatusService, + activeWorkflowManager, + push, + workflowRepository, + testWebhooks, + ).init(); + + expect(setupHandlersSpy).toHaveBeenCalledWith({ + 'reload-license': expect.any(Function), + 'restart-event-bus': expect.any(Function), + 'reload-external-secrets-providers': expect.any(Function), + 'community-package-install': expect.any(Function), + 'community-package-update': expect.any(Function), + 'community-package-uninstall': expect.any(Function), + 'add-webhooks-triggers-and-pollers': expect.any(Function), + 'remove-triggers-and-pollers': expect.any(Function), + 'display-workflow-activation': expect.any(Function), + 'display-workflow-deactivation': expect.any(Function), + 'display-workflow-activation-error': expect.any(Function), + 'relay-execution-lifecycle-event': expect.any(Function), + 'clear-test-webhooks': expect.any(Function), + 'response-to-get-worker-status': expect.any(Function), + }); + }); + + it('should reload license on `reload-license` event', () => { + new PubSubHandler( + eventService, + 
instanceSettings, + license, + eventbus, + externalSecretsManager, + communityPackagesService, + publisher, + workerStatusService, + activeWorkflowManager, + push, + workflowRepository, + testWebhooks, + ).init(); + + eventService.emit('reload-license'); + + expect(license.reload).toHaveBeenCalled(); + }); + + it('should restart event bus on `restart-event-bus` event', () => { + new PubSubHandler( + eventService, + instanceSettings, + license, + eventbus, + externalSecretsManager, + communityPackagesService, + publisher, + workerStatusService, + activeWorkflowManager, + push, + workflowRepository, + testWebhooks, + ).init(); + + eventService.emit('restart-event-bus'); + + expect(eventbus.restart).toHaveBeenCalled(); + }); + + it('should reload providers on `reload-external-secrets-providers` event', () => { + new PubSubHandler( + eventService, + instanceSettings, + license, + eventbus, + externalSecretsManager, + communityPackagesService, + publisher, + workerStatusService, + activeWorkflowManager, + push, + workflowRepository, + testWebhooks, + ).init(); + + eventService.emit('reload-external-secrets-providers'); + + expect(externalSecretsManager.reloadAllProviders).toHaveBeenCalled(); + }); + + it('should install community package on `community-package-install` event', () => { + new PubSubHandler( + eventService, + instanceSettings, + license, + eventbus, + externalSecretsManager, + communityPackagesService, + publisher, + workerStatusService, + activeWorkflowManager, + push, + workflowRepository, + testWebhooks, + ).init(); + + eventService.emit('community-package-install', { + packageName: 'test-package', + packageVersion: '1.0.0', + }); + + expect(communityPackagesService.installOrUpdateNpmPackage).toHaveBeenCalledWith( + 'test-package', + '1.0.0', + ); + }); + + it('should update community package on `community-package-update` event', () => { + new PubSubHandler( + eventService, + instanceSettings, + license, + eventbus, + externalSecretsManager, + communityPackagesService, + publisher, + workerStatusService, + activeWorkflowManager, + push, + workflowRepository, + testWebhooks, + ).init(); + + eventService.emit('community-package-update', { + packageName: 'test-package', + packageVersion: '1.0.0', + }); + + expect(communityPackagesService.installOrUpdateNpmPackage).toHaveBeenCalledWith( + 'test-package', + '1.0.0', + ); + }); + + it('should uninstall community package on `community-package-uninstall` event', () => { + new PubSubHandler( + eventService, + instanceSettings, + license, + eventbus, + externalSecretsManager, + communityPackagesService, + publisher, + workerStatusService, + activeWorkflowManager, + push, + workflowRepository, + testWebhooks, + ).init(); + + eventService.emit('community-package-uninstall', { + packageName: 'test-package', + }); + + expect(communityPackagesService.removeNpmPackage).toHaveBeenCalledWith('test-package'); + }); + + describe('multi-main setup', () => { + it('if leader, should handle `add-webhooks-triggers-and-pollers` event', async () => { + new PubSubHandler( + eventService, + instanceSettings, + license, + eventbus, + externalSecretsManager, + communityPackagesService, + publisher, + workerStatusService, + activeWorkflowManager, + push, + workflowRepository, + testWebhooks, + ).init(); + + const workflowId = 'test-workflow-id'; + + eventService.emit('add-webhooks-triggers-and-pollers', { workflowId }); + + await flushPromises(); + + expect(activeWorkflowManager.add).toHaveBeenCalledWith(workflowId, 'activate', undefined, { + shouldPublish: 
false, + }); + expect(push.broadcast).toHaveBeenCalledWith('workflowActivated', { workflowId }); + expect(publisher.publishCommand).toHaveBeenCalledWith({ + command: 'display-workflow-activation', + payload: { workflowId }, + }); + }); + + it('if follower, should skip `add-webhooks-triggers-and-pollers` event', async () => { + new PubSubHandler( + eventService, + mock({ instanceType: 'main', isLeader: false, isFollower: true }), + license, + eventbus, + externalSecretsManager, + communityPackagesService, + publisher, + workerStatusService, + activeWorkflowManager, + push, + workflowRepository, + testWebhooks, + ).init(); + + const workflowId = 'test-workflow-id'; + + eventService.emit('add-webhooks-triggers-and-pollers', { workflowId }); + + await flushPromises(); + + expect(activeWorkflowManager.add).not.toHaveBeenCalled(); + expect(push.broadcast).not.toHaveBeenCalled(); + expect(publisher.publishCommand).not.toHaveBeenCalled(); + }); + + it('if leader, should handle `remove-triggers-and-pollers` event', async () => { + new PubSubHandler( + eventService, + instanceSettings, + license, + eventbus, + externalSecretsManager, + communityPackagesService, + publisher, + workerStatusService, + activeWorkflowManager, + push, + workflowRepository, + testWebhooks, + ).init(); + + const workflowId = 'test-workflow-id'; + + eventService.emit('remove-triggers-and-pollers', { workflowId }); + + await flushPromises(); + + expect(activeWorkflowManager.removeActivationError).toHaveBeenCalledWith(workflowId); + expect(activeWorkflowManager.removeWorkflowTriggersAndPollers).toHaveBeenCalledWith( + workflowId, + ); + expect(push.broadcast).toHaveBeenCalledWith('workflowDeactivated', { workflowId }); + expect(publisher.publishCommand).toHaveBeenCalledWith({ + command: 'display-workflow-deactivation', + payload: { workflowId }, + }); + }); + + it('if follower, should skip `remove-triggers-and-pollers` event', async () => { + new PubSubHandler( + eventService, + mock({ instanceType: 'main', isLeader: false, isFollower: true }), + license, + eventbus, + externalSecretsManager, + communityPackagesService, + publisher, + workerStatusService, + activeWorkflowManager, + push, + workflowRepository, + testWebhooks, + ).init(); + + const workflowId = 'test-workflow-id'; + + eventService.emit('remove-triggers-and-pollers', { workflowId }); + + await flushPromises(); + + expect(activeWorkflowManager.removeActivationError).not.toHaveBeenCalled(); + expect(activeWorkflowManager.removeWorkflowTriggersAndPollers).not.toHaveBeenCalled(); + expect(push.broadcast).not.toHaveBeenCalled(); + expect(publisher.publishCommand).not.toHaveBeenCalled(); + }); + + it('should handle `display-workflow-activation` event', () => { + new PubSubHandler( + eventService, + instanceSettings, + license, + eventbus, + externalSecretsManager, + communityPackagesService, + publisher, + workerStatusService, + activeWorkflowManager, + push, + workflowRepository, + testWebhooks, + ).init(); + + const workflowId = 'test-workflow-id'; + + eventService.emit('display-workflow-activation', { workflowId }); + + expect(push.broadcast).toHaveBeenCalledWith('workflowActivated', { workflowId }); + }); + + it('should handle `display-workflow-deactivation` event', () => { + new PubSubHandler( + eventService, + instanceSettings, + license, + eventbus, + externalSecretsManager, + communityPackagesService, + publisher, + workerStatusService, + activeWorkflowManager, + push, + workflowRepository, + testWebhooks, + ).init(); + + const workflowId = 'test-workflow-id'; 
+
+			eventService.emit('display-workflow-deactivation', { workflowId });
+
+			expect(push.broadcast).toHaveBeenCalledWith('workflowDeactivated', { workflowId });
+		});
+
+		it('should handle `display-workflow-activation-error` event', () => {
+			new PubSubHandler(
+				eventService,
+				instanceSettings,
+				license,
+				eventbus,
+				externalSecretsManager,
+				communityPackagesService,
+				publisher,
+				workerStatusService,
+				activeWorkflowManager,
+				push,
+				workflowRepository,
+				testWebhooks,
+			).init();
+
+			const workflowId = 'test-workflow-id';
+			const errorMessage = 'Test error message';
+
+			eventService.emit('display-workflow-activation-error', { workflowId, errorMessage });
+
+			expect(push.broadcast).toHaveBeenCalledWith('workflowFailedToActivate', {
+				workflowId,
+				errorMessage,
+			});
+		});
+
+		it('should handle `relay-execution-lifecycle-event` event', () => {
+			new PubSubHandler(
+				eventService,
+				instanceSettings,
+				license,
+				eventbus,
+				externalSecretsManager,
+				communityPackagesService,
+				publisher,
+				workerStatusService,
+				activeWorkflowManager,
+				push,
+				workflowRepository,
+				testWebhooks,
+			).init();
+
+			const pushRef = 'test-push-ref';
+			const type = 'executionStarted';
+			const args = { testArg: 'value' };
+
+			push.getBackend.mockReturnValue(
+				mock({ hasPushRef: jest.fn().mockReturnValue(true) }),
+			);
+
+			eventService.emit('relay-execution-lifecycle-event', { type, args, pushRef });
+
+			expect(push.send).toHaveBeenCalledWith(type, args, pushRef);
+		});
+
+		it('should handle `clear-test-webhooks` event', () => {
+			new PubSubHandler(
+				eventService,
+				instanceSettings,
+				license,
+				eventbus,
+				externalSecretsManager,
+				communityPackagesService,
+				publisher,
+				workerStatusService,
+				activeWorkflowManager,
+				push,
+				workflowRepository,
+				testWebhooks,
+			).init();
+
+			const webhookKey = 'test-webhook-key';
+			const workflowEntity = mock({ id: 'test-workflow-id' });
+			const pushRef = 'test-push-ref';
+
+			push.getBackend.mockReturnValue(
+				mock({ hasPushRef: jest.fn().mockReturnValue(true) }),
+			);
+			testWebhooks.toWorkflow.mockReturnValue(mock({ id: 'test-workflow-id' }));
+
+			eventService.emit('clear-test-webhooks', { webhookKey, workflowEntity, pushRef });
+
+			expect(testWebhooks.clearTimeout).toHaveBeenCalledWith(webhookKey);
+			expect(testWebhooks.deactivateWebhooks).toHaveBeenCalled();
+		});
+
+		it('should handle `response-to-get-worker-status` event', () => {
+			new PubSubHandler(
+				eventService,
+				instanceSettings,
+				license,
+				eventbus,
+				externalSecretsManager,
+				communityPackagesService,
+				publisher,
+				workerStatusService,
+				activeWorkflowManager,
+				push,
+				workflowRepository,
+				testWebhooks,
+			).init();
+
+			const workerStatus = mock({ senderId: 'worker-1', loadAvg: [123] });
+
+			eventService.emit('response-to-get-worker-status', workerStatus);
+
+			expect(push.broadcast).toHaveBeenCalledWith('sendWorkerStatusMessage', {
+				workerId: workerStatus.senderId,
+				status: workerStatus,
+			});
+		});
+	});
+});
diff --git a/packages/cli/src/scaling/__tests__/scaling.service.test.ts b/packages/cli/src/scaling/__tests__/scaling.service.test.ts
index 9beae22af6..a6c14ab964 100644
--- a/packages/cli/src/scaling/__tests__/scaling.service.test.ts
+++ b/packages/cli/src/scaling/__tests__/scaling.service.test.ts
@@ -6,12 +6,12 @@ import { ApplicationError } from 'n8n-workflow';
 import Container from 'typedi';
 
 import type { OrchestrationService } from '@/services/orchestration.service';
-import { mockInstance } from '@test/mocking';
+import { mockInstance, mockLogger } from '@test/mocking';
 
 import {
JOB_TYPE_NAME, QUEUE_NAME } from '../constants'; import type { JobProcessor } from '../job-processor'; import { ScalingService } from '../scaling.service'; -import type { Job, JobData, JobOptions, JobQueue } from '../scaling.types'; +import type { Job, JobData, JobQueue } from '../scaling.types'; const queue = mock({ client: { ping: jest.fn() }, @@ -74,7 +74,7 @@ describe('ScalingService', () => { instanceSettings.markAsLeader(); scalingService = new ScalingService( - mock(), + mockLogger(), mock(), jobProcessor, globalConfig, @@ -208,10 +208,13 @@ describe('ScalingService', () => { queue.add.mockResolvedValue(mock({ id: '456' })); const jobData = mock({ executionId: '123' }); - const jobOptions = mock(); - await scalingService.addJob(jobData, jobOptions); + await scalingService.addJob(jobData, { priority: 100 }); - expect(queue.add).toHaveBeenCalledWith(JOB_TYPE_NAME, jobData, jobOptions); + expect(queue.add).toHaveBeenCalledWith(JOB_TYPE_NAME, jobData, { + priority: 100, + removeOnComplete: true, + removeOnFail: true, + }); }); }); diff --git a/packages/cli/src/scaling/__tests__/subscriber.service.test.ts b/packages/cli/src/scaling/__tests__/subscriber.service.test.ts index 14e67a8d4d..62834dba33 100644 --- a/packages/cli/src/scaling/__tests__/subscriber.service.test.ts +++ b/packages/cli/src/scaling/__tests__/subscriber.service.test.ts @@ -17,14 +17,14 @@ describe('Subscriber', () => { describe('constructor', () => { it('should init Redis client in scaling mode', () => { - const subscriber = new Subscriber(mock(), redisClientService); + const subscriber = new Subscriber(mock(), redisClientService, mock()); expect(subscriber.getClient()).toEqual(client); }); it('should not init Redis client in regular mode', () => { config.set('executions.mode', 'regular'); - const subscriber = new Subscriber(mock(), redisClientService); + const subscriber = new Subscriber(mock(), redisClientService, mock()); expect(subscriber.getClient()).toBeUndefined(); }); @@ -32,7 +32,7 @@ describe('Subscriber', () => { describe('shutdown', () => { it('should disconnect Redis client', () => { - const subscriber = new Subscriber(mock(), redisClientService); + const subscriber = new Subscriber(mock(), redisClientService, mock()); subscriber.shutdown(); expect(client.disconnect).toHaveBeenCalled(); }); @@ -40,24 +40,11 @@ describe('Subscriber', () => { describe('subscribe', () => { it('should subscribe to pubsub channel', async () => { - const subscriber = new Subscriber(mock(), redisClientService); + const subscriber = new Subscriber(mock(), redisClientService, mock()); await subscriber.subscribe('n8n.commands'); expect(client.subscribe).toHaveBeenCalledWith('n8n.commands', expect.any(Function)); }); }); - - describe('setMessageHandler', () => { - it('should set message handler function for channel', () => { - const subscriber = new Subscriber(mock(), redisClientService); - const channel = 'n8n.commands'; - const handlerFn = jest.fn(); - - subscriber.setMessageHandler(channel, handlerFn); - - // @ts-expect-error Private field - expect(subscriber.handlers).toEqual(new Map([[channel, handlerFn]])); - }); - }); }); diff --git a/packages/cli/src/scaling/__tests__/worker-server.test.ts b/packages/cli/src/scaling/__tests__/worker-server.test.ts index d5716bfac4..778d403bf2 100644 --- a/packages/cli/src/scaling/__tests__/worker-server.test.ts +++ b/packages/cli/src/scaling/__tests__/worker-server.test.ts @@ -5,7 +5,6 @@ import type { InstanceSettings } from 'n8n-core'; import { AssertionError } from 'node:assert'; import * 
as http from 'node:http';
 
-import { PortTakenError } from '@/errors/port-taken.error';
 import type { ExternalHooks } from '@/external-hooks';
 import type { PrometheusMetricsService } from '@/metrics/prometheus-metrics.service';
 import { bodyParser, rawBodyReader } from '@/middlewares';
@@ -34,7 +33,7 @@ describe('WorkerServer', () => {
 	beforeEach(() => {
 		globalConfig = mock({
 			queue: {
-				health: { active: true, port: 5678 },
+				health: { active: true, port: 5678, address: '0.0.0.0' },
 			},
 			credentials: {
 				overwrite: { endpoint: '' },
@@ -51,16 +50,19 @@ describe('WorkerServer', () => {
 					globalConfig,
 					mock(),
 					mock(),
-					mock(),
 					externalHooks,
 					mock({ instanceType: 'webhook' }),
 					prometheusMetricsService,
+					mock(),
 				),
 		).toThrowError(AssertionError);
 	});
 
-	it('should throw if port taken', async () => {
+	it('should exit if port taken', async () => {
 		const server = mock();
+		const processExitSpy = jest
+			.spyOn(process, 'exit')
+			.mockImplementation(() => undefined as never);
 
 		jest.spyOn(http, 'createServer').mockReturnValue(server);
 
@@ -69,18 +71,19 @@ describe('WorkerServer', () => {
 			return server;
 		});
 
-		expect(
-			() =>
-				new WorkerServer(
-					globalConfig,
-					mock(),
-					mock(),
-					mock(),
-					externalHooks,
-					instanceSettings,
-					prometheusMetricsService,
-				),
-		).toThrowError(PortTakenError);
+		new WorkerServer(
+			globalConfig,
+			mock(),
+			mock(),
+			externalHooks,
+			instanceSettings,
+			prometheusMetricsService,
+			mock(),
+		);
+
+		expect(processExitSpy).toHaveBeenCalledWith(1);
+
+		processExitSpy.mockRestore();
 	});
 });
@@ -89,8 +92,9 @@ describe('WorkerServer', () => {
 		const server = mock();
 		jest.spyOn(http, 'createServer').mockReturnValue(server);
 
-		server.listen.mockImplementation((_port, callback: () => void) => {
-			callback();
+		server.listen.mockImplementation((...args: unknown[]) => {
+			const callback = args.find((arg) => typeof arg === 'function');
+			if (callback) callback();
 			return server;
 		});
@@ -98,10 +102,10 @@ describe('WorkerServer', () => {
 			globalConfig,
 			mock(),
 			mock(),
-			mock(),
 			externalHooks,
 			instanceSettings,
 			prometheusMetricsService,
+			mock(),
 		);
 
 		const CREDENTIALS_OVERWRITE_ENDPOINT = 'credentials/overwrites';
@@ -123,8 +127,9 @@ describe('WorkerServer', () => {
 		const server = mock();
 		jest.spyOn(http, 'createServer').mockReturnValue(server);
 
-		server.listen.mockImplementation((_port, callback: () => void) => {
-			callback();
+		server.listen.mockImplementation((...args: unknown[]) => {
+			const callback = args.find((arg) => typeof arg === 'function');
+			if (callback) callback();
 			return server;
 		});
@@ -132,10 +137,10 @@ describe('WorkerServer', () => {
 			globalConfig,
 			mock(),
 			mock(),
-			mock(),
 			externalHooks,
 			instanceSettings,
 			prometheusMetricsService,
+			mock(),
 		);
 
 		await workerServer.init({ health: true, overwrites: false, metrics: true });
@@ -153,10 +158,10 @@ describe('WorkerServer', () => {
 			globalConfig,
 			mock(),
 			mock(),
-			mock(),
 			externalHooks,
 			instanceSettings,
 			prometheusMetricsService,
+			mock(),
 		);
 		await expect(
 			workerServer.init({ health: false, overwrites: false, metrics: false }),
@@ -171,14 +176,15 @@ describe('WorkerServer', () => {
 			globalConfig,
 			mock(),
 			mock(),
-			mock(),
 			externalHooks,
 			instanceSettings,
 			prometheusMetricsService,
+			mock(),
 		);
 
-		server.listen.mockImplementation((_port, callback: () => void) => {
-			callback();
+		server.listen.mockImplementation((...args: unknown[]) => {
+			const callback = args.find((arg) => typeof arg === 'function');
+			if (callback) callback();
 			return server;
 		});
diff --git a/packages/cli/src/scaling/constants.ts b/packages/cli/src/scaling/constants.ts
index f1e55d7ab1..348f156896 100644
--- a/packages/cli/src/scaling/constants.ts
+++ b/packages/cli/src/scaling/constants.ts
@@ -7,3 +7,17 @@ export const COMMAND_PUBSUB_CHANNEL = 'n8n.commands';
 
 /** Pubsub channel for messages sent by workers in response to commands from main processes. */
 export const WORKER_RESPONSE_PUBSUB_CHANNEL = 'n8n.worker-response';
+
+/**
+ * Commands that should be sent to the sender as well, e.g. during workflow activation and
+ * deactivation in multi-main setup. */
+export const SELF_SEND_COMMANDS = new Set([
+	'add-webhooks-triggers-and-pollers',
+	'remove-triggers-and-pollers',
+]);
+
+/**
+ * Commands that should not be debounced when received, e.g. during webhook handling in
+ * multi-main setup.
+ */
+export const IMMEDIATE_COMMANDS = new Set(['relay-execution-lifecycle-event']);
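
Editor's aside, not part of the diff: a minimal sketch of how these two sets are meant to shape outgoing command messages. The `toWireMessage` helper and the ID `'main-1'` are hypothetical, written only to illustrate the `selfSend` and `debounce` flags that `Publisher.publishCommand` attaches in the next diff.

```ts
// Hypothetical helper mirroring the flag logic: a command is marked `selfSend`
// if the sender should also process it, and `debounce` unless it must be
// handled immediately on receipt.
const SELF_SEND_COMMANDS = new Set([
	'add-webhooks-triggers-and-pollers',
	'remove-triggers-and-pollers',
]);
const IMMEDIATE_COMMANDS = new Set(['relay-execution-lifecycle-event']);

function toWireMessage(command: string, senderId: string, payload?: unknown): string {
	return JSON.stringify({
		command,
		senderId,
		payload,
		selfSend: SELF_SEND_COMMANDS.has(command),
		debounce: !IMMEDIATE_COMMANDS.has(command),
	});
}

// 'remove-triggers-and-pollers'     -> { selfSend: true,  debounce: true }
// 'relay-execution-lifecycle-event' -> { selfSend: false, debounce: false }
console.log(toWireMessage('remove-triggers-and-pollers', 'main-1', { workflowId: 'abc' }));
```
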
diff --git a/packages/cli/src/scaling/job-processor.ts b/packages/cli/src/scaling/job-processor.ts
index 3155b0d90f..49e1383ac6 100644
--- a/packages/cli/src/scaling/job-processor.ts
+++ b/packages/cli/src/scaling/job-processor.ts
@@ -8,7 +8,7 @@ import { Service } from 'typedi';
 import config from '@/config';
 import { ExecutionRepository } from '@/databases/repositories/execution.repository';
 import { WorkflowRepository } from '@/databases/repositories/workflow.repository';
-import { Logger } from '@/logger';
+import { Logger } from '@/logging/logger.service';
 import { NodeTypes } from '@/node-types';
 import * as WorkflowExecuteAdditionalData from '@/workflow-execute-additional-data';
@@ -47,7 +47,7 @@ export class JobProcessor {
 
 		this.logger.info(`[JobProcessor] Starting job ${job.id} (execution ${executionId})`);
 
-		await this.executionRepository.updateStatus(executionId, 'running');
+		const startedAt = await this.executionRepository.setRunning(executionId);
 
 		let { staticData } = execution.workflowData;
 
@@ -137,7 +137,7 @@ export class JobProcessor {
 			workflowId: execution.workflowId,
 			workflowName: execution.workflowData.name,
 			mode: execution.mode,
-			startedAt: execution.startedAt,
+			startedAt,
 			retryOf: execution.retryOf ?? '',
 			status: execution.status,
 		};
diff --git a/packages/cli/src/scaling/pubsub/publisher.service.ts b/packages/cli/src/scaling/pubsub/publisher.service.ts
index 1f13ef9896..29d31989ff 100644
--- a/packages/cli/src/scaling/pubsub/publisher.service.ts
+++ b/packages/cli/src/scaling/pubsub/publisher.service.ts
@@ -2,13 +2,12 @@ import type { Redis as SingleNodeClient, Cluster as MultiNodeClient } from 'ioredis';
 import { Service } from 'typedi';
 
 import config from '@/config';
-import { Logger } from '@/logger';
-import type {
-	RedisServiceCommandObject,
-	RedisServiceWorkerResponseObject,
-} from '@/scaling/redis/redis-service-commands';
+import { Logger } from '@/logging/logger.service';
 import { RedisClientService } from '@/services/redis-client.service';
 
+import type { PubSub } from './pubsub.types';
+import { IMMEDIATE_COMMANDS, SELF_SEND_COMMANDS } from '../constants';
+
 /**
  * Responsible for publishing messages into the pubsub channels used by scaling mode.
  */
@@ -26,8 +25,6 @@ export class Publisher {
 		if (config.getEnv('executions.mode') !== 'queue') return;
 
 		this.client = this.redisClientService.createClient({ type: 'publisher(n8n)' });
-
-		this.client.on('error', (error) => this.logger.error(error.message));
 	}
 
 	getClient() {
@@ -44,20 +41,25 @@ export class Publisher {
 	// #region Publishing
 
 	/** Publish a command into the `n8n.commands` channel. */
-	async publishCommand(msg: Omit<RedisServiceCommandObject, 'senderId'>) {
+	async publishCommand(msg: Omit<PubSub.Command, 'senderId'>) {
 		await this.client.publish(
 			'n8n.commands',
-			JSON.stringify({ ...msg, senderId: config.getEnv('redis.queueModeId') }),
+			JSON.stringify({
+				...msg,
+				senderId: config.getEnv('redis.queueModeId'),
+				selfSend: SELF_SEND_COMMANDS.has(msg.command),
+				debounce: !IMMEDIATE_COMMANDS.has(msg.command),
+			}),
 		);
 
 		this.logger.debug(`Published ${msg.command} to command channel`);
 	}
 
 	/** Publish a response for a command into the `n8n.worker-response` channel. */
-	async publishWorkerResponse(msg: RedisServiceWorkerResponseObject) {
+	async publishWorkerResponse(msg: PubSub.WorkerResponse) {
 		await this.client.publish('n8n.worker-response', JSON.stringify(msg));
 
-		this.logger.debug(`Published response for ${msg.command} to worker response channel`);
+		this.logger.debug(`Published response ${msg.response} to worker response channel`);
 	}
 
 	// #endregion
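
Editor's aside, not part of the diff: the rough shape of a message on the `n8n.worker-response` channel, as far as it can be read off `publishWorkerResponse` and the worker-status payload used elsewhere in this diff. The local `DemoWorkerResponse` type is a simplified stand-in for `PubSub.WorkerResponse`, and all field values are illustrative.

```ts
// Simplified stand-in for PubSub.WorkerResponse; the payload fields shown are
// a subset of WorkerStatus.
type DemoWorkerResponse = {
	senderId: string;
	response: 'response-to-get-worker-status';
	payload: { senderId: string; loadAvg: number[] };
	targets?: string[];
};

const msg: DemoWorkerResponse = {
	senderId: 'worker-1',
	response: 'response-to-get-worker-status',
	payload: { senderId: 'worker-1', loadAvg: [0.4, 0.3, 0.2] },
};

// `publishWorkerResponse` serializes the whole object onto the channel:
console.log(JSON.stringify(msg));
```
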
diff --git a/packages/cli/src/scaling/pubsub/pubsub-handler.ts b/packages/cli/src/scaling/pubsub/pubsub-handler.ts
new file mode 100644
index 0000000000..ca590dd2c2
--- /dev/null
+++ b/packages/cli/src/scaling/pubsub/pubsub-handler.ts
@@ -0,0 +1,172 @@
+import { InstanceSettings } from 'n8n-core';
+import { ensureError } from 'n8n-workflow';
+import { Service } from 'typedi';
+
+import { ActiveWorkflowManager } from '@/active-workflow-manager';
+import config from '@/config';
+import { WorkflowRepository } from '@/databases/repositories/workflow.repository';
+import { MessageEventBus } from '@/eventbus/message-event-bus/message-event-bus';
+import { EventService } from '@/events/event.service';
+import type { PubSubEventMap } from '@/events/maps/pub-sub.event-map';
+import { ExternalSecretsManager } from '@/external-secrets/external-secrets-manager.ee';
+import { License } from '@/license';
+import { Push } from '@/push';
+import { Publisher } from '@/scaling/pubsub/publisher.service';
+import { CommunityPackagesService } from '@/services/community-packages.service';
+import { assertNever } from '@/utils';
+import { TestWebhooks } from '@/webhooks/test-webhooks';
+
+import type { PubSub } from './pubsub.types';
+import { WorkerStatusService } from '../worker-status.service';
+
+/**
+ * Responsible for handling events emitted from messages received via a pubsub channel.
+ */
+@Service()
+export class PubSubHandler {
+	constructor(
+		private readonly eventService: EventService,
+		private readonly instanceSettings: InstanceSettings,
+		private readonly license: License,
+		private readonly eventbus: MessageEventBus,
+		private readonly externalSecretsManager: ExternalSecretsManager,
+		private readonly communityPackagesService: CommunityPackagesService,
+		private readonly publisher: Publisher,
+		private readonly workerStatusService: WorkerStatusService,
+		private readonly activeWorkflowManager: ActiveWorkflowManager,
+		private readonly push: Push,
+		private readonly workflowRepository: WorkflowRepository,
+		private readonly testWebhooks: TestWebhooks,
+	) {}
+
+	init() {
+		switch (this.instanceSettings.instanceType) {
+			case 'webhook':
+				this.setupHandlers(this.commonHandlers);
+				break;
+			case 'worker':
+				this.setupHandlers({
+					...this.commonHandlers,
+					'get-worker-status': async () =>
+						await this.publisher.publishWorkerResponse({
+							senderId: config.getEnv('redis.queueModeId'),
+							response: 'response-to-get-worker-status',
+							payload: this.workerStatusService.generateStatus(),
+						}),
+				});
+				break;
+			case 'main':
+				this.setupHandlers({
+					...this.commonHandlers,
+					...this.multiMainHandlers,
+					'response-to-get-worker-status': async (payload) =>
+						this.push.broadcast('sendWorkerStatusMessage', {
+							workerId: payload.senderId,
+							status: payload,
+						}),
+				});
+
+				break;
+			default:
+				assertNever(this.instanceSettings.instanceType);
+		}
+	}
+
+	private setupHandlers<EventNames extends keyof PubSubEventMap>(
+		map: {
+			[EventName in EventNames]?: (event: PubSubEventMap[EventName]) => void | Promise<void>;
+		},
+	) {
+		for (const [eventName, handlerFn] of Object.entries(map) as Array<
+			[EventNames, (event: PubSubEventMap[EventNames]) => void | Promise<void>]
+		>) {
+			this.eventService.on(eventName, async (event) => {
+				await handlerFn(event);
+			});
+		}
+	}
+
+	private commonHandlers: {
+		[EventName in keyof PubSub.CommonEvents]: (event: PubSubEventMap[EventName]) => Promise<void>;
+	} = {
+		'reload-license': async () => await this.license.reload(),
+		'restart-event-bus': async () => await this.eventbus.restart(),
+		'reload-external-secrets-providers': async () =>
+			await this.externalSecretsManager.reloadAllProviders(),
+		'community-package-install': async ({ packageName, packageVersion }) =>
+			await this.communityPackagesService.installOrUpdateNpmPackage(packageName, packageVersion),
+		'community-package-update': async ({ packageName, packageVersion }) =>
+			await this.communityPackagesService.installOrUpdateNpmPackage(packageName, packageVersion),
+		'community-package-uninstall': async ({ packageName }) =>
+			await this.communityPackagesService.removeNpmPackage(packageName),
+	};
+
+	private multiMainHandlers: {
+		[EventName in keyof PubSub.MultiMainEvents]: (
+			event: PubSubEventMap[EventName],
+		) => Promise<void>;
+	} = {
+		'add-webhooks-triggers-and-pollers': async ({ workflowId }) => {
+			if (this.instanceSettings.isFollower) return;
+
+			try {
+				await this.activeWorkflowManager.add(workflowId, 'activate', undefined, {
+					shouldPublish: false, // prevent leader from re-publishing message
+				});
+
+				this.push.broadcast('workflowActivated', { workflowId });
+
+				await this.publisher.publishCommand({
+					command: 'display-workflow-activation',
+					payload: { workflowId },
+				}); // instruct followers to show activation in UI
+			} catch (e) {
+				const error = ensureError(e);
+				const { message } = error;
+
+				await this.workflowRepository.update(workflowId, { active: false });
+
+				this.push.broadcast('workflowFailedToActivate', { workflowId, errorMessage: message });
+
+				await this.publisher.publishCommand({
+					command: 'display-workflow-activation-error',
+					payload: { workflowId, errorMessage: message },
+				}); // instruct followers to show activation error in UI
+			}
+		},
+		'remove-triggers-and-pollers': async ({ workflowId }) => {
+			if (this.instanceSettings.isFollower) return;
+
+			await this.activeWorkflowManager.removeActivationError(workflowId);
+			await this.activeWorkflowManager.removeWorkflowTriggersAndPollers(workflowId);
+
+			this.push.broadcast('workflowDeactivated', { workflowId });
+
+			// instruct followers to show workflow deactivation in UI
+			await this.publisher.publishCommand({
+				command: 'display-workflow-deactivation',
+				payload: { workflowId },
+			});
+		},
+		'display-workflow-activation': async ({ workflowId }) =>
+			this.push.broadcast('workflowActivated', { workflowId }),
+		'display-workflow-deactivation': async ({ workflowId }) =>
+			this.push.broadcast('workflowDeactivated', { workflowId }),
+		'display-workflow-activation-error': async ({ workflowId, errorMessage }) =>
+			this.push.broadcast('workflowFailedToActivate', { workflowId, errorMessage }),
+		'relay-execution-lifecycle-event': async ({ type, args, pushRef }) => {
+			if (!this.push.getBackend().hasPushRef(pushRef)) return;
+
+			this.push.send(type, args, pushRef);
+		},
+		'clear-test-webhooks': async ({ webhookKey, workflowEntity, pushRef }) => {
+			if (!this.push.getBackend().hasPushRef(pushRef)) return;
+
+			this.testWebhooks.clearTimeout(webhookKey);
+
+			const workflow = this.testWebhooks.toWorkflow(workflowEntity);
+
+			await this.testWebhooks.deactivateWebhooks(workflow);
+		},
+	};
+}
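
Editor's aside, not part of the diff: a self-contained sketch of the handler-map pattern that `setupHandlers` uses above, with a tiny stand-in event map instead of n8n's real `PubSubEventMap`. All names in this block are illustrative.

```ts
// Each instance role registers only the handlers it supports on a shared emitter.
import { EventEmitter } from 'node:events';

type DemoEventMap = {
	'reload-license': undefined;
	'community-package-install': { packageName: string; packageVersion: string };
};

const eventService = new EventEmitter();

function setupHandlers(map: {
	[E in keyof DemoEventMap]?: (event: DemoEventMap[E]) => void | Promise<void>;
}) {
	for (const [eventName, handlerFn] of Object.entries(map) as Array<
		[string, (event: unknown) => void | Promise<void>]
	>) {
		eventService.on(eventName, (event) => void handlerFn(event));
	}
}

setupHandlers({
	'community-package-install': async ({ packageName, packageVersion }) =>
		console.log(`installing ${packageName}@${packageVersion}`),
});

eventService.emit('community-package-install', {
	packageName: 'n8n-nodes-test',
	packageVersion: '1.0.0',
});
```
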
diff --git a/packages/cli/src/scaling/pubsub/pubsub.types.ts b/packages/cli/src/scaling/pubsub/pubsub.types.ts
index 191f4b62d9..b4d6e1a962 100644
--- a/packages/cli/src/scaling/pubsub/pubsub.types.ts
+++ b/packages/cli/src/scaling/pubsub/pubsub.types.ts
@@ -1,96 +1,129 @@
-import type { PushType, WorkerStatus } from '@n8n/api-types';
-
-import type { IWorkflowDb } from '@/interfaces';
+import type {
+	PubSubCommandMap,
+	PubSubEventMap,
+	PubSubWorkerResponseMap,
+} from '@/events/maps/pub-sub.event-map';
+import type { Resolve } from '@/utlity.types';
 
 import type { COMMAND_PUBSUB_CHANNEL, WORKER_RESPONSE_PUBSUB_CHANNEL } from '../constants';
 
-/** Pubsub channel used by scaling mode. */
-export type PubSubChannel = typeof COMMAND_PUBSUB_CHANNEL | typeof WORKER_RESPONSE_PUBSUB_CHANNEL;
+export namespace PubSub {
+	// ----------------------------------
+	//             channels
+	// ----------------------------------
 
-/** Handler function for every message received via a `PubSubChannel`. */
-export type PubSubHandlerFn = (msg: string) => void;
+	/** Pubsub channel used by scaling mode. */
+	export type Channel = typeof COMMAND_PUBSUB_CHANNEL | typeof WORKER_RESPONSE_PUBSUB_CHANNEL;
 
-export type PubSubMessageMap = {
-	// #region Lifecycle
+	/** Handler function for every message received via a pubsub channel. */
+	export type HandlerFn = (msg: string) => void;
 
-	'reload-license': never;
+	// ----------------------------------
+	//             commands
+	// ----------------------------------
 
-	'restart-event-bus': {
-		result: 'success' | 'error';
-		error?: string;
-	};
+	type _ToCommand<CommandKey extends keyof PubSubCommandMap> = {
+		senderId: string;
+		targets?: string[];
+		command: CommandKey;
 
-	'reload-external-secrets-providers': {
-		result: 'success' | 'error';
-		error?: string;
-	};
+		/** Whether the command should be sent to the sender as well. */
+		selfSend?: boolean;
 
-	'stop-worker': never;
+		/** Whether the command should be debounced when received. */
+		debounce?: boolean;
+	} & (PubSubCommandMap[CommandKey] extends never
+		? { payload?: never } // some commands carry no payload
+		: { payload: PubSubCommandMap[CommandKey] });
 
-	// #endregion
+	type ToCommand<CommandKey extends keyof PubSubCommandMap> = Resolve<_ToCommand<CommandKey>>;
 
-	// #region Community packages
+	namespace Commands {
+		export type ReloadLicense = ToCommand<'reload-license'>;
+		export type RestartEventBus = ToCommand<'restart-event-bus'>;
+		export type ReloadExternalSecretsProviders = ToCommand<'reload-external-secrets-providers'>;
+		export type CommunityPackageInstall = ToCommand<'community-package-install'>;
+		export type CommunityPackageUpdate = ToCommand<'community-package-update'>;
+		export type CommunityPackageUninstall = ToCommand<'community-package-uninstall'>;
+		export type GetWorkerId = ToCommand<'get-worker-id'>;
+		export type GetWorkerStatus = ToCommand<'get-worker-status'>;
+		export type AddWebhooksTriggersAndPollers = ToCommand<'add-webhooks-triggers-and-pollers'>;
+		export type RemoveTriggersAndPollers = ToCommand<'remove-triggers-and-pollers'>;
+		export type DisplayWorkflowActivation = ToCommand<'display-workflow-activation'>;
+		export type DisplayWorkflowDeactivation = ToCommand<'display-workflow-deactivation'>;
+		export type DisplayWorkflowActivationError = ToCommand<'display-workflow-activation-error'>;
+		export type RelayExecutionLifecycleEvent = ToCommand<'relay-execution-lifecycle-event'>;
+		export type ClearTestWebhooks = ToCommand<'clear-test-webhooks'>;
+	}
 
-	'community-package-install': {
-		packageName: string;
-		packageVersion: string;
-	};
+	/** Command sent via the `n8n.commands` pubsub channel. */
+	export type Command =
+		| Commands.ReloadLicense
+		| Commands.RestartEventBus
+		| Commands.ReloadExternalSecretsProviders
+		| Commands.CommunityPackageInstall
+		| Commands.CommunityPackageUpdate
+		| Commands.CommunityPackageUninstall
+		| Commands.GetWorkerId
+		| Commands.GetWorkerStatus
+		| Commands.AddWebhooksTriggersAndPollers
+		| Commands.RemoveTriggersAndPollers
+		| Commands.DisplayWorkflowActivation
+		| Commands.DisplayWorkflowDeactivation
+		| Commands.DisplayWorkflowActivationError
+		| Commands.RelayExecutionLifecycleEvent
+		| Commands.ClearTestWebhooks;
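
Editor's aside, not part of the diff: a standalone analogue of the `_ToCommand` conditional-type trick above, showing why commands mapped to `never` take no payload while all others require a typed one. `DemoCommandMap` and `DemoCommand` are hypothetical names.

```ts
type DemoCommandMap = {
	'reload-license': never;
	'community-package-install': { packageName: string; packageVersion: string };
};

// Commands whose map entry is `never` forbid a payload; others require one.
type DemoCommand<K extends keyof DemoCommandMap> = {
	senderId: string;
	command: K;
} & (DemoCommandMap[K] extends never ? { payload?: never } : { payload: DemoCommandMap[K] });

const noPayload: DemoCommand<'reload-license'> = {
	senderId: 'main-1',
	command: 'reload-license',
};

const withPayload: DemoCommand<'community-package-install'> = {
	senderId: 'main-1',
	command: 'community-package-install',
	payload: { packageName: 'n8n-nodes-test', packageVersion: '1.0.0' },
};
```
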
 
-	'community-package-update': {
-		packageName: string;
-		packageVersion: string;
-	};
+	// ----------------------------------
+	//         worker responses
+	// ----------------------------------
 
-	'community-package-uninstall': {
-		packageName: string;
-		packageVersion: string;
-	};
+	type _ToWorkerResponse<WorkerResponseKey extends keyof PubSubWorkerResponseMap> = {
+		/** ID of worker sending the response. */
+		senderId: string;
 
-	// #endregion
+		/** IDs of processes to send the response to. */
+		targets?: string[];
 
-	// #region Worker view
+		/** Content of worker response. */
+		response: WorkerResponseKey;
 
-	'get-worker-id': never;
+		/** Whether the command should be debounced when received. */
+		debounce?: boolean;
+	} & (PubSubWorkerResponseMap[WorkerResponseKey] extends never
+		? { payload?: never } // some responses carry no payload
+		: { payload: PubSubWorkerResponseMap[WorkerResponseKey] });
 
-	'get-worker-status': WorkerStatus;
+	type ToWorkerResponse<WorkerResponseKey extends keyof PubSubWorkerResponseMap> = Resolve<
+		_ToWorkerResponse<WorkerResponseKey>
+	>;
 
-	// #endregion
+	/** Response sent via the `n8n.worker-response` pubsub channel. */
+	export type WorkerResponse = ToWorkerResponse<'response-to-get-worker-status'>;
 
-	// #region Multi-main setup
+	/**
+	 * Of all events emitted from pubsub messages, those whose handlers
+	 * are all present in main, worker, and webhook processes.
+	 */
+	export type CommonEvents = Pick<
+		PubSubEventMap,
+		| 'reload-license'
+		| 'restart-event-bus'
+		| 'reload-external-secrets-providers'
+		| 'community-package-install'
+		| 'community-package-update'
+		| 'community-package-uninstall'
+	>;
 
-	'add-webhooks-triggers-and-pollers': {
-		workflowId: string;
-	};
-
-	'remove-triggers-and-pollers': {
-		workflowId: string;
-	};
-
-	'display-workflow-activation': {
-		workflowId: string;
-	};
-
-	'display-workflow-deactivation': {
-		workflowId: string;
-	};
-
-	// currently 'workflow-failed-to-activate'
-	'display-workflow-activation-error': {
-		workflowId: string;
-		errorMessage: string;
-	};
-
-	'relay-execution-lifecycle-event': {
-		type: PushType;
-		args: Record<string, unknown>;
-		pushRef: string;
-	};
-
-	'clear-test-webhooks': {
-		webhookKey: string;
-		workflowEntity: IWorkflowDb;
-		pushRef: string;
-	};
-
-	// #endregion
-};
+	/** Multi-main events emitted from pubsub messages. */
+	export type MultiMainEvents = Pick<
+		PubSubEventMap,
+		| 'add-webhooks-triggers-and-pollers'
+		| 'remove-triggers-and-pollers'
+		| 'display-workflow-activation'
+		| 'display-workflow-deactivation'
+		| 'display-workflow-activation-error'
+		| 'relay-execution-lifecycle-event'
+		| 'clear-test-webhooks'
+	>;
+}
diff --git a/packages/cli/src/scaling/pubsub/subscriber.service.ts b/packages/cli/src/scaling/pubsub/subscriber.service.ts
index 5d4529fdb9..7c7f90fb0e 100644
--- a/packages/cli/src/scaling/pubsub/subscriber.service.ts
+++ b/packages/cli/src/scaling/pubsub/subscriber.service.ts
@@ -1,11 +1,14 @@
 import type { Redis as SingleNodeClient, Cluster as MultiNodeClient } from 'ioredis';
+import debounce from 'lodash/debounce';
+import { jsonParse } from 'n8n-workflow';
 import { Service } from 'typedi';
 
 import config from '@/config';
-import { Logger } from '@/logger';
+import { EventService } from '@/events/event.service';
+import { Logger } from '@/logging/logger.service';
 import { RedisClientService } from '@/services/redis-client.service';
 
-import type { PubSubHandlerFn, PubSubChannel } from './pubsub.types';
+import type { PubSub } from './pubsub.types';
 
 /**
  * Responsible for subscribing to the pubsub channels used by scaling mode.
@@ -14,23 +17,30 @@ import type { PubSub } from './pubsub.types';
 export class Subscriber {
 	private readonly client: SingleNodeClient | MultiNodeClient;
 
-	private readonly handlers: Map<PubSubChannel, PubSubHandlerFn> = new Map();
-
 	// #region Lifecycle
 
 	constructor(
 		private readonly logger: Logger,
 		private readonly redisClientService: RedisClientService,
+		private readonly eventService: EventService,
 	) {
 		// @TODO: Once this class is only ever initialized in scaling mode, throw in the next line instead.
 		if (config.getEnv('executions.mode') !== 'queue') return;
 
 		this.client = this.redisClientService.createClient({ type: 'subscriber(n8n)' });
 
-		this.client.on('error', (error) => this.logger.error(error.message));
+		const handlerFn = (msg: PubSub.Command | PubSub.WorkerResponse) => {
+			const eventName = 'command' in msg ? msg.command : msg.response;
+			this.eventService.emit(eventName, msg.payload);
+		};
 
-		this.client.on('message', (channel: PubSubChannel, message) => {
-			this.handlers.get(channel)?.(message);
+		const debouncedHandlerFn = debounce(handlerFn, 300);
+
+		this.client.on('message', (_channel: PubSub.Channel, str) => {
+			const msg = this.parseMessage(str);
+			if (!msg) return;
+			if (msg.debounce) debouncedHandlerFn(msg);
+			else handlerFn(msg);
 		});
 	}
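
Editor's aside, not part of the diff: how the debounced handler above behaves, assuming lodash's default trailing-edge `debounce`. Bursts of debounceable messages collapse into one handler call after 300 ms, while messages marked immediate bypass the wrapper.

```ts
import debounce from 'lodash/debounce';

const handle = (msg: { command: string }) => console.log('handled', msg.command);
const handleDebounced = debounce(handle, 300);

function onMessage(msg: { command: string; debounce?: boolean }) {
	if (msg.debounce) handleDebounced(msg);
	else handle(msg);
}

onMessage({ command: 'reload-license', debounce: true });
onMessage({ command: 'reload-license', debounce: true }); // coalesced with the call above
onMessage({ command: 'relay-execution-lifecycle-event' }); // handled immediately
```
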
@@ -47,7 +57,7 @@ export class Subscriber {
 
 	// #region Subscribing
 
-	async subscribe(channel: PubSubChannel) {
+	async subscribe(channel: PubSub.Channel) {
 		await this.client.subscribe(channel, (error) => {
 			if (error) {
 				this.logger.error('Failed to subscribe to channel', { channel, cause: error });
@@ -58,9 +68,32 @@ export class Subscriber {
 		});
 	}
 
-	/** Set the message handler function for a channel. */
-	setMessageHandler(channel: PubSubChannel, handlerFn: PubSubHandlerFn) {
-		this.handlers.set(channel, handlerFn);
+	// #region Commands
+
+	private parseMessage(str: string) {
+		const msg = jsonParse<PubSub.Command | PubSub.WorkerResponse | null>(str, {
+			fallbackValue: null,
+		});
+
+		if (!msg) {
+			this.logger.debug('Received invalid string via pubsub channel', { message: str });
+
+			return null;
+		}
+
+		const queueModeId = config.getEnv('redis.queueModeId');
+
+		if (
+			'command' in msg &&
+			!msg.selfSend &&
+			(msg.senderId === queueModeId || (msg.targets && !msg.targets.includes(queueModeId)))
+		) {
+			return null;
+		}
+
+		this.logger.debug('Received message via pubsub channel', msg);
+
+		return msg;
 	}
 
 	// #endregion
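
Editor's aside, not part of the diff: the receive-side filtering that `parseMessage` applies to commands, restated as a small predicate. `shouldHandle` and `ownId` are hypothetical names; the rules mirror the condition in the hunk above.

```ts
type CommandMsg = { command: string; senderId: string; targets?: string[]; selfSend?: boolean };

// A command is handled unless it comes from this same instance (and is not
// marked selfSend) or it targets a list that excludes this instance.
function shouldHandle(msg: CommandMsg, ownId: string): boolean {
	if (msg.selfSend) return true;
	if (msg.senderId === ownId) return false;
	if (msg.targets && !msg.targets.includes(ownId)) return false;
	return true;
}

console.log(shouldHandle({ command: 'reload-license', senderId: 'main-1' }, 'main-1')); // false
console.log(
	shouldHandle(
		{ command: 'add-webhooks-triggers-and-pollers', senderId: 'main-1', selfSend: true },
		'main-1',
	),
); // true
console.log(
	shouldHandle({ command: 'get-worker-status', senderId: 'main-1', targets: ['worker-2'] }, 'worker-1'),
); // false
```
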
diff --git a/packages/cli/src/scaling/redis/redis-service-commands.ts b/packages/cli/src/scaling/redis/redis-service-commands.ts
deleted file mode 100644
index e64d1e97fc..0000000000
--- a/packages/cli/src/scaling/redis/redis-service-commands.ts
+++ /dev/null
@@ -1,103 +0,0 @@
-import type { PushType, WorkerStatus } from '@n8n/api-types';
-
-import type { IWorkflowDb } from '@/interfaces';
-
-export type RedisServiceCommand =
-	| 'getStatus'
-	| 'getId'
-	| 'restartEventBus'
-	| 'stopWorker'
-	| 'reloadLicense'
-	| 'reloadExternalSecretsProviders'
-	| 'community-package-install'
-	| 'community-package-update'
-	| 'community-package-uninstall'
-	| 'display-workflow-activation' // multi-main only
-	| 'display-workflow-deactivation' // multi-main only
-	| 'add-webhooks-triggers-and-pollers' // multi-main only
-	| 'remove-triggers-and-pollers' // multi-main only
-	| 'workflow-failed-to-activate' // multi-main only
-	| 'relay-execution-lifecycle-event' // multi-main only
-	| 'clear-test-webhooks'; // multi-main only
-
-/**
- * An object to be sent via Redis pubsub from the main process to the workers.
- * @field command: The command to be executed.
- * @field targets: The targets to execute the command on. Leave empty to execute on all workers or specify worker ids.
- * @field payload: Optional arguments to be sent with the command.
- */ -export type RedisServiceBaseCommand = - | { - senderId: string; - command: Exclude< - RedisServiceCommand, - | 'relay-execution-lifecycle-event' - | 'clear-test-webhooks' - | 'community-package-install' - | 'community-package-update' - | 'community-package-uninstall' - >; - payload?: { - [key: string]: string | number | boolean | string[] | number[] | boolean[]; - }; - } - | { - senderId: string; - command: 'relay-execution-lifecycle-event'; - payload: { type: PushType; args: Record; pushRef: string }; - } - | { - senderId: string; - command: 'clear-test-webhooks'; - payload: { webhookKey: string; workflowEntity: IWorkflowDb; pushRef: string }; - } - | { - senderId: string; - command: - | 'community-package-install' - | 'community-package-update' - | 'community-package-uninstall'; - payload: { packageName: string; packageVersion: string }; - }; - -export type RedisServiceWorkerResponseObject = { - workerId: string; -} & ( - | RedisServiceBaseCommand - | { - command: 'getStatus'; - payload: WorkerStatus; - } - | { - command: 'getId'; - } - | { - command: 'restartEventBus'; - payload: { - result: 'success' | 'error'; - error?: string; - }; - } - | { - command: 'reloadExternalSecretsProviders'; - payload: { - result: 'success' | 'error'; - error?: string; - }; - } - | { - command: 'stopWorker'; - } - | { - command: 'workflowActiveStateChanged'; - payload: { - oldState: boolean; - newState: boolean; - workflowId: string; - }; - } -) & { targets?: string[] }; - -export type RedisServiceCommandObject = { - targets?: string[]; -} & RedisServiceBaseCommand; diff --git a/packages/cli/src/scaling/redis/redis.types.ts b/packages/cli/src/scaling/redis/redis.types.ts index ed694904d7..ec7f2397fa 100644 --- a/packages/cli/src/scaling/redis/redis.types.ts +++ b/packages/cli/src/scaling/redis/redis.types.ts @@ -3,8 +3,8 @@ export type RedisClientType = N8nRedisClientType | BullRedisClientType; /** * Redis client used by n8n. * - * - `subscriber(n8n)` to listen for messages from scaling mode communication channels - * - `publisher(n8n)` to send messages into scaling mode communication channels + * - `subscriber(n8n)` to listen for messages from scaling mode pubsub channels + * - `publisher(n8n)` to send messages into scaling mode pubsub channels * - `cache(n8n)` for caching operations (variables, resource ownership, etc.) 
*/ type N8nRedisClientType = 'subscriber(n8n)' | 'publisher(n8n)' | 'cache(n8n)'; diff --git a/packages/cli/src/scaling/scaling.service.ts b/packages/cli/src/scaling/scaling.service.ts index 552802ba70..f35b4348a6 100644 --- a/packages/cli/src/scaling/scaling.service.ts +++ b/packages/cli/src/scaling/scaling.service.ts @@ -1,7 +1,14 @@ import { GlobalConfig } from '@n8n/config'; import { InstanceSettings } from 'n8n-core'; -import { ApplicationError, BINARY_ENCODING, sleep, jsonStringify } from 'n8n-workflow'; +import { + ApplicationError, + BINARY_ENCODING, + sleep, + jsonStringify, + ErrorReporterProxy, +} from 'n8n-workflow'; import type { IExecuteResponsePromiseData } from 'n8n-workflow'; +import { strict } from 'node:assert'; import Container, { Service } from 'typedi'; import { ActiveExecutions } from '@/active-executions'; @@ -11,7 +18,7 @@ import { ExecutionRepository } from '@/databases/repositories/execution.reposito import { OnShutdown } from '@/decorators/on-shutdown'; import { MaxStalledCountError } from '@/errors/max-stalled-count.error'; import { EventService } from '@/events/event.service'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { OrchestrationService } from '@/services/orchestration.service'; import { JOB_TYPE_NAME, QUEUE_NAME } from './constants'; @@ -40,7 +47,9 @@ export class ScalingService { private readonly instanceSettings: InstanceSettings, private readonly orchestrationService: OrchestrationService, private readonly eventService: EventService, - ) {} + ) { + this.logger = this.logger.withScope('scaling'); + } // #region Lifecycle @@ -70,32 +79,43 @@ export class ScalingService { this.scheduleQueueMetrics(); - this.logger.debug('[ScalingService] Queue setup completed'); + this.logger.debug('Queue setup completed'); } setupWorker(concurrency: number) { this.assertWorker(); this.assertQueue(); - void this.queue.process( - JOB_TYPE_NAME, - concurrency, - async (job: Job) => await this.jobProcessor.processJob(job), - ); + void this.queue.process(JOB_TYPE_NAME, concurrency, async (job: Job) => { + try { + await this.jobProcessor.processJob(job); + } catch (error: unknown) { + // Errors thrown here will be sent to the main instance by bull. Logging + // them out and rethrowing them allows to find out which worker had the + // issue. + this.logger.error('Executing a job errored', { + jobId: job.id, + executionId: job.data.executionId, + error, + }); + ErrorReporterProxy.error(error); + throw error; + } + }); - this.logger.debug('[ScalingService] Worker setup completed'); + this.logger.debug('Worker setup completed'); } @OnShutdown(HIGHEST_SHUTDOWN_PRIORITY) async stop() { await this.queue.pause(true, true); - this.logger.debug('[ScalingService] Queue paused'); + this.logger.debug('Queue paused'); this.stopQueueRecovery(); this.stopQueueMetrics(); - this.logger.debug('[ScalingService] Queue recovery and metrics stopped'); + this.logger.debug('Queue recovery and metrics stopped'); let count = 0; @@ -124,12 +144,24 @@ export class ScalingService { return { active, waiting }; } - async addJob(jobData: JobData, jobOptions: JobOptions) { - const { executionId } = jobData; + /** + * Add a job to the queue. + * + * @param jobData Data of the job to add to the queue. + * @param priority Priority of the job, from `1` (highest) to `MAX_SAFE_INTEGER` (lowest). 
+ */ + async addJob(jobData: JobData, { priority }: { priority: number }) { + strict(priority > 0 && priority <= Number.MAX_SAFE_INTEGER); + + const jobOptions: JobOptions = { + priority, + removeOnComplete: true, + removeOnFail: true, + }; const job = await this.queue.add(JOB_TYPE_NAME, jobData, jobOptions); - this.logger.info(`[ScalingService] Added job ${job.id} (execution ${executionId})`); + this.logger.info(`Added job ${job.id} (execution ${jobData.executionId})`); return job; } @@ -150,16 +182,16 @@ export class ScalingService { try { if (await job.isActive()) { await job.progress({ kind: 'abort-job' }); // being processed by worker - this.logger.debug('[ScalingService] Stopped active job', props); + this.logger.debug('Stopped active job', props); return true; } await job.remove(); // not yet picked up, or waiting for next pickup (stalled) - this.logger.debug('[ScalingService] Stopped inactive job', props); + this.logger.debug('Stopped inactive job', props); return true; } catch (error: unknown) { await job.progress({ kind: 'abort-job' }); - this.logger.error('[ScalingService] Failed to stop job', { ...props, error }); + this.logger.error('Failed to stop job', { ...props, error }); return false; } } @@ -173,42 +205,6 @@ export class ScalingService { // #region Listeners private registerListeners() { - let latestAttemptTs = 0; - let cumulativeTimeoutMs = 0; - - const MAX_TIMEOUT_MS = this.globalConfig.queue.bull.redis.timeoutThreshold; - const RESET_LENGTH_MS = 30_000; - - this.queue.on('error', (error: Error) => { - this.logger.error('[ScalingService] Queue errored', { error }); - - /** - * On Redis connection failure, try to reconnect. On every failed attempt, - * increment a cumulative timeout - if this exceeds a limit, exit the - * process. Reset the cumulative timeout if >30s between retries. - */ - if (error.message.includes('ECONNREFUSED')) { - const nowTs = Date.now(); - if (nowTs - latestAttemptTs > RESET_LENGTH_MS) { - latestAttemptTs = nowTs; - cumulativeTimeoutMs = 0; - } else { - cumulativeTimeoutMs += nowTs - latestAttemptTs; - latestAttemptTs = nowTs; - if (cumulativeTimeoutMs > MAX_TIMEOUT_MS) { - this.logger.error('[ScalingService] Redis unavailable after max timeout'); - this.logger.error('[ScalingService] Exiting process...'); - process.exit(1); - } - } - - this.logger.warn('[ScalingService] Redis unavailable - retrying to connect...'); - return; - } - - throw error; - }); - const { instanceType } = this.instanceSettings; if (instanceType === 'main' || instanceType === 'webhook') { this.registerMainOrWebhookListeners(); @@ -228,6 +224,8 @@ export class ScalingService { }); this.queue.on('error', (error: Error) => { + if ('code' in error && error.code === 'ECONNREFUSED') return; // handled by RedisClientService.retryStrategy + if (error.message.includes('job stalled more than maxStalledCount')) { throw new MaxStalledCountError(error); } @@ -237,11 +235,13 @@ export class ScalingService { * Even if Redis recovers, worker will remain unable to process jobs. 
*/ if (error.message.includes('Error initializing Lua scripts')) { - this.logger.error('[ScalingService] Fatal error initializing worker', { error }); - this.logger.error('[ScalingService] Exiting process...'); + this.logger.error('Fatal error initializing worker', { error }); + this.logger.error('Exiting process...'); process.exit(1); } + this.logger.error('Queue errored', { error }); + throw error; }); } @@ -250,6 +250,14 @@ export class ScalingService { * Register listeners on a `main` or `webhook` process for Bull queue events. */ private registerMainOrWebhookListeners() { + this.queue.on('error', (error: Error) => { + if ('code' in error && error.code === 'ECONNREFUSED') return; // handled by RedisClientService.retryStrategy + + this.logger.error('Queue errored', { error }); + + throw error; + }); + this.queue.on('global:progress', (_jobId: JobId, msg: unknown) => { if (!this.isPubSubMessage(msg)) return; @@ -355,10 +363,10 @@ export class ScalingService { const nextWaitMs = await this.recoverFromQueue(); this.scheduleQueueRecovery(nextWaitMs); } catch (error) { - this.logger.error('[ScalingService] Failed to recover dangling executions from queue', { + this.logger.error('Failed to recover dangling executions from queue', { msg: this.toErrorMsg(error), }); - this.logger.error('[ScalingService] Retrying...'); + this.logger.error('Retrying...'); this.scheduleQueueRecovery(); } @@ -366,7 +374,7 @@ export class ScalingService { const wait = [this.queueRecoveryContext.waitMs / Time.minutes.toMilliseconds, 'min'].join(' '); - this.logger.debug(`[ScalingService] Scheduled queue recovery check for next ${wait}`); + this.logger.debug(`Scheduled queue recovery check for next ${wait}`); } private stopQueueRecovery() { @@ -383,7 +391,7 @@ export class ScalingService { const storedIds = await this.executionRepository.getInProgressExecutionIds(batchSize); if (storedIds.length === 0) { - this.logger.debug('[ScalingService] Completed queue recovery check, no dangling executions'); + this.logger.debug('Completed queue recovery check, no dangling executions'); return waitMs; } @@ -392,23 +400,22 @@ export class ScalingService { const queuedIds = new Set(runningJobs.map((job) => job.data.executionId)); if (queuedIds.size === 0) { - this.logger.debug('[ScalingService] Completed queue recovery check, no dangling executions'); + this.logger.debug('Completed queue recovery check, no dangling executions'); return waitMs; } const danglingIds = storedIds.filter((id) => !queuedIds.has(id)); if (danglingIds.length === 0) { - this.logger.debug('[ScalingService] Completed queue recovery check, no dangling executions'); + this.logger.debug('Completed queue recovery check, no dangling executions'); return waitMs; } await this.executionRepository.markAsCrashed(danglingIds); - this.logger.info( - '[ScalingService] Completed queue recovery check, recovered dangling executions', - { danglingIds }, - ); + this.logger.info('Completed queue recovery check, recovered dangling executions', { + danglingIds, + }); // if this cycle used up the whole batch size, it is possible for there to be // dangling executions outside this check, so speed up next cycle diff --git a/packages/cli/src/scaling/worker-server.ts b/packages/cli/src/scaling/worker-server.ts index abc6a3a024..3cf6995882 100644 --- a/packages/cli/src/scaling/worker-server.ts +++ b/packages/cli/src/scaling/worker-server.ts @@ -2,7 +2,6 @@ import { GlobalConfig } from '@n8n/config'; import type { Application } from 'express'; import express from 'express'; import { 
InstanceSettings } from 'n8n-core'; -import { ensureError } from 'n8n-workflow'; import { strict as assert } from 'node:assert'; import http from 'node:http'; import type { Server } from 'node:http'; @@ -12,15 +11,13 @@ import { CredentialsOverwrites } from '@/credentials-overwrites'; import * as Db from '@/db'; import { CredentialsOverwritesAlreadySetError } from '@/errors/credentials-overwrites-already-set.error'; import { NonJsonBodyError } from '@/errors/non-json-body.error'; -import { PortTakenError } from '@/errors/port-taken.error'; -import { ServiceUnavailableError } from '@/errors/response-errors/service-unavailable.error'; import { ExternalHooks } from '@/external-hooks'; import type { ICredentialsOverwrite } from '@/interfaces'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { PrometheusMetricsService } from '@/metrics/prometheus-metrics.service'; import { rawBodyReader, bodyParser } from '@/middlewares'; import * as ResponseHelper from '@/response-helper'; -import { ScalingService } from '@/scaling/scaling.service'; +import { RedisClientService } from '@/services/redis-client.service'; export type WorkerServerEndpointsConfig = { /** Whether the `/healthz` endpoint is enabled. */ @@ -40,6 +37,8 @@ export type WorkerServerEndpointsConfig = { export class WorkerServer { private readonly port: number; + private readonly address: string; + private readonly server: Server; private readonly app: Application; @@ -51,11 +50,11 @@ export class WorkerServer { constructor( private readonly globalConfig: GlobalConfig, private readonly logger: Logger, - private readonly scalingService: ScalingService, private readonly credentialsOverwrites: CredentialsOverwrites, private readonly externalHooks: ExternalHooks, private readonly instanceSettings: InstanceSettings, private readonly prometheusMetricsService: PrometheusMetricsService, + private readonly redisClientService: RedisClientService, ) { assert(this.instanceSettings.instanceType === 'worker'); @@ -66,9 +65,15 @@ export class WorkerServer { this.server = http.createServer(this.app); this.port = this.globalConfig.queue.health.port; + this.address = this.globalConfig.queue.health.address; this.server.on('error', (error: NodeJS.ErrnoException) => { - if (error.code === 'EADDRINUSE') throw new PortTakenError(this.port); + if (error.code === 'EADDRINUSE') { + this.logger.error( + `Port ${this.port} is already in use, possibly by the n8n main process server. 
Please set a different port for the worker server.`,
+				);
+				process.exit(1);
+			}
 		});
 	}
@@ -79,7 +84,7 @@ export class WorkerServer {
 
 		await this.mountEndpoints();
 
-		await new Promise<void>((resolve) => this.server.listen(this.port, resolve));
+		await new Promise<void>((resolve) => this.server.listen(this.port, this.address, resolve));
 
 		await this.externalHooks.run('worker.ready');
 
@@ -87,11 +92,14 @@ export class WorkerServer {
 	}
 
 	private async mountEndpoints() {
-		if (this.endpointsConfig.health) {
-			this.app.get('/healthz', async (req, res) => await this.healthcheck(req, res));
+		const { health, overwrites, metrics } = this.endpointsConfig;
+
+		if (health) {
+			this.app.get('/healthz', async (_, res) => res.send({ status: 'ok' }));
+			this.app.get('/healthz/readiness', async (_, res) => await this.readiness(_, res));
 		}
 
-		if (this.endpointsConfig.overwrites) {
+		if (overwrites) {
 			const { endpoint } = this.globalConfig.credentials.overwrite;
 
 			this.app.post(`/${endpoint}`, rawBodyReader, bodyParser, (req, res) =>
@@ -99,39 +107,20 @@ export class WorkerServer {
 			);
 		}
 
-		if (this.endpointsConfig.metrics) {
+		if (metrics) {
 			await this.prometheusMetricsService.init(this.app);
 		}
 	}
 
-	private async healthcheck(_req: express.Request, res: express.Response) {
-		this.logger.debug('[WorkerServer] Health check started');
+	private async readiness(_req: express.Request, res: express.Response) {
+		const isReady =
+			Db.connectionState.connected &&
+			Db.connectionState.migrated &&
+			this.redisClientService.isConnected();
 
-		try {
-			await Db.getConnection().query('SELECT 1');
-		} catch (value) {
-			this.logger.error('[WorkerServer] No database connection', ensureError(value));
-
-			return ResponseHelper.sendErrorResponse(
-				res,
-				new ServiceUnavailableError('No database connection'),
-			);
-		}
-
-		try {
-			await this.scalingService.pingQueue();
-		} catch (value) {
-			this.logger.error('[WorkerServer] No Redis connection', ensureError(value));
-
-			return ResponseHelper.sendErrorResponse(
-				res,
-				new ServiceUnavailableError('No Redis connection'),
-			);
-		}
-
-		this.logger.debug('[WorkerServer] Health check succeeded');
-
-		ResponseHelper.sendSuccessResponse(res, { status: 'ok' }, true, 200);
+		return isReady
+			? res.status(200).send({ status: 'ok' })
+			: res.status(503).send({ status: 'error' });
 	}
 
 	private handleOverwrites(
diff --git a/packages/cli/src/scaling/worker-status.service.ts b/packages/cli/src/scaling/worker-status.service.ts
new file mode 100644
index 0000000000..725cbb0ca7
--- /dev/null
+++ b/packages/cli/src/scaling/worker-status.service.ts
@@ -0,0 +1,44 @@
+import type { WorkerStatus } from '@n8n/api-types';
+import os from 'node:os';
+import { Service } from 'typedi';
+
+import config from '@/config';
+import { N8N_VERSION } from '@/constants';
+
+import { JobProcessor } from './job-processor';
+
+@Service()
+export class WorkerStatusService {
+	constructor(private readonly jobProcessor: JobProcessor) {}
+
+	generateStatus(): WorkerStatus {
+		return {
+			senderId: config.getEnv('redis.queueModeId'),
+			runningJobsSummary: this.jobProcessor.getRunningJobsSummary(),
+			freeMem: os.freemem(),
+			totalMem: os.totalmem(),
+			uptime: process.uptime(),
+			loadAvg: os.loadavg(),
+			cpus: this.getOsCpuString(),
+			arch: os.arch(),
+			platform: os.platform(),
+			hostname: os.hostname(),
+			interfaces: Object.values(os.networkInterfaces()).flatMap((interfaces) =>
+				(interfaces ??
[])?.map((net) => ({ + family: net.family, + address: net.address, + internal: net.internal, + })), + ), + version: N8N_VERSION, + }; + } + + private getOsCpuString() { + const cpus = os.cpus(); + + if (cpus.length === 0) return 'no CPU info'; + + return `${cpus.length}x ${cpus[0].model} - speed: ${cpus[0].speed}`; + } +} diff --git a/packages/cli/src/security-audit/risk-reporters/instance-risk-reporter.ts b/packages/cli/src/security-audit/risk-reporters/instance-risk-reporter.ts index 4792bf8b6a..b0d6ccfad3 100644 --- a/packages/cli/src/security-audit/risk-reporters/instance-risk-reporter.ts +++ b/packages/cli/src/security-audit/risk-reporters/instance-risk-reporter.ts @@ -6,7 +6,7 @@ import { Service } from 'typedi'; import config from '@/config'; import { getN8nPackageJson, inDevelopment } from '@/constants'; import type { WorkflowEntity } from '@/databases/entities/workflow-entity'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { isApiEnabled } from '@/public-api'; import { ENV_VARS_DOCS_URL, diff --git a/packages/cli/src/server.ts b/packages/cli/src/server.ts index 27ac3b09a1..00971d71a5 100644 --- a/packages/cli/src/server.ts +++ b/packages/cli/src/server.ts @@ -21,7 +21,7 @@ import { CredentialsOverwrites } from '@/credentials-overwrites'; import { ControllerRegistry } from '@/decorators'; import { MessageEventBus } from '@/eventbus/message-event-bus/message-event-bus'; import { EventService } from '@/events/event.service'; -import { LogStreamingEventRelay } from '@/events/log-streaming-event-relay'; +import { LogStreamingEventRelay } from '@/events/relays/log-streaming.event-relay'; import type { ICredentialsOverwrite } from '@/interfaces'; import { isLdapEnabled } from '@/ldap/helpers.ee'; import { LoadNodesAndCredentials } from '@/load-nodes-and-credentials'; @@ -35,11 +35,11 @@ import type { FrontendService } from '@/services/frontend.service'; import { OrchestrationService } from '@/services/orchestration.service'; import '@/controllers/active-workflows.controller'; -import '@/controllers/annotation-tags.controller'; +import '@/controllers/annotation-tags.controller.ee'; import '@/controllers/auth.controller'; import '@/controllers/binary-data.controller'; import '@/controllers/curl.controller'; -import '@/controllers/ai-assistant.controller'; +import '@/controllers/ai.controller'; import '@/controllers/dynamic-node-parameters.controller'; import '@/controllers/invitation.controller'; import '@/controllers/me.controller'; @@ -56,6 +56,7 @@ import '@/controllers/translation.controller'; import '@/controllers/users.controller'; import '@/controllers/user-settings.controller'; import '@/controllers/workflow-statistics.controller'; +import '@/controllers/api-keys.controller'; import '@/credentials/credentials.controller'; import '@/eventbus/event-bus.controller'; import '@/events/events.controller'; @@ -251,6 +252,7 @@ export class Server extends AbstractServer { JSON.stringify({ dsn: this.globalConfig.sentry.frontendDsn, environment: process.env.ENVIRONMENT || 'development', + serverName: process.env.DEPLOYMENT_NAME, release: N8N_VERSION, }), ); diff --git a/packages/cli/src/services/__tests__/orchestration.service.test.ts b/packages/cli/src/services/__tests__/orchestration.service.test.ts index f77dcd90cc..6c66573047 100644 --- a/packages/cli/src/services/__tests__/orchestration.service.test.ts +++ b/packages/cli/src/services/__tests__/orchestration.service.test.ts @@ -6,20 +6,12 @@ import Container from 'typedi'; import { 
ActiveWorkflowManager } from '@/active-workflow-manager'; import config from '@/config'; -import { MessageEventBus } from '@/eventbus/message-event-bus/message-event-bus'; import { ExternalSecretsManager } from '@/external-secrets/external-secrets-manager.ee'; import { Push } from '@/push'; -import type { RedisServiceWorkerResponseObject } from '@/scaling/redis/redis-service-commands'; -import * as helpers from '@/services/orchestration/helpers'; -import { handleCommandMessageMain } from '@/services/orchestration/main/handle-command-message-main'; -import { handleWorkerResponseMessageMain } from '@/services/orchestration/main/handle-worker-response-message-main'; -import { OrchestrationHandlerMainService } from '@/services/orchestration/main/orchestration.handler.main.service'; import { OrchestrationService } from '@/services/orchestration.service'; import { RedisClientService } from '@/services/redis-client.service'; import { mockInstance } from '@test/mocking'; -import type { MainResponseReceivedHandlerOptions } from '../orchestration/main/types'; - config.set('executions.mode', 'queue'); config.set('generic.instanceType', 'main'); @@ -29,24 +21,13 @@ const mockRedisClient = mock(); redisClientService.createClient.mockReturnValue(mockRedisClient); const os = Container.get(OrchestrationService); -const handler = Container.get(OrchestrationHandlerMainService); mockInstance(ActiveWorkflowManager); let queueModeId: string; -const workerRestartEventBusResponse: RedisServiceWorkerResponseObject = { - senderId: 'test', - workerId: 'test', - command: 'restartEventBus', - payload: { - result: 'success', - }, -}; - describe('Orchestration Service', () => { mockInstance(Push); mockInstance(ExternalSecretsManager); - const eventBus = mockInstance(MessageEventBus); beforeAll(async () => { queueModeId = config.get('redis.queueModeId'); @@ -65,73 +46,11 @@ describe('Orchestration Service', () => { test('should initialize', async () => { await os.init(); - await handler.init(); // @ts-expect-error Private field expect(os.publisher).toBeDefined(); - // @ts-expect-error Private field - expect(handler.subscriber).toBeDefined(); expect(queueModeId).toBeDefined(); }); - test('should handle worker responses', async () => { - const response = await handleWorkerResponseMessageMain( - JSON.stringify(workerRestartEventBusResponse), - mock(), - ); - expect(response?.command).toEqual('restartEventBus'); - }); - - test('should handle command messages from others', async () => { - const responseFalseId = await handleCommandMessageMain( - JSON.stringify({ - senderId: 'test', - command: 'reloadLicense', - }), - ); - expect(responseFalseId).toBeDefined(); - expect(responseFalseId!.command).toEqual('reloadLicense'); - expect(responseFalseId!.senderId).toEqual('test'); - }); - - test('should reject command messages from itself', async () => { - const response = await handleCommandMessageMain( - JSON.stringify({ ...workerRestartEventBusResponse, senderId: queueModeId }), - ); - expect(response).toBeDefined(); - expect(response!.command).toEqual('restartEventBus'); - expect(response!.senderId).toEqual(queueModeId); - expect(eventBus.restart).not.toHaveBeenCalled(); - }); - - test('should send command messages', async () => { - // @ts-expect-error Private field - jest.spyOn(os.publisher, 'publishCommand').mockImplementation(async () => {}); - await os.getWorkerIds(); - // @ts-expect-error Private field - expect(os.publisher.publishCommand).toHaveBeenCalled(); - // @ts-expect-error Private field - jest.spyOn(os.publisher, 
'publishCommand').mockRestore(); - }); - - test('should prevent receiving commands too often', async () => { - jest.spyOn(helpers, 'debounceMessageReceiver'); - const res1 = await handleCommandMessageMain( - JSON.stringify({ - senderId: 'test', - command: 'reloadExternalSecretsProviders', - }), - ); - const res2 = await handleCommandMessageMain( - JSON.stringify({ - senderId: 'test', - command: 'reloadExternalSecretsProviders', - }), - ); - expect(helpers.debounceMessageReceiver).toHaveBeenCalledTimes(2); - expect(res1!.payload).toBeUndefined(); - expect(res2!.payload).toEqual({ result: 'debounced' }); - }); - describe('shouldAddWebhooks', () => { test('should return true for init', () => { // We want to ensure that webhooks are populated on init diff --git a/packages/cli/src/services/__tests__/workflow-statistics.service.test.ts b/packages/cli/src/services/__tests__/workflow-statistics.service.test.ts index a8d73cbfff..fdecb7ae5a 100644 --- a/packages/cli/src/services/__tests__/workflow-statistics.service.test.ts +++ b/packages/cli/src/services/__tests__/workflow-statistics.service.test.ts @@ -7,7 +7,7 @@ import { } from '@n8n/typeorm'; import { mocked } from 'jest-mock'; import { mock } from 'jest-mock-extended'; -import type { IRun, WorkflowExecuteMode } from 'n8n-workflow'; +import type { INode, IRun, WorkflowExecuteMode } from 'n8n-workflow'; import { Container } from 'typedi'; import config from '@/config'; @@ -167,6 +167,22 @@ describe('WorkflowStatisticsService', () => { }); }); + test('should emit event with no `userId` if workflow is owned by team project', async () => { + const workflowId = '123'; + ownershipService.getPersonalProjectOwnerCached.mockResolvedValueOnce(null); + const node = mock({ id: '123', type: 'n8n-nodes-base.noOp', credentials: {} }); + + await workflowStatisticsService.nodeFetchedData(workflowId, node); + + expect(eventService.emit).toHaveBeenCalledWith('first-workflow-data-loaded', { + userId: '', + project: fakeProject.id, + workflowId, + nodeType: node.type, + nodeId: node.id, + }); + }); + test('should create metrics with credentials when the db is updated', async () => { // Call the function with a production success result, ensure metrics hook gets called const workflowId = '1'; diff --git a/packages/cli/src/services/active-workflows.service.ts b/packages/cli/src/services/active-workflows.service.ts index f2aaf9293d..61aa875d1a 100644 --- a/packages/cli/src/services/active-workflows.service.ts +++ b/packages/cli/src/services/active-workflows.service.ts @@ -5,7 +5,7 @@ import type { User } from '@/databases/entities/user'; import { SharedWorkflowRepository } from '@/databases/repositories/shared-workflow.repository'; import { WorkflowRepository } from '@/databases/repositories/workflow.repository'; import { BadRequestError } from '@/errors/response-errors/bad-request.error'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; @Service() export class ActiveWorkflowsService { diff --git a/packages/cli/src/services/ai-assistant.service.ts b/packages/cli/src/services/ai.service.ts similarity index 74% rename from packages/cli/src/services/ai-assistant.service.ts rename to packages/cli/src/services/ai.service.ts index 77234165c1..a7b07219b5 100644 --- a/packages/cli/src/services/ai-assistant.service.ts +++ b/packages/cli/src/services/ai.service.ts @@ -1,8 +1,8 @@ +import { GlobalConfig } from '@n8n/config'; import type { AiAssistantSDK } from '@n8n_io/ai-assistant-sdk'; import { AiAssistantClient } from 
'@n8n_io/ai-assistant-sdk'; import { assert, type IUser } from 'n8n-workflow'; import { Service } from 'typedi'; -import type { Response } from 'undici'; import config from '@/config'; import type { AiAssistantRequest } from '@/requests'; @@ -11,10 +11,13 @@ import { N8N_VERSION } from '../constants'; import { License } from '../license'; @Service() -export class AiAssistantService { +export class AiService { private client: AiAssistantClient | undefined; - constructor(private readonly licenseService: License) {} + constructor( + private readonly licenseService: License, + private readonly globalConfig: GlobalConfig, + ) {} async init() { const aiAssistantEnabled = this.licenseService.isAiAssistantEnabled(); @@ -25,7 +28,7 @@ export class AiAssistantService { const licenseCert = await this.licenseService.loadCertStr(); const consumerId = this.licenseService.getConsumerId(); const baseUrl = config.get('aiAssistant.baseUrl'); - const logLevel = config.getEnv('logs.level'); + const logLevel = this.globalConfig.logging.level; this.client = new AiAssistantClient({ licenseCert, @@ -36,7 +39,7 @@ export class AiAssistantService { }); } - async chat(payload: AiAssistantSDK.ChatRequestPayload, user: IUser): Promise<Response> { + async chat(payload: AiAssistantSDK.ChatRequestPayload, user: IUser) { if (!this.client) { await this.init(); } @@ -53,4 +56,13 @@ export class AiAssistantService { return await this.client.applySuggestion(payload, { id: user.id }); } + + async askAi(payload: AiAssistantSDK.AskAiRequestPayload, user: IUser) { + if (!this.client) { + await this.init(); + } + assert(this.client, 'Assistant client not setup'); + + return await this.client.askAi(payload, { id: user.id }); + } } diff --git a/packages/cli/src/services/annotation-tag.service.ts b/packages/cli/src/services/annotation-tag.service.ee.ts similarity index 96% rename from packages/cli/src/services/annotation-tag.service.ts rename to packages/cli/src/services/annotation-tag.service.ee.ts index 27c93041b5..671395168c 100644 --- a/packages/cli/src/services/annotation-tag.service.ts +++ b/packages/cli/src/services/annotation-tag.service.ee.ts @@ -1,7 +1,7 @@ import { Service } from 'typedi'; -import type { AnnotationTagEntity } from '@/databases/entities/annotation-tag-entity'; -import { AnnotationTagRepository } from '@/databases/repositories/annotation-tag.repository'; +import type { AnnotationTagEntity } from '@/databases/entities/annotation-tag-entity.ee'; +import { AnnotationTagRepository } from '@/databases/repositories/annotation-tag.repository.ee'; import { validateEntity } from '@/generic-helpers'; import type { IAnnotationTagDb, IAnnotationTagWithCountDb } from '@/interfaces'; diff --git a/packages/cli/src/services/cache/cache.service.ts b/packages/cli/src/services/cache/cache.service.ts index 3eda66ecb8..aefe9310fc 100644 --- a/packages/cli/src/services/cache/cache.service.ts +++ b/packages/cli/src/services/cache/cache.service.ts @@ -89,6 +89,9 @@ export class CacheService extends TypedEmitter { // storing // ---------------------------------- + /** + * @param ttl Time to live in milliseconds + */ async set(key: string, value: unknown, ttl?: number) { if (!this.cache) await this.init();
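
The `ttl` parameter of `CacheService.set` is now documented as milliseconds. A usage sketch; the key and value are illustrative, not from this release:

```typescript
import { Container } from 'typedi';

import { CacheService } from '@/services/cache/cache.service';

// Sketch: `ttl` is in milliseconds, so this entry expires after five minutes.
const cacheService = Container.get(CacheService);
await cacheService.set('webhook:abc123', { workflowId: '42' }, 5 * 60 * 1000);
```

diff --git a/packages/cli/src/services/community-packages.service.ts b/packages/cli/src/services/community-packages.service.ts index 500518ae02..b157119cf2 100644 --- a/packages/cli/src/services/community-packages.service.ts +++ b/packages/cli/src/services/community-packages.service.ts @@ -22,7 +22,7 @@ import { FeatureNotLicensedError } 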
from '@/errors/feature-not-licensed.error'; import type { CommunityPackages } from '@/interfaces'; import { License } from '@/license'; import { LoadNodesAndCredentials } from '@/load-nodes-and-credentials'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { toError } from '@/utils'; import { OrchestrationService } from './orchestration.service'; diff --git a/packages/cli/src/services/credentials-tester.service.ts b/packages/cli/src/services/credentials-tester.service.ts index b66d4a474f..30504e464b 100644 --- a/packages/cli/src/services/credentials-tester.service.ts +++ b/packages/cli/src/services/credentials-tester.service.ts @@ -35,7 +35,7 @@ import { Service } from 'typedi'; import { CredentialTypes } from '@/credential-types'; import type { User } from '@/databases/entities/user'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { NodeTypes } from '@/node-types'; import * as WorkflowExecuteAdditionalData from '@/workflow-execute-additional-data'; diff --git a/packages/cli/src/services/frontend.service.ts b/packages/cli/src/services/frontend.service.ts index 6ac3a1863c..a83158e96e 100644 --- a/packages/cli/src/services/frontend.service.ts +++ b/packages/cli/src/services/frontend.service.ts @@ -17,7 +17,7 @@ import { getVariablesLimit } from '@/environments/variables/environment-helpers' import { getLdapLoginLabel } from '@/ldap/helpers.ee'; import { License } from '@/license'; import { LoadNodesAndCredentials } from '@/load-nodes-and-credentials'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { isApiEnabled } from '@/public-api'; import type { CommunityPackagesService } from '@/services/community-packages.service'; import { getSamlLoginLabel } from '@/sso/saml/saml-helpers'; @@ -88,6 +88,7 @@ export class FrontendService { endpointFormWaiting: this.globalConfig.endpoints.formWaiting, endpointWebhook: this.globalConfig.endpoints.webhook, endpointWebhookTest: this.globalConfig.endpoints.webhookTest, + endpointWebhookWaiting: this.globalConfig.endpoints.webhookWaiting, saveDataErrorExecution: config.getEnv('executions.saveDataOnError'), saveDataSuccessExecution: config.getEnv('executions.saveDataOnSuccess'), saveManualExecutions: config.getEnv('executions.saveDataManualExecutions'), @@ -123,7 +124,7 @@ export class FrontendService { apiKey: config.getEnv('diagnostics.config.posthog.apiKey'), autocapture: false, disableSessionRecording: config.getEnv('deployment.type') !== 'cloud', - debug: config.getEnv('logs.level') === 'debug', + debug: this.globalConfig.logging.level === 'debug', }, personalizationSurveyEnabled: config.getEnv('personalization.enabled') && config.getEnv('diagnostics.enabled'), @@ -153,7 +154,7 @@ export class FrontendService { }, }, workflowTagsDisabled: config.getEnv('workflowTagsDisabled'), - logLevel: config.getEnv('logs.level'), + logLevel: this.globalConfig.logging.level, hiringBannerEnabled: config.getEnv('hiringBanner.enabled'), aiAssistant: { enabled: false, @@ -211,8 +212,8 @@ export class FrontendService { banners: { dismissed: [], }, - ai: { - enabled: config.getEnv('ai.enabled'), + askAi: { + enabled: false, }, workflowHistory: { pruneTime: -1, @@ -273,6 +274,7 @@ export class FrontendService { const isS3Available = config.getEnv('binaryDataManager.availableModes').includes('s3'); const isS3Licensed = this.license.isBinaryDataS3Licensed(); const isAiAssistantEnabled = this.license.isAiAssistantEnabled(); + const 
isAskAiEnabled = this.license.isAskAiEnabled(); this.settings.license.planName = this.license.getPlanName(); this.settings.license.consumerId = this.license.getConsumerId(); @@ -329,6 +331,10 @@ export class FrontendService { this.settings.aiAssistant.enabled = isAiAssistantEnabled; } + if (isAskAiEnabled) { + this.settings.askAi.enabled = isAskAiEnabled; + } + this.settings.mfa.enabled = config.get('mfa.enabled'); this.settings.executionMode = config.getEnv('executions.mode'); diff --git a/packages/cli/src/services/orchestration.service.ts b/packages/cli/src/services/orchestration.service.ts index 80f428bb81..b8aba46285 100644 --- a/packages/cli/src/services/orchestration.service.ts +++ b/packages/cli/src/services/orchestration.service.ts @@ -3,15 +3,12 @@ import type { WorkflowActivateMode } from 'n8n-workflow'; import Container, { Service } from 'typedi'; import config from '@/config'; -import { Logger } from '@/logger'; +import type { PubSubCommandMap } from '@/events/maps/pub-sub.event-map'; +import { Logger } from '@/logging/logger.service'; import type { Publisher } from '@/scaling/pubsub/publisher.service'; import type { Subscriber } from '@/scaling/pubsub/subscriber.service'; import { MultiMainSetup } from './orchestration/main/multi-main-setup.ee'; -import type { - RedisServiceBaseCommand, - RedisServiceCommand, -} from '../scaling/redis/redis-service-commands'; @Service() export class OrchestrationService { @@ -100,14 +97,18 @@ export class OrchestrationService { // pubsub // ---------------------------------- - async publish(command: RedisServiceCommand, data?: unknown) { + async publish<CommandKey extends keyof PubSubCommandMap>( + commandKey: CommandKey, + payload?: PubSubCommandMap[CommandKey], + ) { if (!this.sanityCheck()) return; - const payload = data as RedisServiceBaseCommand['payload']; + this.logger.debug( + `[Instance ID ${this.instanceId}] Publishing command "${commandKey}"`, + payload, + ); - this.logger.debug(`[Instance ID ${this.instanceId}] Publishing command "${command}"`, payload); - - await this.publisher.publishCommand({ command, payload }); + await this.publisher.publishCommand({ command: commandKey, payload }); } // ---------------------------------- @@ -117,7 +118,7 @@ export class OrchestrationService { async getWorkerStatus(id?: string) { if (!this.sanityCheck()) return; - const command = 'getStatus'; + const command = 'get-worker-status'; this.logger.debug(`Sending "${command}" to command channel`); @@ -127,16 +128,6 @@ export class OrchestrationService { }); } - async getWorkerIds() { - if (!this.sanityCheck()) return; - - const command = 'getId'; - - this.logger.debug(`Sending "${command}" to command channel`); - - await this.publisher.publishCommand({ command }); - } - // ---------------------------------- // activations // ----------------------------------
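
`OrchestrationService.publish` is now generic over `PubSubCommandMap`, so command names and payload shapes are checked at compile time. A sketch of what callers gain; the `workflowId` payload matches the `display-workflow-activation` handling shown later in this diff, but treat the exact map entry as an assumption:

```typescript
import { Container } from 'typedi';

import { OrchestrationService } from '@/services/orchestration.service';

const orchestrationService = Container.get(OrchestrationService);

// The command key must be a `keyof PubSubCommandMap`, and the payload must
// match the type that the map associates with that key.
await orchestrationService.publish('display-workflow-activation', { workflowId: '42' });

// Under the old `publish(command, data?: unknown)` signature a malformed
// payload compiled fine and only failed at runtime; now it is rejected:
// await orchestrationService.publish('display-workflow-activation', { workflow: 42 });
```

diff --git a/packages/cli/src/services/orchestration/helpers.ts b/packages/cli/src/services/orchestration/helpers.ts deleted file mode 100644 index fd5e444cff..0000000000 --- a/packages/cli/src/services/orchestration/helpers.ts +++ /dev/null @@ -1,44 +0,0 @@ -import { jsonParse } from 'n8n-workflow'; -import os from 'node:os'; -import { Container } from 'typedi'; - -import { Logger } from '@/logger'; -import { COMMAND_PUBSUB_CHANNEL } from '@/scaling/constants'; - -import type { RedisServiceCommandObject } from '../../scaling/redis/redis-service-commands'; - -export interface RedisServiceCommandLastReceived { - [date: string]: Date; - } - -export function messageToRedisServiceCommandObject(messageString: string) { - if (!messageString) 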
return; - let message: RedisServiceCommandObject; - try { - message = jsonParse(messageString); - } catch { - Container.get(Logger).debug( - `Received invalid message via channel ${COMMAND_PUBSUB_CHANNEL}: "${messageString}"`, - ); - return; - } - return message; -} - -const lastReceived: RedisServiceCommandLastReceived = {}; - -export function debounceMessageReceiver(message: RedisServiceCommandObject, timeout: number = 100) { - const now = new Date(); - const lastReceivedDate = lastReceived[message.command]; - if (lastReceivedDate && now.getTime() - lastReceivedDate.getTime() < timeout) { - return false; - } - lastReceived[message.command] = now; - return true; -} - -export function getOsCpuString(): string { - const cpus = os.cpus(); - if (cpus.length === 0) return 'no CPU info'; - return `${cpus.length}x ${cpus[0].model} - speed: ${cpus[0].speed}`; -} diff --git a/packages/cli/src/services/orchestration/main/handle-command-message-main.ts b/packages/cli/src/services/orchestration/main/handle-command-message-main.ts deleted file mode 100644 index 15917e11b7..0000000000 --- a/packages/cli/src/services/orchestration/main/handle-command-message-main.ts +++ /dev/null @@ -1,251 +0,0 @@ -import { InstanceSettings } from 'n8n-core'; -import { Container } from 'typedi'; - -import { ActiveWorkflowManager } from '@/active-workflow-manager'; -import config from '@/config'; -import { WorkflowRepository } from '@/databases/repositories/workflow.repository'; -import { MessageEventBus } from '@/eventbus/message-event-bus/message-event-bus'; -import { ExternalSecretsManager } from '@/external-secrets/external-secrets-manager.ee'; -import { License } from '@/license'; -import { Logger } from '@/logger'; -import { Push } from '@/push'; -import { CommunityPackagesService } from '@/services/community-packages.service'; -import { OrchestrationService } from '@/services/orchestration.service'; -import { TestWebhooks } from '@/webhooks/test-webhooks'; - -import { debounceMessageReceiver, messageToRedisServiceCommandObject } from '../helpers'; - -// eslint-disable-next-line complexity -export async function handleCommandMessageMain(messageString: string) { - const queueModeId = config.getEnv('redis.queueModeId'); - const isMainInstance = Container.get(InstanceSettings).instanceType === 'main'; - const message = messageToRedisServiceCommandObject(messageString); - const logger = Container.get(Logger); - - if (message) { - logger.debug( - `RedisCommandHandler(main): Received command message ${message.command} from ${message.senderId}`, - ); - - const selfSendingAllowed = [ - 'add-webhooks-triggers-and-pollers', - 'remove-triggers-and-pollers', - ].includes(message.command); - - if ( - !selfSendingAllowed && - (message.senderId === queueModeId || - (message.targets && !message.targets.includes(queueModeId))) - ) { - // Skipping command message because it's not for this instance - logger.debug( - `Skipping command message ${message.command} because it's not for this instance.`, - ); - return message; - } - - const push = Container.get(Push); - - switch (message.command) { - case 'reloadLicense': - if (!debounceMessageReceiver(message, 500)) { - message.payload = { - result: 'debounced', - }; - return message; - } - - if (isMainInstance && !config.getEnv('multiMainSetup.enabled')) { - return message; // this main is the sender, so disregard - } - await Container.get(License).reload(); - break; - case 'restartEventBus': - if (!debounceMessageReceiver(message, 200)) { - message.payload = { - result: 'debounced', - }; 
- return message; - } - await Container.get(MessageEventBus).restart(); - case 'reloadExternalSecretsProviders': - if (!debounceMessageReceiver(message, 200)) { - message.payload = { - result: 'debounced', - }; - return message; - } - await Container.get(ExternalSecretsManager).reloadAllProviders(); - break; - case 'community-package-install': - case 'community-package-update': - case 'community-package-uninstall': - if (!debounceMessageReceiver(message, 200)) { - return message; - } - const { packageName, packageVersion } = message.payload; - const communityPackagesService = Container.get(CommunityPackagesService); - if (message.command === 'community-package-uninstall') { - await communityPackagesService.removeNpmPackage(packageName); - } else { - await communityPackagesService.installOrUpdateNpmPackage(packageName, packageVersion); - } - break; - - case 'add-webhooks-triggers-and-pollers': { - if (!debounceMessageReceiver(message, 100)) { - message.payload = { result: 'debounced' }; - return message; - } - - const orchestrationService = Container.get(OrchestrationService); - - if (orchestrationService.isFollower) break; - - if (typeof message.payload?.workflowId !== 'string') break; - - const { workflowId } = message.payload; - - try { - await Container.get(ActiveWorkflowManager).add(workflowId, 'activate', undefined, { - shouldPublish: false, // prevent leader re-publishing message - }); - - push.broadcast('workflowActivated', { workflowId }); - - // instruct followers to show activation in UI - await orchestrationService.publish('display-workflow-activation', { workflowId }); - } catch (error) { - if (error instanceof Error) { - await Container.get(WorkflowRepository).update(workflowId, { active: false }); - - Container.get(Push).broadcast('workflowFailedToActivate', { - workflowId, - errorMessage: error.message, - }); - - await Container.get(OrchestrationService).publish('workflow-failed-to-activate', { - workflowId, - errorMessage: error.message, - }); - } - } - - break; - } - - case 'remove-triggers-and-pollers': { - if (!debounceMessageReceiver(message, 100)) { - message.payload = { result: 'debounced' }; - return message; - } - - const orchestrationService = Container.get(OrchestrationService); - - if (orchestrationService.isFollower) break; - - if (typeof message.payload?.workflowId !== 'string') break; - - const { workflowId } = message.payload; - - const activeWorkflowManager = Container.get(ActiveWorkflowManager); - - await activeWorkflowManager.removeActivationError(workflowId); - await activeWorkflowManager.removeWorkflowTriggersAndPollers(workflowId); - - push.broadcast('workflowDeactivated', { workflowId }); - - // instruct followers to show workflow deactivation in UI - await orchestrationService.publish('display-workflow-deactivation', { workflowId }); - - break; - } - - case 'display-workflow-activation': { - if (!debounceMessageReceiver(message, 100)) { - message.payload = { result: 'debounced' }; - return message; - } - - const { workflowId } = message.payload ?? {}; - - if (typeof workflowId !== 'string') break; - - push.broadcast('workflowActivated', { workflowId }); - - break; - } - - case 'display-workflow-deactivation': { - if (!debounceMessageReceiver(message, 100)) { - message.payload = { result: 'debounced' }; - return message; - } - - const { workflowId } = message.payload ?? 
{}; - - if (typeof workflowId !== 'string') break; - - push.broadcast('workflowDeactivated', { workflowId }); - - break; - } - - case 'workflow-failed-to-activate': { - if (!debounceMessageReceiver(message, 100)) { - message.payload = { result: 'debounced' }; - return message; - } - - const { workflowId, errorMessage } = message.payload ?? {}; - - if (typeof workflowId !== 'string' || typeof errorMessage !== 'string') break; - - Container.get(Push).broadcast('workflowFailedToActivate', { workflowId, errorMessage }); - - break; - } - - case 'relay-execution-lifecycle-event': { - /** - * Do not debounce this - all events share the same message name. - */ - - const { type, args, pushRef } = message.payload; - - if (!push.getBackend().hasPushRef(pushRef)) break; - - push.send(type, args, pushRef); - - break; - } - - case 'clear-test-webhooks': { - if (!debounceMessageReceiver(message, 100)) { - // @ts-expect-error Legacy typing - message.payload = { result: 'debounced' }; - return message; - } - - const { webhookKey, workflowEntity, pushRef } = message.payload; - - if (!push.getBackend().hasPushRef(pushRef)) break; - - const testWebhooks = Container.get(TestWebhooks); - - testWebhooks.clearTimeout(webhookKey); - - const workflow = testWebhooks.toWorkflow(workflowEntity); - - await testWebhooks.deactivateWebhooks(workflow); - - break; - } - - default: - break; - } - return message; - } - return; -} diff --git a/packages/cli/src/services/orchestration/main/handle-worker-response-message-main.ts b/packages/cli/src/services/orchestration/main/handle-worker-response-message-main.ts deleted file mode 100644 index da3cf5a507..0000000000 --- a/packages/cli/src/services/orchestration/main/handle-worker-response-message-main.ts +++ /dev/null @@ -1,45 +0,0 @@ -import type { WorkerStatus } from '@n8n/api-types'; -import { jsonParse } from 'n8n-workflow'; -import Container from 'typedi'; - -import { Logger } from '@/logger'; -import { WORKER_RESPONSE_PUBSUB_CHANNEL } from '@/scaling/constants'; - -import type { MainResponseReceivedHandlerOptions } from './types'; -import { Push } from '../../../push'; -import type { RedisServiceWorkerResponseObject } from '../../../scaling/redis/redis-service-commands'; - -export async function handleWorkerResponseMessageMain( - messageString: string, - options: MainResponseReceivedHandlerOptions, -) { - const workerResponse = jsonParse(messageString, { - fallbackValue: null, - }); - - if (!workerResponse) { - Container.get(Logger).debug( - `Received invalid message via channel ${WORKER_RESPONSE_PUBSUB_CHANNEL}: "${messageString}"`, - ); - return; - } - - if (workerResponse.targets && !workerResponse.targets.includes(options.queueModeId)) return; - - switch (workerResponse.command) { - case 'getStatus': - Container.get(Push).broadcast('sendWorkerStatusMessage', { - workerId: workerResponse.workerId, - status: workerResponse.payload as WorkerStatus, - }); - break; - case 'getId': - break; - default: - Container.get(Logger).debug( - `Received worker response ${workerResponse.command} from ${workerResponse.workerId}`, - ); - } - - return workerResponse; -} diff --git a/packages/cli/src/services/orchestration/main/multi-main-setup.ee.ts b/packages/cli/src/services/orchestration/main/multi-main-setup.ee.ts index 98dbce7fde..bb1b52519c 100644 --- a/packages/cli/src/services/orchestration/main/multi-main-setup.ee.ts +++ b/packages/cli/src/services/orchestration/main/multi-main-setup.ee.ts @@ -4,7 +4,7 @@ import { Service } from 'typedi'; import config from '@/config'; import { 
TIME } from '@/constants'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { Publisher } from '@/scaling/pubsub/publisher.service'; import { RedisClientService } from '@/services/redis-client.service'; import { TypedEmitter } from '@/typed-emitter'; diff --git a/packages/cli/src/services/orchestration/main/orchestration.handler.main.service.ts b/packages/cli/src/services/orchestration/main/orchestration.handler.main.service.ts deleted file mode 100644 index 7f4effdd4a..0000000000 --- a/packages/cli/src/services/orchestration/main/orchestration.handler.main.service.ts +++ /dev/null @@ -1,26 +0,0 @@ -import { Service } from 'typedi'; - -import { Subscriber } from '@/scaling/pubsub/subscriber.service'; - -import { handleCommandMessageMain } from './handle-command-message-main'; -import { handleWorkerResponseMessageMain } from './handle-worker-response-message-main'; -import type { MainResponseReceivedHandlerOptions } from './types'; -import { OrchestrationHandlerService } from '../../orchestration.handler.base.service'; - -@Service() -export class OrchestrationHandlerMainService extends OrchestrationHandlerService { - constructor(private readonly subscriber: Subscriber) { - super(); - } - - async initSubscriber(options: MainResponseReceivedHandlerOptions) { - await this.subscriber.subscribe('n8n.commands'); - await this.subscriber.subscribe('n8n.worker-response'); - - this.subscriber.setMessageHandler('n8n.worker-response', async (message: string) => { - await handleWorkerResponseMessageMain(message, options); - }); - - this.subscriber.setMessageHandler('n8n.commands', handleCommandMessageMain); - } -} diff --git a/packages/cli/src/services/orchestration/webhook/handle-command-message-webhook.ts b/packages/cli/src/services/orchestration/webhook/handle-command-message-webhook.ts deleted file mode 100644 index 542b8f1f52..0000000000 --- a/packages/cli/src/services/orchestration/webhook/handle-command-message-webhook.ts +++ /dev/null @@ -1,93 +0,0 @@ -import { InstanceSettings } from 'n8n-core'; -import Container from 'typedi'; -import { Logger } from 'winston'; - -import config from '@/config'; -import { MessageEventBus } from '@/eventbus/message-event-bus/message-event-bus'; -import { ExternalSecretsManager } from '@/external-secrets/external-secrets-manager.ee'; -import { License } from '@/license'; -import { CommunityPackagesService } from '@/services/community-packages.service'; - -import { messageToRedisServiceCommandObject, debounceMessageReceiver } from '../helpers'; - -export async function handleCommandMessageWebhook(messageString: string) { - const queueModeId = config.getEnv('redis.queueModeId'); - const isMainInstance = Container.get(InstanceSettings).instanceType === 'main'; - const message = messageToRedisServiceCommandObject(messageString); - const logger = Container.get(Logger); - - if (message) { - logger.debug( - `RedisCommandHandler(main): Received command message ${message.command} from ${message.senderId}`, - ); - - if ( - message.senderId === queueModeId || - (message.targets && !message.targets.includes(queueModeId)) - ) { - // Skipping command message because it's not for this instance - logger.debug( - `Skipping command message ${message.command} because it's not for this instance.`, - ); - return message; - } - - switch (message.command) { - case 'reloadLicense': - if (!debounceMessageReceiver(message, 500)) { - message.payload = { - result: 'debounced', - }; - return message; - } - - if (isMainInstance && 
!config.getEnv('multiMainSetup.enabled')) { - // at this point in time, only a single main instance is supported, thus this command _should_ never be caught currently - logger.error( - 'Received command to reload license via Redis, but this should not have happened and is not supported on the main instance yet.', - ); - return message; - } - await Container.get(License).reload(); - break; - case 'restartEventBus': - if (!debounceMessageReceiver(message, 200)) { - message.payload = { - result: 'debounced', - }; - return message; - } - await Container.get(MessageEventBus).restart(); - case 'reloadExternalSecretsProviders': - if (!debounceMessageReceiver(message, 200)) { - message.payload = { - result: 'debounced', - }; - return message; - } - await Container.get(ExternalSecretsManager).reloadAllProviders(); - break; - case 'community-package-install': - case 'community-package-update': - case 'community-package-uninstall': - if (!debounceMessageReceiver(message, 200)) { - return message; - } - const { packageName, packageVersion } = message.payload; - const communityPackagesService = Container.get(CommunityPackagesService); - if (message.command === 'community-package-uninstall') { - await communityPackagesService.removeNpmPackage(packageName); - } else { - await communityPackagesService.installOrUpdateNpmPackage(packageName, packageVersion); - } - break; - - default: - break; - } - - return message; - } - - return; -} diff --git a/packages/cli/src/services/orchestration/webhook/orchestration.handler.webhook.service.ts b/packages/cli/src/services/orchestration/webhook/orchestration.handler.webhook.service.ts deleted file mode 100644 index de7bded68e..0000000000 --- a/packages/cli/src/services/orchestration/webhook/orchestration.handler.webhook.service.ts +++ /dev/null @@ -1,19 +0,0 @@ -import { Service } from 'typedi'; - -import { Subscriber } from '@/scaling/pubsub/subscriber.service'; - -import { handleCommandMessageWebhook } from './handle-command-message-webhook'; -import { OrchestrationHandlerService } from '../../orchestration.handler.base.service'; - -@Service() -export class OrchestrationHandlerWebhookService extends OrchestrationHandlerService { - constructor(private readonly subscriber: Subscriber) { - super(); - } - - async initSubscriber() { - await this.subscriber.subscribe('n8n.commands'); - - this.subscriber.setMessageHandler('n8n.commands', handleCommandMessageWebhook); - } -} diff --git a/packages/cli/src/services/orchestration/worker/handle-command-message-worker.ts b/packages/cli/src/services/orchestration/worker/handle-command-message-worker.ts deleted file mode 100644 index 45b6bd57c9..0000000000 --- a/packages/cli/src/services/orchestration/worker/handle-command-message-worker.ts +++ /dev/null @@ -1,155 +0,0 @@ -import { jsonParse } from 'n8n-workflow'; -import os from 'node:os'; -import Container from 'typedi'; - -import { N8N_VERSION } from '@/constants'; -import { MessageEventBus } from '@/eventbus/message-event-bus/message-event-bus'; -import { ExternalSecretsManager } from '@/external-secrets/external-secrets-manager.ee'; -import { License } from '@/license'; -import { Logger } from '@/logger'; -import { COMMAND_PUBSUB_CHANNEL } from '@/scaling/constants'; -import type { RedisServiceCommandObject } from '@/scaling/redis/redis-service-commands'; -import { CommunityPackagesService } from '@/services/community-packages.service'; - -import type { WorkerCommandReceivedHandlerOptions } from './types'; -import { debounceMessageReceiver, getOsCpuString } from '../helpers'; 
- -// eslint-disable-next-line complexity -export async function getWorkerCommandReceivedHandler( - messageString: string, - options: WorkerCommandReceivedHandlerOptions, -) { - if (!messageString) return; - - const logger = Container.get(Logger); - let message: RedisServiceCommandObject; - try { - message = jsonParse(messageString); - } catch { - logger.debug( - `Received invalid message via channel ${COMMAND_PUBSUB_CHANNEL}: "${messageString}"`, - ); - return; - } - if (message) { - logger.debug( - `RedisCommandHandler(worker): Received command message ${message.command} from ${message.senderId}`, - ); - if (message.targets && !message.targets.includes(options.queueModeId)) { - return; // early return if the message is not for this worker - } - switch (message.command) { - case 'getStatus': - if (!debounceMessageReceiver(message, 500)) return; - await options.publisher.publishWorkerResponse({ - workerId: options.queueModeId, - command: 'getStatus', - payload: { - workerId: options.queueModeId, - runningJobsSummary: options.getRunningJobsSummary(), - freeMem: os.freemem(), - totalMem: os.totalmem(), - uptime: process.uptime(), - loadAvg: os.loadavg(), - cpus: getOsCpuString(), - arch: os.arch(), - platform: os.platform(), - hostname: os.hostname(), - interfaces: Object.values(os.networkInterfaces()).flatMap((interfaces) => - (interfaces ?? [])?.map((net) => ({ - family: net.family, - address: net.address, - internal: net.internal, - })), - ), - version: N8N_VERSION, - }, - }); - break; - case 'getId': - if (!debounceMessageReceiver(message, 500)) return; - await options.publisher.publishWorkerResponse({ - workerId: options.queueModeId, - command: 'getId', - }); - break; - case 'restartEventBus': - if (!debounceMessageReceiver(message, 500)) return; - try { - await Container.get(MessageEventBus).restart(); - await options.publisher.publishWorkerResponse({ - workerId: options.queueModeId, - command: 'restartEventBus', - payload: { - result: 'success', - }, - }); - } catch (error) { - await options.publisher.publishWorkerResponse({ - workerId: options.queueModeId, - command: 'restartEventBus', - payload: { - result: 'error', - error: (error as Error).message, - }, - }); - } - break; - case 'reloadExternalSecretsProviders': - if (!debounceMessageReceiver(message, 500)) return; - try { - await Container.get(ExternalSecretsManager).reloadAllProviders(); - await options.publisher.publishWorkerResponse({ - workerId: options.queueModeId, - command: 'reloadExternalSecretsProviders', - payload: { - result: 'success', - }, - }); - } catch (error) { - await options.publisher.publishWorkerResponse({ - workerId: options.queueModeId, - command: 'reloadExternalSecretsProviders', - payload: { - result: 'error', - error: (error as Error).message, - }, - }); - } - break; - case 'community-package-install': - case 'community-package-update': - case 'community-package-uninstall': - if (!debounceMessageReceiver(message, 500)) return; - const { packageName, packageVersion } = message.payload; - const communityPackagesService = Container.get(CommunityPackagesService); - if (message.command === 'community-package-uninstall') { - await communityPackagesService.removeNpmPackage(packageName); - } else { - await communityPackagesService.installOrUpdateNpmPackage(packageName, packageVersion); - } - break; - case 'reloadLicense': - if (!debounceMessageReceiver(message, 500)) return; - await Container.get(License).reload(); - break; - case 'stopWorker': - if (!debounceMessageReceiver(message, 500)) return; - // TODO: 
implement proper shutdown - // await this.stopProcess(); - break; - default: - if ( - message.command === 'relay-execution-lifecycle-event' || - message.command === 'clear-test-webhooks' - ) { - break; // meant only for main - } - - logger.debug( - `Received unknown command via channel ${COMMAND_PUBSUB_CHANNEL}: "${message.command}"`, - ); - break; - } - } -} diff --git a/packages/cli/src/services/orchestration/worker/orchestration.handler.worker.service.ts b/packages/cli/src/services/orchestration/worker/orchestration.handler.worker.service.ts deleted file mode 100644 index 06113d7344..0000000000 --- a/packages/cli/src/services/orchestration/worker/orchestration.handler.worker.service.ts +++ /dev/null @@ -1,22 +0,0 @@ -import { Service } from 'typedi'; - -import { Subscriber } from '@/scaling/pubsub/subscriber.service'; - -import { getWorkerCommandReceivedHandler } from './handle-command-message-worker'; -import type { WorkerCommandReceivedHandlerOptions } from './types'; -import { OrchestrationHandlerService } from '../../orchestration.handler.base.service'; - -@Service() -export class OrchestrationHandlerWorkerService extends OrchestrationHandlerService { - constructor(private readonly subscriber: Subscriber) { - super(); - } - - async initSubscriber(options: WorkerCommandReceivedHandlerOptions) { - await this.subscriber.subscribe('n8n.commands'); - - this.subscriber.setMessageHandler('n8n.commands', async (message: string) => { - await getWorkerCommandReceivedHandler(message, options); - }); - } -} diff --git a/packages/cli/src/services/pruning.service.ts b/packages/cli/src/services/pruning.service.ts index b0ebc99dfd..48d4b0db3b 100644 --- a/packages/cli/src/services/pruning.service.ts +++ b/packages/cli/src/services/pruning.service.ts @@ -6,7 +6,7 @@ import config from '@/config'; import { inTest, TIME } from '@/constants'; import { ExecutionRepository } from '@/databases/repositories/execution.repository'; import { OnShutdown } from '@/decorators/on-shutdown'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { OrchestrationService } from './orchestration.service'; diff --git a/packages/cli/src/services/public-api-key.service.ts b/packages/cli/src/services/public-api-key.service.ts new file mode 100644 index 0000000000..e689f3c019 --- /dev/null +++ b/packages/cli/src/services/public-api-key.service.ts @@ -0,0 +1,80 @@ +import { randomBytes } from 'node:crypto'; +import Container, { Service } from 'typedi'; + +import { ApiKey } from '@/databases/entities/api-key'; +import type { User } from '@/databases/entities/user'; +import { ApiKeyRepository } from '@/databases/repositories/api-key.repository'; +import { UserRepository } from '@/databases/repositories/user.repository'; + +export const API_KEY_PREFIX = 'n8n_api_'; + +@Service() +export class PublicApiKeyService { + constructor(private readonly apiKeyRepository: ApiKeyRepository) {} + + /** + * Creates a new public API key for the specified user. + * @param user - The user for whom the API key is being created. + * @returns A promise that resolves to the newly created API key. + */ + async createPublicApiKeyForUser(user: User) { + const apiKey = this.createApiKeyString(); + await this.apiKeyRepository.upsert( + this.apiKeyRepository.create({ + userId: user.id, + apiKey, + label: 'My API Key', + }), + ['apiKey'], + ); + + return await this.apiKeyRepository.findOneByOrFail({ apiKey }); + } + + /** + * Retrieves and redacts API keys for a given user. 
+ * @param user - The user for whom to retrieve and redact API keys. + * @returns A promise that resolves to an array of objects containing redacted API keys. + */ + async getRedactedApiKeysForUser(user: User) { + const apiKeys = await this.apiKeyRepository.findBy({ userId: user.id }); + return apiKeys.map((apiKeyRecord) => ({ + ...apiKeyRecord, + apiKey: this.redactApiKey(apiKeyRecord.apiKey), + })); + } + + async deleteApiKeyForUser(user: User, apiKeyId: string) { + await this.apiKeyRepository.delete({ userId: user.id, id: apiKeyId }); + } + + async getUserForApiKey(apiKey: string) { + return await Container.get(UserRepository) + .createQueryBuilder('user') + .innerJoin(ApiKey, 'apiKey', 'apiKey.userId = user.id') + .where('apiKey.apiKey = :apiKey', { apiKey }) + .select('user') + .getOne(); + } + + /** + * Redacts an API key by keeping the first few characters and replacing the rest with asterisks. + * @param apiKey - The API key to be redacted. + * @returns The redacted API key with a fixed prefix and asterisks replacing the rest of the characters. + * @example + * ```typescript + * const redactedKey = publicApiKeyService.redactApiKey('n8n_api_1234567890abcdef'); + * console.log(redactedKey); // Output: 'n8n_api_12345***********' + * ``` + */ + redactApiKey(apiKey: string) { + const keepLength = 5; + return ( + API_KEY_PREFIX + + apiKey.slice(API_KEY_PREFIX.length, API_KEY_PREFIX.length + keepLength) + + '*'.repeat(apiKey.length - API_KEY_PREFIX.length - keepLength) + ); + } + + createApiKeyString = () => `${API_KEY_PREFIX}${randomBytes(40).toString('hex')}`; }
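
A sketch of the lifecycle the new service supports; hypothetical usage, assuming `user` is a loaded `User` entity:

```typescript
import { Container } from 'typedi';

import type { User } from '@/databases/entities/user';
import { PublicApiKeyService } from '@/services/public-api-key.service';

// Sketch: create, list (redacted), and revoke a public API key for a user.
async function rotateApiKey(user: User) {
  const publicApiKeyService = Container.get(PublicApiKeyService);

  const { id: apiKeyId } = await publicApiKeyService.createPublicApiKeyForUser(user);

  // Listing never exposes full keys: each record comes back redacted,
  // e.g. 'n8n_api_12345***********'.
  const redactedKeys = await publicApiKeyService.getRedactedApiKeysForUser(user);
  console.log(redactedKeys.map((k) => k.apiKey));

  // Deletion is scoped to the owning user's keys.
  await publicApiKeyService.deleteApiKeyForUser(user, apiKeyId);
}
```

diff --git a/packages/cli/src/services/redis-client.service.ts b/packages/cli/src/services/redis-client.service.ts index dc0d3b8cde..5eaa6edc1d 100644 --- a/packages/cli/src/services/redis-client.service.ts +++ b/packages/cli/src/services/redis-client.service.ts @@ -3,18 +3,46 @@ import ioRedis from 'ioredis'; import type { Cluster, RedisOptions } from 'ioredis'; import { Service } from 'typedi'; -import { Logger } from '@/logger'; +import { Debounce } from '@/decorators/debounce'; +import { Logger } from '@/logging/logger.service'; +import { TypedEmitter } from '@/typed-emitter'; import type { RedisClientType } from '../scaling/redis/redis.types'; +type RedisEventMap = { + 'connection-lost': number; + 'connection-recovered': never; +}; + @Service() -export class RedisClientService { +export class RedisClientService extends TypedEmitter<RedisEventMap> { private readonly clients = new Set(); + private readonly config = { + /** How long (in ms) to try to reconnect for before exiting. */ + maxTimeout: this.globalConfig.queue.bull.redis.timeoutThreshold, + + /** How long (in ms) to wait between reconnection attempts. */ + retryInterval: 1000, + + /** How long (in ms) to wait before resetting the cumulative timeout. */ + resetLength: 30_000, + }; + + /** Whether any client has lost connection to Redis. */ + private lostConnection = false; + constructor( private readonly logger: Logger, private readonly globalConfig: GlobalConfig, - ) {} + ) { + super(); + this.registerListeners(); + } + + isConnected() { + return !this.lostConnection; + } createClient(arg: { type: RedisClientType; extraOptions?: RedisOptions }) { const client = @@ -22,6 +50,19 @@ export class RedisClientService { ? 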
this.createClusterClient(arg) : this.createRegularClient(arg); + client.on('error', (error) => { + if ('code' in error && error.code === 'ECONNREFUSED') return; // handled by retryStrategy + + this.logger.error(`[Redis client] ${error.message}`, { error }); + }); + + client.on('ready', () => { + if (this.lostConnection) { + this.emit('connection-recovered'); + this.lostConnection = false; + } + }); + this.clients.add(client); return client; @@ -118,32 +159,29 @@ export class RedisClientService { * Reset the cumulative timeout if >30s between reconnection attempts. */ private retryStrategy() { - const RETRY_INTERVAL = 500; // ms - const RESET_LENGTH = 30_000; // ms - const MAX_TIMEOUT = this.globalConfig.queue.bull.redis.timeoutThreshold; - let lastAttemptTs = 0; let cumulativeTimeout = 0; return () => { const nowTs = Date.now(); - if (nowTs - lastAttemptTs > RESET_LENGTH) { + if (nowTs - lastAttemptTs > this.config.resetLength) { cumulativeTimeout = 0; lastAttemptTs = nowTs; } else { cumulativeTimeout += nowTs - lastAttemptTs; lastAttemptTs = nowTs; - if (cumulativeTimeout > MAX_TIMEOUT) { - this.logger.error(`[Redis] Unable to connect after max timeout of ${MAX_TIMEOUT} ms`); - this.logger.error('Exiting process...'); + if (cumulativeTimeout > this.config.maxTimeout) { + const maxTimeout = Math.round(this.config.maxTimeout / 1000) + 's'; + this.logger.error(`Unable to connect to Redis after trying to connect for ${maxTimeout}`); + this.logger.error('Exiting process due to Redis connection error'); process.exit(1); } } - this.logger.warn('Redis unavailable - trying to reconnect...'); + this.emit('connection-lost', cumulativeTimeout); - return RETRY_INTERVAL; + return this.config.retryInterval; }; } @@ -156,4 +194,40 @@ export class RedisClientService { return { host, port: parseInt(port) }; }); } + + @Debounce(1000) + emit<Event extends keyof RedisEventMap>( + event: Event, + ...args: Array<RedisEventMap[Event]> + ): boolean { + return super.emit(event, ...args); + } + + private registerListeners() { + const { maxTimeout: maxTimeoutMs, retryInterval: retryIntervalMs } = this.config; + + const retryInterval = this.formatTimeout(retryIntervalMs); + const maxTimeout = this.formatTimeout(maxTimeoutMs); + + this.on('connection-lost', (cumulativeTimeoutMs) => { + const cumulativeTimeout = this.formatTimeout(cumulativeTimeoutMs); + const reconnectionMsg = `Trying to reconnect in ${retryInterval}...`; + const timeoutDetails = `${cumulativeTimeout}/${maxTimeout}`; + + this.logger.warn(`Lost Redis connection. 
${reconnectionMsg} (${timeoutDetails})`); + + this.lostConnection = true; + }); + + this.on('connection-recovered', () => { + this.logger.info('Recovered Redis connection'); + }); + } + + private formatTimeout(timeoutMs: number) { + const timeoutSeconds = timeoutMs / 1000; + const roundedTimeout = Math.round(timeoutSeconds * 10) / 10; + + return roundedTimeout + 's'; + } }
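
Since `RedisClientService` now extends `TypedEmitter`, consumers such as the worker's readiness check can call `isConnected()` or subscribe to the debounced connectivity events instead of pinging the queue themselves. A sketch of a hypothetical subscriber:

```typescript
import { Container } from 'typedi';

import { RedisClientService } from '@/services/redis-client.service';

const redisClientService = Container.get(RedisClientService);

// 'connection-lost' carries the cumulative reconnection timeout in ms; both
// events are debounced to at most one emission per second via @Debounce(1000).
redisClientService.on('connection-lost', (cumulativeTimeoutMs) => {
  console.warn(`Redis unreachable for ~${cumulativeTimeoutMs} ms, reporting not ready`);
});

redisClientService.on('connection-recovered', () => {
  console.info('Redis connection recovered, reporting ready again');
});
```

diff --git a/packages/cli/src/services/user.service.ts b/packages/cli/src/services/user.service.ts index 259a30666c..1668878a8c 100644 --- a/packages/cli/src/services/user.service.ts +++ b/packages/cli/src/services/user.service.ts @@ -7,7 +7,7 @@ import { UserRepository } from '@/databases/repositories/user.repository'; import { InternalServerError } from '@/errors/response-errors/internal-server.error'; import { EventService } from '@/events/event.service'; import type { Invitation, PublicUser } from '@/interfaces'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import type { PostHogClient } from '@/posthog'; import type { UserRequest } from '@/requests'; import { UrlService } from '@/services/url.service'; @@ -58,7 +58,7 @@ export class UserService { withScopes?: boolean; }, ) { - const { password, updatedAt, apiKey, authIdentities, ...rest } = user; + const { password, updatedAt, authIdentities, ...rest } = user; const ldapIdentity = authIdentities?.find((i) => i.providerType === 'ldap'); diff --git a/packages/cli/src/services/workflow-import.service.ts b/packages/cli/src/services/workflow-import.service.ts index c509dfbc12..86ba742ef6 100644 --- a/packages/cli/src/services/workflow-import.service.ts +++ b/packages/cli/src/services/workflow-import.service.ts @@ -11,7 +11,7 @@ import { CredentialsRepository } from '@/databases/repositories/credentials.repo import { TagRepository } from '@/databases/repositories/tag.repository'; import * as Db from '@/db'; import type { ICredentialsDb } from '@/interfaces'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { replaceInvalidCredentials } from '@/workflow-helpers'; @Service() @@ -88,8 +88,7 @@ export class WorkflowImportService { try { await replaceInvalidCredentials(workflow); } catch (e) { - const error = e instanceof Error ? 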
e : new Error(`${e}`); - this.logger.error('Failed to replace invalid credential', error); + this.logger.error('Failed to replace invalid credential', { error: e }); } } diff --git a/packages/cli/src/services/workflow-statistics.service.ts b/packages/cli/src/services/workflow-statistics.service.ts index 42c6d14638..53cbac5094 100644 --- a/packages/cli/src/services/workflow-statistics.service.ts +++ b/packages/cli/src/services/workflow-statistics.service.ts @@ -4,7 +4,7 @@ import { Service } from 'typedi'; import { StatisticsNames } from '@/databases/entities/workflow-statistics'; import { WorkflowStatisticsRepository } from '@/databases/repositories/workflow-statistics.repository'; import { EventService } from '@/events/event.service'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { UserService } from '@/services/user.service'; import { TypedEmitter } from '@/typed-emitter'; @@ -110,7 +110,7 @@ export class WorkflowStatisticsService extends TypedEmitter Promise | void; export type ServiceClass = Class>; diff --git a/packages/cli/src/sse-channel.d.ts b/packages/cli/src/sse-channel.d.ts deleted file mode 100644 index 6ea435b361..0000000000 --- a/packages/cli/src/sse-channel.d.ts +++ /dev/null @@ -1,17 +0,0 @@ -import type { PushRequest, PushResponse } from './push/types'; - -declare module 'sse-channel' { - declare class Channel { - constructor(); - - on(event: string, handler: (channel: string, res: PushResponse) => void): void; - - removeClient: (res: PushResponse) => void; - - addClient: (req: PushRequest, res: PushResponse) => void; - - send: (msg: string, clients?: PushResponse[]) => void; - } - - export = Channel; -} diff --git a/packages/cli/src/sso/saml/__tests__/saml.service.ee.test.ts b/packages/cli/src/sso/saml/__tests__/saml.service.ee.test.ts index 418f29c688..5dda04dc18 100644 --- a/packages/cli/src/sso/saml/__tests__/saml.service.ee.test.ts +++ b/packages/cli/src/sso/saml/__tests__/saml.service.ee.test.ts @@ -2,7 +2,7 @@ import type express from 'express'; import { mock } from 'jest-mock-extended'; import type { IdentityProviderInstance, ServiceProviderInstance } from 'samlify'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { UrlService } from '@/services/url.service'; import * as samlHelpers from '@/sso/saml/saml-helpers'; import { SamlService } from '@/sso/saml/saml.service.ee'; diff --git a/packages/cli/src/sso/saml/saml-validator.ts b/packages/cli/src/sso/saml/saml-validator.ts index 6705320bee..06a93cc4fb 100644 --- a/packages/cli/src/sso/saml/saml-validator.ts +++ b/packages/cli/src/sso/saml/saml-validator.ts @@ -1,7 +1,7 @@ import { Container } from 'typedi'; import type { XMLFileInfo } from 'xmllint-wasm'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; let xml: XMLFileInfo; let xmldsigCore: XMLFileInfo; diff --git a/packages/cli/src/sso/saml/saml.service.ee.ts b/packages/cli/src/sso/saml/saml.service.ee.ts index f30fa6ff8e..6b07919730 100644 --- a/packages/cli/src/sso/saml/saml.service.ee.ts +++ b/packages/cli/src/sso/saml/saml.service.ee.ts @@ -12,7 +12,7 @@ import { SettingsRepository } from '@/databases/repositories/settings.repository import { UserRepository } from '@/databases/repositories/user.repository'; import { AuthError } from '@/errors/response-errors/auth.error'; import { BadRequestError } from '@/errors/response-errors/bad-request.error'; -import { Logger } from '@/logger'; +import { Logger } from 
'@/logging/logger.service'; import { UrlService } from '@/services/url.service'; import { SAML_PREFERENCES_DB_KEY } from './constants'; diff --git a/packages/cli/src/subworkflows/subworkflow-policy-checker.service.ts b/packages/cli/src/subworkflows/subworkflow-policy-checker.service.ts index f630392280..cf9b122e72 100644 --- a/packages/cli/src/subworkflows/subworkflow-policy-checker.service.ts +++ b/packages/cli/src/subworkflows/subworkflow-policy-checker.service.ts @@ -5,7 +5,7 @@ import { Service } from 'typedi'; import type { Project } from '@/databases/entities/project'; import { SubworkflowPolicyDenialError } from '@/errors/subworkflow-policy-denial.error'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { AccessService } from '@/services/access.service'; import { OwnershipService } from '@/services/ownership.service'; import { UrlService } from '@/services/url.service'; diff --git a/packages/cli/src/telemetry/__tests__/telemetry.test.ts b/packages/cli/src/telemetry/__tests__/telemetry.test.ts index 3851e4523a..04a6cecfca 100644 --- a/packages/cli/src/telemetry/__tests__/telemetry.test.ts +++ b/packages/cli/src/telemetry/__tests__/telemetry.test.ts @@ -1,3 +1,4 @@ +import type { GlobalConfig } from '@n8n/config'; import type RudderStack from '@rudderstack/rudder-sdk-node'; import { mock } from 'jest-mock-extended'; import { InstanceSettings } from 'n8n-core'; @@ -41,10 +42,17 @@ describe('Telemetry', () => { beforeEach(async () => { spyTrack.mockClear(); - const postHog = new PostHogClient(instanceSettings); + const postHog = new PostHogClient(instanceSettings, mock()); await postHog.init(); - telemetry = new Telemetry(mock(), postHog, mock(), instanceSettings, mock()); + telemetry = new Telemetry( + mock(), + postHog, + mock(), + instanceSettings, + mock(), + mock({ logging: { level: 'info', outputs: ['console'] } }), + ); // @ts-expect-error Assigning to private property telemetry.rudderStack = mockRudderStack; }); @@ -259,6 +267,44 @@ describe('Telemetry', () => { expect(execBuffer['2'].prod_success?.first).toEqual(execTime1); }); }); + + describe('Rudderstack', () => { + test("should call rudderStack.identify() with a fake IP address to instruct Rudderstack to not use the user's IP address", () => { + const traits = { + name: 'Test User', + age: 30, + isActive: true, + }; + + telemetry.identify(traits); + + const expectedArgs = { + userId: instanceId, + traits: { ...traits, instanceId }, + context: { + ip: '0.0.0.0', // RudderStack anonymized IP + }, + }; + + expect(mockRudderStack.identify).toHaveBeenCalledWith(expectedArgs); + }); + + test("should call rudderStack.track() with a fake IP address to instruct Rudderstack to not use the user's IP address", () => { + const eventName = 'Test Event'; + const properties = { user_id: '1234' }; + + telemetry.track(eventName, properties); + + expect(mockRudderStack.track).toHaveBeenCalledWith( + expect.objectContaining({ + event: eventName, + context: { + ip: '0.0.0.0', // RudderStack anonymized IP + }, + }), + ); + }); + }); }); const fakeJestSystemTime = (dateTime: string | Date): Date => {
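The two Rudderstack tests above pin down the intent of the telemetry change: every `identify()` and `track()` call now carries a zeroed `context.ip` so RudderStack does not record the caller's real IP address. The implementation follows in `telemetry/index.ts` below; as a condensed sketch of the pattern (the client interface here is a simplified stand-in, not the actual `@rudderstack/rudder-sdk-node` surface):

```ts
// Simplified stand-in for the RudderStack client; not the real SDK surface.
interface AnalyticsClient {
  identify(payload: { userId: string; traits: object; context?: { ip?: string } }): void;
  track(payload: { userId: string; event: string; properties?: object; context?: { ip?: string } }): void;
}

// Sending an explicit fake IP instructs RudderStack not to use the
// user's real IP address (the behavior the tests above assert).
const ANONYMIZED_IP = '0.0.0.0';

function identifyAnonymized(client: AnalyticsClient, userId: string, traits: object) {
  client.identify({ userId, traits, context: { ip: ANONYMIZED_IP } });
}

function trackAnonymized(client: AnalyticsClient, userId: string, event: string, properties?: object) {
  client.track({ userId, event, properties, context: { ip: ANONYMIZED_IP } });
}
```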
diff --git a/packages/cli/src/telemetry/index.ts b/packages/cli/src/telemetry/index.ts index 8ffca478c1..d9a8e590f4 100644 --- a/packages/cli/src/telemetry/index.ts +++ b/packages/cli/src/telemetry/index.ts @@ -1,3 +1,4 @@ +import { GlobalConfig } from '@n8n/config'; import type RudderStack from '@rudderstack/rudder-sdk-node'; import axios from 'axios'; import { InstanceSettings } from 'n8n-core'; @@ -13,7 +14,7 @@ import { WorkflowRepository } from '@/databases/repositories/workflow.repository import { OnShutdown } from '@/decorators/on-shutdown'; import type { IExecutionTrackProperties } from '@/interfaces'; import { License } from '@/license'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { PostHogClient } from '@/posthog'; import { SourceControlPreferencesService } from '../environments/source-control/source-control-preferences.service.ee'; @@ -49,6 +50,7 @@ export class Telemetry { private readonly license: License, private readonly instanceSettings: InstanceSettings, private readonly workflowRepository: WorkflowRepository, + private readonly globalConfig: GlobalConfig, ) {} async init() { @@ -62,7 +64,7 @@ return; } - const logLevel = config.getEnv('logs.level'); + const logLevel = this.globalConfig.logging.level; const { default: RudderStack } = await import('@rudderstack/rudder-sdk-node'); const axiosInstance = axios.create(); @@ -186,6 +188,10 @@ this.rudderStack.identify({ userId: instanceId, traits: { ...traits, instanceId }, + context: { + // provide a fake IP address to instruct RudderStack to not use the user's IP address + ip: '0.0.0.0', + }, }); } @@ -210,13 +216,18 @@ userId: `${instanceId}${user_id ? `#${user_id}` : ''}`, event: eventName, properties: updatedProperties, + context: {}, }; if (withPostHog) { this.postHog?.track(payload); } - return this.rudderStack.track(payload); + return this.rudderStack.track({ + ...payload, + // provide a fake IP address to instruct RudderStack to not use the user's IP address + context: { ...payload.context, ip: '0.0.0.0' }, + }); } // test helpers diff --git a/packages/cli/src/user-management/email/node-mailer.ts b/packages/cli/src/user-management/email/node-mailer.ts index cfc7247546..661c3fed7f 100644 --- a/packages/cli/src/user-management/email/node-mailer.ts +++ b/packages/cli/src/user-management/email/node-mailer.ts @@ -7,7 +7,7 @@ import { createTransport } from 'nodemailer'; import type SMTPConnection from 'nodemailer/lib/smtp-connection'; import { Service } from 'typedi'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import type { MailData, SendEmailResult } from './interfaces'; diff --git a/packages/cli/src/user-management/email/user-management-mailer.ts b/packages/cli/src/user-management/email/user-management-mailer.ts index 6703354f07..b5df958d7d 100644 --- a/packages/cli/src/user-management/email/user-management-mailer.ts +++ b/packages/cli/src/user-management/email/user-management-mailer.ts @@ -11,7 +11,7 @@ import type { WorkflowEntity } from '@/databases/entities/workflow-entity'; import { UserRepository } from '@/databases/repositories/user.repository'; import { InternalServerError } from '@/errors/response-errors/internal-server.error'; import { EventService } from '@/events/event.service'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { UrlService } from '@/services/url.service'; import { toError } from '@/utils'; diff --git a/packages/cli/src/utils/__tests__/path-util.test.ts b/packages/cli/src/utils/__tests__/path-util.test.ts new file mode 100644 index 0000000000..d67e97f9e0 --- /dev/null +++ b/packages/cli/src/utils/__tests__/path-util.test.ts @@ -0,0 +1,24 @@ +import { isContainedWithin } from '../path-util'; + +describe('isContainedWithin', () => { + it('should return true when parent and child
paths are the same', () => { + expect(isContainedWithin('/some/parent/folder', '/some/parent/folder')).toBe(true); + }); + + test.each([ + ['/some/parent/folder', '/some/parent/folder/subfolder/file.txt'], + ['/some/parent/folder', '/some/parent/folder/../folder/subfolder/file.txt'], + ['/some/parent/folder/', '/some/parent/folder/subfolder/file.txt'], + ['/some/parent/folder', '/some/parent/folder/subfolder/'], + ])('should return true for parent %s and child %s', (parent, child) => { + expect(isContainedWithin(parent, child)).toBe(true); + }); + + test.each([ + ['/some/parent/folder', '/some/other/folder/file.txt'], + ['/some/parent/folder', '/some/parent/folder_but_not_really'], + ['/one/path', '/another/path'], + ])('should return false for parent %s and child %s', (parent, child) => { + expect(isContainedWithin(parent, child)).toBe(false); + }); +}); diff --git a/packages/cli/src/utils/path-util.ts b/packages/cli/src/utils/path-util.ts new file mode 100644 index 0000000000..f42dc01890 --- /dev/null +++ b/packages/cli/src/utils/path-util.ts @@ -0,0 +1,16 @@ +import * as path from 'node:path'; + +/** + * Checks if the given childPath is contained within the parentPath. Resolves + * the paths before comparing them, so that relative paths are also supported. + */ +export function isContainedWithin(parentPath: string, childPath: string): boolean { + parentPath = path.resolve(parentPath); + childPath = path.resolve(childPath); + + if (parentPath === childPath) { + return true; + } + + return childPath.startsWith(parentPath + path.sep); +}
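`isContainedWithin` is the primitive behind the "forbid access to files outside the source control work directory" fix: resolving both paths first defeats `..` traversal, and comparing against `parentPath + path.sep` rules out sibling directories that merely share a name prefix. A small usage sketch (the `resolveWorkfilePath` helper and the paths are illustrative, not part of this diff):

```ts
import * as path from 'node:path';
import { isContainedWithin } from '@/utils/path-util';

// Hypothetical guard: resolve a user-supplied relative path against the work
// directory and refuse anything that escapes it, e.g. via '..' segments.
export function resolveWorkfilePath(workDir: string, userSuppliedPath: string): string {
  const candidate = path.join(workDir, userSuppliedPath);
  if (!isContainedWithin(workDir, candidate)) {
    throw new Error(`Path "${userSuppliedPath}" is outside the work directory`);
  }
  return candidate;
}

// resolveWorkfilePath('/data/git-work', 'workflows/a.json') -> '/data/git-work/workflows/a.json'
// resolveWorkfilePath('/data/git-work', '../../etc/passwd') -> throws
```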
diff --git a/packages/cli/src/utlity.types.ts b/packages/cli/src/utlity.types.ts new file mode 100644 index 0000000000..c126a31717 --- /dev/null +++ b/packages/cli/src/utlity.types.ts @@ -0,0 +1,6 @@ +/** + * Display an intersection type without implementation details. + * @doc https://effectivetypescript.com/2022/02/25/gentips-4-display/ + */ +// eslint-disable-next-line @typescript-eslint/ban-types +export type Resolve = T extends Function ? T : { [K in keyof T]: T[K] }; diff --git a/packages/cli/src/wait-tracker.ts b/packages/cli/src/wait-tracker.ts index 7792cf5285..e999c30401 100644 --- a/packages/cli/src/wait-tracker.ts +++ b/packages/cli/src/wait-tracker.ts @@ -6,7 +6,7 @@ import { import { Service } from 'typedi'; import { ExecutionRepository } from '@/databases/repositories/execution.repository'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { OrchestrationService } from '@/services/orchestration.service'; import { OwnershipService } from '@/services/ownership.service'; import { WorkflowRunner } from '@/workflow-runner'; @@ -28,7 +28,9 @@ export class WaitTracker { private readonly ownershipService: OwnershipService, private readonly workflowRunner: WorkflowRunner, private readonly orchestrationService: OrchestrationService, - ) {} + ) { + this.logger = this.logger.withScope('executions'); + } has(executionId: string) { return this.waitingExecutions[executionId] !== undefined; } @@ -50,7 +52,7 @@ } private startTracking() { - this.logger.debug('Wait tracker started tracking waiting executions'); + this.logger.debug('Started tracking waiting executions'); // Poll every 60 seconds for a list of upcoming executions this.mainTimer = setInterval(() => { @@ -61,7 +63,7 @@ } async getWaitingExecutions() { - this.logger.debug('Wait tracker querying database for waiting executions'); + this.logger.debug('Querying database for waiting executions'); const executions = await this.executionRepository.getWaitingExecutions(); @@ -71,7 +73,7 @@ const executionIds = executions.map((execution) => execution.id).join(', '); this.logger.debug( - `Wait tracker found ${executions.length} executions. Setting timer for IDs: ${executionIds}`, + `Found ${executions.length} executions.
Setting timer for IDs: ${executionIds}`, ); // Add timers for each waiting execution that they get started at the correct time @@ -99,7 +101,7 @@ export class WaitTracker { } startExecution(executionId: string) { - this.logger.debug(`Wait tracker resuming execution ${executionId}`, { executionId }); + this.logger.debug(`Resuming execution ${executionId}`, { executionId }); delete this.waitingExecutions[executionId]; (async () => { @@ -141,7 +143,7 @@ export class WaitTracker { } stopTracking() { - this.logger.debug('Wait tracker shutting down'); + this.logger.debug('Shutting down wait tracking'); clearInterval(this.mainTimer); Object.keys(this.waitingExecutions).forEach((executionId) => { diff --git a/packages/cli/src/webhooks/__tests__/waiting-webhooks.test.ts b/packages/cli/src/webhooks/__tests__/waiting-webhooks.test.ts index 7a8dd8b854..892d87e773 100644 --- a/packages/cli/src/webhooks/__tests__/waiting-webhooks.test.ts +++ b/packages/cli/src/webhooks/__tests__/waiting-webhooks.test.ts @@ -63,7 +63,7 @@ describe('WaitingWebhooks', () => { * Arrange */ executionRepository.findSingleExecution.mockResolvedValue( - mock({ finished: true }), + mock({ finished: true, workflowData: { nodes: [] } }), ); /** diff --git a/packages/cli/src/webhooks/live-webhooks.ts b/packages/cli/src/webhooks/live-webhooks.ts index e9314060d7..458701caee 100644 --- a/packages/cli/src/webhooks/live-webhooks.ts +++ b/packages/cli/src/webhooks/live-webhooks.ts @@ -1,12 +1,12 @@ import type { Response } from 'express'; -import { Workflow, NodeHelpers } from 'n8n-workflow'; +import { Workflow, NodeHelpers, CHAT_TRIGGER_NODE_TYPE } from 'n8n-workflow'; import type { INode, IWebhookData, IHttpRequestMethods } from 'n8n-workflow'; import { Service } from 'typedi'; import { WorkflowRepository } from '@/databases/repositories/workflow.repository'; import { NotFoundError } from '@/errors/response-errors/not-found.error'; import { WebhookNotFoundError } from '@/errors/response-errors/webhook-not-found.error'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { NodeTypes } from '@/node-types'; import * as WebhookHelpers from '@/webhooks/webhook-helpers'; import { WebhookService } from '@/webhooks/webhook.service'; @@ -47,12 +47,18 @@ export class LiveWebhooks implements IWebhookManager { select: ['nodes'], }); + const isChatWebhookNode = (type: string, webhookId?: string) => + type === CHAT_TRIGGER_NODE_TYPE && `${webhookId}/chat` === path; + const nodes = workflowData?.nodes; const webhookNode = nodes?.find( - ({ type, parameters, typeVersion }) => - parameters?.path === path && - (parameters?.httpMethod ?? 'GET') === httpMethod && - 'webhook' in this.nodeTypes.getByNameAndVersion(type, typeVersion), + ({ type, parameters, typeVersion, webhookId }) => + (parameters?.path === path && + (parameters?.httpMethod ?? 
'GET') === httpMethod && + 'webhook' in this.nodeTypes.getByNameAndVersion(type, typeVersion)) || + // Chat Trigger doesn't have a configurable path and always uses POST, so + // we need to use webhookId for matching + isChatWebhookNode(type, webhookId), ); return webhookNode?.parameters?.options as WebhookAccessControlOptions; } @@ -154,8 +160,9 @@ } const webhook = await this.webhookService.findWebhook(httpMethod, path); + const webhookMethods = await this.getWebhookMethods(path); if (webhook === null) { - throw new WebhookNotFoundError({ path, httpMethod }, { hint: 'production' }); + throw new WebhookNotFoundError({ path, httpMethod, webhookMethods }, { hint: 'production' }); } return webhook; diff --git a/packages/cli/src/webhooks/waiting-webhooks.ts b/packages/cli/src/webhooks/waiting-webhooks.ts index 6493fde981..e644c065f3 100644 --- a/packages/cli/src/webhooks/waiting-webhooks.ts +++ b/packages/cli/src/webhooks/waiting-webhooks.ts @@ -1,12 +1,18 @@ import type express from 'express'; -import { NodeHelpers, Workflow } from 'n8n-workflow'; +import { + type INodes, + type IWorkflowBase, + NodeHelpers, + SEND_AND_WAIT_OPERATION, + Workflow, +} from 'n8n-workflow'; import { Service } from 'typedi'; import { ExecutionRepository } from '@/databases/repositories/execution.repository'; import { ConflictError } from '@/errors/response-errors/conflict.error'; import { NotFoundError } from '@/errors/response-errors/not-found.error'; import type { IExecutionResponse, IWorkflowDb } from '@/interfaces'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { NodeTypes } from '@/node-types'; import * as WebhookHelpers from '@/webhooks/webhook-helpers'; import * as WorkflowExecuteAdditionalData from '@/workflow-execute-additional-data'; @@ -42,6 +48,29 @@ execution.data.executionData!.nodeExecutionStack[0].node.disabled = true; } + private isSendAndWaitRequest(nodes: INodes, suffix: string | undefined) { + return ( + suffix && + Object.keys(nodes).some( + (node) => + nodes[node].id === suffix && nodes[node].parameters.operation === SEND_AND_WAIT_OPERATION, + ) + ); + } + + private getWorkflow(workflowData: IWorkflowBase) { + return new Workflow({ + id: workflowData.id, + name: workflowData.name, + nodes: workflowData.nodes, + connections: workflowData.connections, + active: workflowData.active, + nodeTypes: this.nodeTypes, + staticData: workflowData.staticData, + settings: workflowData.settings, + }); + } + async executeWebhook( req: WaitingWebhookRequest, res: express.Response, @@ -66,10 +95,21 @@ throw new ConflictError(`The execution "${executionId}" is running already.`); } - if (execution.finished || execution.data.resultData.error) { + if (execution.data?.resultData?.error) { throw new ConflictError(`The execution "${executionId}" has finished already.`); } + if (execution.finished) { + const { workflowData } = execution; + const { nodes } = this.getWorkflow(workflowData); + if (this.isSendAndWaitRequest(nodes, suffix)) { + res.render('send-and-wait-no-action-required', { isTestWebhook: false }); + return { noWebhookResponse: true }; + } else { + throw new ConflictError(`The execution "${executionId}" has finished already.`); + } + } + const lastNodeExecuted = execution.data.resultData.lastNodeExecuted as string; // Set the node as disabled so that the data does not get executed again as it would result @@ -83,17 +123,7 @@ execution.data.resultData.runData[lastNodeExecuted].pop(); const { workflowData } = execution; - - const workflow = new Workflow({ - id: workflowData.id, - name: workflowData.name, - nodes: workflowData.nodes, - connections: workflowData.connections, - active: workflowData.active, - nodeTypes: this.nodeTypes, - staticData: workflowData.staticData, - settings: workflowData.settings, - }); + const workflow = this.getWorkflow(workflowData); const workflowStartNode = workflow.getNode(lastNodeExecuted); if (workflowStartNode === null) { @@ -116,8 +146,13 @@ if (webhookData === undefined) { // If no data was found it means that the execution cannot be started via a webhook. // Return 404 because we do not want to reveal whether the execution exists. - const errorMessage = `The workflow for execution "${executionId}" does not contain a waiting webhook with a matching path/method.`; - throw new NotFoundError(errorMessage); + if (this.isSendAndWaitRequest(workflow.nodes, suffix)) { + res.render('send-and-wait-no-action-required', { isTestWebhook: false }); + return { noWebhookResponse: true }; + } else { + const errorMessage = `The workflow for execution "${executionId}" does not contain a waiting webhook with a matching path/method.`; + throw new NotFoundError(errorMessage); + } } const runExecutionData = execution.data;
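The `execution.finished` branch above is what turns a repeat click on a Send-and-Wait link into a friendly "No action required" page instead of a 409. A minimal sketch of what `isSendAndWaitRequest` matches on, assuming a trimmed-down node shape and a placeholder value for `SEND_AND_WAIT_OPERATION` (the real constant is exported by `n8n-workflow`):

```ts
// The waiting-webhook URL suffix is matched against node IDs; only a node
// whose `operation` parameter is the send-and-wait operation qualifies.
const SEND_AND_WAIT_OPERATION = 'sendAndWait'; // assumed value for illustration

type MinimalNode = { id: string; parameters: { operation?: string } };

function isSendAndWaitRequest(nodes: Record<string, MinimalNode>, suffix: string | undefined): boolean {
  return (
    !!suffix &&
    Object.values(nodes).some(
      (node) => node.id === suffix && node.parameters.operation === SEND_AND_WAIT_OPERATION,
    )
  );
}

const nodes: Record<string, MinimalNode> = {
  Slack: { id: 'node-123', parameters: { operation: SEND_AND_WAIT_OPERATION } },
  Set: { id: 'node-456', parameters: {} },
};

console.log(isSendAndWaitRequest(nodes, 'node-123')); // true  -> render the confirmation page
console.log(isSendAndWaitRequest(nodes, 'node-456')); // false -> fall through to the Conflict/NotFound paths
```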
diff --git a/packages/cli/src/webhooks/webhook-helpers.ts b/packages/cli/src/webhooks/webhook-helpers.ts index c2c3b59bff..5ff770acfb 100644 --- a/packages/cli/src/webhooks/webhook-helpers.ts +++ b/packages/cli/src/webhooks/webhook-helpers.ts @@ -45,7 +45,7 @@ import { InternalServerError } from '@/errors/response-errors/internal-server.er import { NotFoundError } from '@/errors/response-errors/not-found.error'; import { UnprocessableRequestError } from '@/errors/response-errors/unprocessable.error'; import type { IExecutionDb, IWorkflowDb } from '@/interfaces'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { parseBody } from '@/middlewares'; import { OwnershipService } from '@/services/ownership.service'; import { WorkflowStatisticsService } from '@/services/workflow-statistics.service'; diff --git a/packages/cli/src/workflow-execute-additional-data.ts b/packages/cli/src/workflow-execute-additional-data.ts index 4d3bd7a223..2deae842fc 100644 --- a/packages/cli/src/workflow-execute-additional-data.ts +++ b/packages/cli/src/workflow-execute-additional-data.ts @@ -6,6 +6,13 @@ import type { PushType } from '@n8n/api-types'; import { GlobalConfig } from '@n8n/config'; import { WorkflowExecute } from 'n8n-core'; +import { + ApplicationError, + ErrorReporterProxy as ErrorReporter, + NodeOperationError, + Workflow, + WorkflowHooks, +} from 'n8n-workflow'; import type { IDataObject, IExecuteData, @@ -24,15 +31,11 @@ import type { WorkflowExecuteMode, ExecutionStatus, ExecutionError, + IExecuteFunctions, + ITaskDataConnections, ExecuteWorkflowOptions, IWorkflowExecutionDataProcess, -} from 'n8n-workflow'; -import { - ApplicationError, - ErrorReporterProxy as ErrorReporter, - NodeOperationError, - Workflow, - WorkflowHooks, + EnvProviderState, } from 'n8n-workflow'; import { Container } from 'typedi'; @@ -40,8 +43,13 @@ import { ActiveExecutions } from '@/active-executions'; import config from '@/config'; import { CredentialsHelper } from '@/credentials-helper'; import {
ExecutionRepository } from '@/databases/repositories/execution.repository'; +import type { AiEventMap, AiEventPayload } from '@/events/maps/ai.event-map'; import { ExternalHooks } from '@/external-hooks'; -import type { IWorkflowExecuteProcess, IWorkflowErrorData, ExecutionPayload } from '@/interfaces'; +import type { + IWorkflowExecuteProcess, + IWorkflowErrorData, + UpdateExecutionPayload, +} from '@/interfaces'; import { NodeTypes } from '@/node-types'; import { Push } from '@/push'; import { WorkflowStatisticsService } from '@/services/workflow-statistics.service'; @@ -49,7 +57,6 @@ import { findSubworkflowStart, isWorkflowIdValid } from '@/utils'; import * as WorkflowHelpers from '@/workflow-helpers'; import { WorkflowRepository } from './databases/repositories/workflow.repository'; -import type { AiEventMap, AiEventPayload } from './events/ai-event-map'; import { EventService } from './events/event.service'; import { restoreBinaryDataId } from './execution-lifecycle-hooks/restore-binary-data-id'; import { saveExecutionProgress } from './execution-lifecycle-hooks/save-execution-progress'; @@ -59,7 +66,8 @@ import { updateExistingExecution, } from './execution-lifecycle-hooks/shared/shared-hook-functions'; import { toSaveSettings } from './execution-lifecycle-hooks/to-save-settings'; -import { Logger } from './logger'; +import { Logger } from './logging/logger.service'; +import { TaskManager } from './runners/task-managers/task-manager'; import { SecretsHelper } from './secrets-helpers'; import { OwnershipService } from './services/ownership.service'; import { UrlService } from './services/url.service'; @@ -759,7 +767,7 @@ export async function getWorkflowData( /** * Executes the workflow with the given ID */ -async function executeWorkflow( +export async function executeWorkflow( workflowInfo: IExecuteWorkflowInfo, additionalData: IWorkflowExecuteAdditionalData, options: ExecuteWorkflowOptions, @@ -791,7 +799,13 @@ async function executeWorkflow( const runData = options.loadedRunData ?? (await getRunData(workflowData, options.inputData)); const executionId = await activeExecutions.add(runData); - await executionRepository.updateStatus(executionId, 'running'); + + /** + * A subworkflow execution in queue mode is not enqueued, but rather runs in the + * same worker process as the parent execution. Hence ensure the subworkflow + * execution is marked as started as well. + */ + await executionRepository.setRunning(executionId); Container.get(EventService).emit('workflow-pre-execute', { executionId, data: runData }); @@ -865,7 +879,7 @@ async function executeWorkflow( // Therefore, database might not contain finished errors. // Force an update to db as there should be no harm doing this - const fullExecutionData: ExecutionPayload = { + const fullExecutionData: UpdateExecutionPayload = { data: fullRunData.data, mode: fullRunData.mode, finished: fullRunData.finished ? 
fullRunData.finished : false, @@ -980,6 +994,49 @@ export async function getBase( setExecutionStatus, variables, secretsHelpers: Container.get(SecretsHelper), + async startAgentJob( + additionalData: IWorkflowExecuteAdditionalData, + jobType: string, + settings: unknown, + executeFunctions: IExecuteFunctions, + inputData: ITaskDataConnections, + node: INode, + workflow: Workflow, + runExecutionData: IRunExecutionData, + runIndex: number, + itemIndex: number, + activeNodeName: string, + connectionInputData: INodeExecutionData[], + siblingParameters: INodeParameters, + mode: WorkflowExecuteMode, + envProviderState: EnvProviderState, + executeData?: IExecuteData, + defaultReturnRunIndex?: number, + selfData?: IDataObject, + contextNodeName?: string, + ) { + return await Container.get(TaskManager).startTask( + additionalData, + jobType, + settings, + executeFunctions, + inputData, + node, + workflow, + runExecutionData, + runIndex, + itemIndex, + activeNodeName, + connectionInputData, + siblingParameters, + mode, + envProviderState, + executeData, + defaultReturnRunIndex, + selfData, + contextNodeName, + ); + }, logAiEvent: (eventName: keyof AiEventMap, payload: AiEventPayload) => eventService.emit(eventName, payload), }; diff --git a/packages/cli/src/workflow-runner.ts b/packages/cli/src/workflow-runner.ts index a4dd344b62..8d1e147e85 100644 --- a/packages/cli/src/workflow-runner.ts +++ b/packages/cli/src/workflow-runner.ts @@ -26,7 +26,7 @@ import { ActiveExecutions } from '@/active-executions'; import config from '@/config'; import { ExecutionRepository } from '@/databases/repositories/execution.repository'; import { ExternalHooks } from '@/external-hooks'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { NodeTypes } from '@/node-types'; import type { ScalingService } from '@/scaling/scaling.service'; import type { Job, JobData, JobResult } from '@/scaling/scaling.types'; @@ -245,7 +245,7 @@ export class WorkflowRunner { { executionId }, ); let workflowExecution: PCancelable; - await this.executionRepository.updateStatus(executionId, 'running'); + await this.executionRepository.setRunning(executionId); // write try { additionalData.hooks = WorkflowExecuteAdditionalData.getWorkflowHooksMain(data, executionId); @@ -376,22 +376,12 @@ export class WorkflowRunner { this.scalingService = Container.get(ScalingService); } - let priority = 100; - if (realtime === true) { - // Jobs which require a direct response get a higher priority - priority = 50; - } // TODO: For realtime jobs should probably also not do retry or not retry if they are older than x seconds. // Check if they get retried by default and how often. - const jobOptions = { - priority, - removeOnComplete: true, - removeOnFail: true, - }; let job: Job; let hooks: WorkflowHooks; try { - job = await this.scalingService.addJob(jobData, jobOptions); + job = await this.scalingService.addJob(jobData, { priority: realtime ? 
50 : 100 }); hooks = WorkflowExecuteAdditionalData.getWorkflowHooksWorkerMain( data.executionMode, diff --git a/packages/cli/src/workflows/workflow-execution.service.ts b/packages/cli/src/workflows/workflow-execution.service.ts index 4dc6d00f34..dd8480d759 100644 --- a/packages/cli/src/workflows/workflow-execution.service.ts +++ b/packages/cli/src/workflows/workflow-execution.service.ts @@ -22,8 +22,8 @@ import type { Project } from '@/databases/entities/project'; import type { User } from '@/databases/entities/user'; import { ExecutionRepository } from '@/databases/repositories/execution.repository'; import { WorkflowRepository } from '@/databases/repositories/workflow.repository'; -import type { ExecutionPayload, IWorkflowDb, IWorkflowErrorData } from '@/interfaces'; -import { Logger } from '@/logger'; +import type { CreateExecutionPayload, IWorkflowDb, IWorkflowErrorData } from '@/interfaces'; +import { Logger } from '@/logging/logger.service'; import { NodeTypes } from '@/node-types'; import { SubworkflowPolicyChecker } from '@/subworkflows/subworkflow-policy-checker.service'; import { TestWebhooks } from '@/webhooks/test-webhooks'; @@ -206,11 +206,10 @@ export class WorkflowExecutionService { initialNode, ); - const fullExecutionData: ExecutionPayload = { + const fullExecutionData: CreateExecutionPayload = { data: fakeExecution.data, mode: fakeExecution.mode, finished: false, - startedAt: new Date(), stoppedAt: new Date(), workflowData, waitTill: null, diff --git a/packages/cli/src/workflows/workflow-history/__tests__/workflow-history.service.ee.test.ts b/packages/cli/src/workflows/workflow-history/__tests__/workflow-history.service.ee.test.ts index 8bc54318bf..a2a48587f0 100644 --- a/packages/cli/src/workflows/workflow-history/__tests__/workflow-history.service.ee.test.ts +++ b/packages/cli/src/workflows/workflow-history/__tests__/workflow-history.service.ee.test.ts @@ -3,13 +3,12 @@ import { mockClear } from 'jest-mock-extended'; import { User } from '@/databases/entities/user'; import { SharedWorkflowRepository } from '@/databases/repositories/shared-workflow.repository'; import { WorkflowHistoryRepository } from '@/databases/repositories/workflow-history.repository'; -import { Logger } from '@/logger'; import { WorkflowHistoryService } from '@/workflows/workflow-history/workflow-history.service.ee'; -import { mockInstance } from '@test/mocking'; +import { mockInstance, mockLogger } from '@test/mocking'; import { getWorkflow } from '@test-integration/workflow'; const workflowHistoryRepository = mockInstance(WorkflowHistoryRepository); -const logger = mockInstance(Logger); +const logger = mockLogger(); const sharedWorkflowRepository = mockInstance(SharedWorkflowRepository); const workflowHistoryService = new WorkflowHistoryService( logger, @@ -106,10 +105,6 @@ describe('WorkflowHistoryService', () => { // Assert expect(workflowHistoryRepository.insert).toHaveBeenCalled(); - expect(logger.error).toHaveBeenCalledWith( - 'Failed to save workflow history version for workflow 123', - expect.any(Error), - ); }); }); }); diff --git a/packages/cli/src/workflows/workflow-history/workflow-history.service.ee.ts b/packages/cli/src/workflows/workflow-history/workflow-history.service.ee.ts index 7d40c38ec0..3b171e3422 100644 --- a/packages/cli/src/workflows/workflow-history/workflow-history.service.ee.ts +++ b/packages/cli/src/workflows/workflow-history/workflow-history.service.ee.ts @@ -1,3 +1,4 @@ +import { ensureError } from 'n8n-workflow'; import { Service } from 'typedi'; import type { User 
} from '@/databases/entities/user'; @@ -7,7 +8,7 @@ import { SharedWorkflowRepository } from '@/databases/repositories/shared-workfl import { WorkflowHistoryRepository } from '@/databases/repositories/workflow-history.repository'; import { SharedWorkflowNotFoundError } from '@/errors/shared-workflow-not-found.error'; import { WorkflowHistoryVersionNotFoundError } from '@/errors/workflow-history-version-not-found.error'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { isWorkflowHistoryEnabled } from './workflow-history-helper.ee'; @@ -79,10 +80,10 @@ export class WorkflowHistoryService { workflowId, }); } catch (e) { - this.logger.error( - `Failed to save workflow history version for workflow ${workflowId}`, - e as Error, - ); + const error = ensureError(e); + this.logger.error(`Failed to save workflow history version for workflow ${workflowId}`, { + error, + }); } } } diff --git a/packages/cli/src/workflows/workflow-static-data.service.ts b/packages/cli/src/workflows/workflow-static-data.service.ts index f639345a1c..10655b77c7 100644 --- a/packages/cli/src/workflows/workflow-static-data.service.ts +++ b/packages/cli/src/workflows/workflow-static-data.service.ts @@ -3,7 +3,7 @@ import { type IDataObject, type Workflow, ErrorReporterProxy as ErrorReporter } import { Service } from 'typedi'; import { WorkflowRepository } from '@/databases/repositories/workflow.repository'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { isWorkflowIdValid } from '@/utils'; @Service() diff --git a/packages/cli/src/workflows/workflow.service.ee.ts b/packages/cli/src/workflows/workflow.service.ee.ts index 5456ac6268..90a8af90b1 100644 --- a/packages/cli/src/workflows/workflow.service.ee.ts +++ b/packages/cli/src/workflows/workflow.service.ee.ts @@ -17,7 +17,7 @@ import { WorkflowRepository } from '@/databases/repositories/workflow.repository import { BadRequestError } from '@/errors/response-errors/bad-request.error'; import { NotFoundError } from '@/errors/response-errors/not-found.error'; import { TransferWorkflowError } from '@/errors/response-errors/transfer-workflow.error'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { OwnershipService } from '@/services/ownership.service'; import { ProjectService } from '@/services/project.service'; @@ -285,14 +285,6 @@ export class EnterpriseWorkflowService { "You can't transfer a workflow into the project that's already owning it.", ); } - if (sourceProject.type !== 'team' && sourceProject.type !== 'personal') { - throw new TransferWorkflowError( - 'You can only transfer workflows out of personal or team projects.', - ); - } - if (destinationProject.type !== 'team') { - throw new TransferWorkflowError('You can only transfer workflows into team projects.'); - } // 6. 
deactivate workflow if necessary const wasActive = workflow.active; diff --git a/packages/cli/src/workflows/workflow.service.ts b/packages/cli/src/workflows/workflow.service.ts index 03ab425a24..bce8770303 100644 --- a/packages/cli/src/workflows/workflow.service.ts +++ b/packages/cli/src/workflows/workflow.service.ts @@ -24,7 +24,7 @@ import { NotFoundError } from '@/errors/response-errors/not-found.error'; import { EventService } from '@/events/event.service'; import { ExternalHooks } from '@/external-hooks'; import { validateEntity } from '@/generic-helpers'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { hasSharing, type ListQuery } from '@/requests'; import { OrchestrationService } from '@/services/orchestration.service'; import { OwnershipService } from '@/services/ownership.service'; diff --git a/packages/cli/src/workflows/workflows.controller.ts b/packages/cli/src/workflows/workflows.controller.ts index 30ca5c9773..59f53e0df1 100644 --- a/packages/cli/src/workflows/workflows.controller.ts +++ b/packages/cli/src/workflows/workflows.controller.ts @@ -27,7 +27,7 @@ import { ExternalHooks } from '@/external-hooks'; import { validateEntity } from '@/generic-helpers'; import type { IWorkflowResponse } from '@/interfaces'; import { License } from '@/license'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { listQueryMiddleware } from '@/middlewares'; import * as ResponseHelper from '@/response-helper'; import { NamingService } from '@/services/naming.service'; diff --git a/packages/cli/templates/form-trigger.handlebars b/packages/cli/templates/form-trigger.handlebars index 67818629f5..5493a76e7f 100644 --- a/packages/cli/templates/form-trigger.handlebars +++ b/packages/cli/templates/form-trigger.handlebars @@ -735,6 +735,14 @@ } return; + }).then(() => { + window.addEventListener('storage', function(event) { + if (event.key === 'n8n_redirect_to_next_form_test_page' && event.newValue) { + const newUrl = event.newValue; + localStorage.removeItem('n8n_redirect_to_next_form_test_page'); + window.location.replace(newUrl); + } + }); }) .catch(function (error) { console.error('Error:', error); diff --git a/packages/cli/templates/send-and-wait-no-action-required.handlebars b/packages/cli/templates/send-and-wait-no-action-required.handlebars new file mode 100644 index 0000000000..7dcf99f10b --- /dev/null +++ b/packages/cli/templates/send-and-wait-no-action-required.handlebars @@ -0,0 +1,73 @@ + + + + + + + + No action required + + + + +
+ [template markup lost in extraction: the remaining ~70 added lines are a static HTML page titled "No action required", shown when a Send-and-Wait webhook link requires no further action]
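The `form-trigger.handlebars` change further up relies on the browser's cross-tab `storage` event: a write to `localStorage` fires the event in every other same-origin tab, but not in the tab that performed the write. A standalone sketch of the pattern (the producer side and the example URL are assumptions; only the key name and the consumer logic come from the template):

```ts
const REDIRECT_KEY = 'n8n_redirect_to_next_form_test_page';

// Consumer tab: wait for another tab to publish the next page URL, then clean
// up the key and navigate. 'storage' fires only in tabs that did NOT perform
// the write, which is what makes this usable as a cross-tab signal.
window.addEventListener('storage', (event: StorageEvent) => {
  if (event.key === REDIRECT_KEY && event.newValue) {
    localStorage.removeItem(REDIRECT_KEY);
    window.location.replace(event.newValue);
  }
});

// Producer tab (assumed counterpart elsewhere in the flow): publishing a URL
// triggers the listener above in the tab that is still waiting.
localStorage.setItem(REDIRECT_KEY, 'https://example.test/form/next-step');
```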
+ + + \ No newline at end of file diff --git a/packages/cli/test/integration/api-keys.api.test.ts b/packages/cli/test/integration/api-keys.api.test.ts new file mode 100644 index 0000000000..f577e0cf78 --- /dev/null +++ b/packages/cli/test/integration/api-keys.api.test.ts @@ -0,0 +1,178 @@ +import { GlobalConfig } from '@n8n/config'; +import { Container } from 'typedi'; + +import type { ApiKey } from '@/databases/entities/api-key'; +import type { User } from '@/databases/entities/user'; +import { ApiKeyRepository } from '@/databases/repositories/api-key.repository'; +import { PublicApiKeyService } from '@/services/public-api-key.service'; +import { mockInstance } from '@test/mocking'; + +import { createOwnerWithApiKey, createUser, createUserShell } from './shared/db/users'; +import { randomValidPassword } from './shared/random'; +import * as testDb from './shared/test-db'; +import type { SuperAgentTest } from './shared/types'; +import * as utils from './shared/utils/'; + +const testServer = utils.setupTestServer({ endpointGroups: ['apiKeys'] }); +let publicApiKeyService: PublicApiKeyService; + +beforeAll(() => { + publicApiKeyService = Container.get(PublicApiKeyService); +}); + +beforeEach(async () => { + await testDb.truncate(['User']); + mockInstance(GlobalConfig, { publicApi: { disabled: false } }); +}); + +describe('When public API is disabled', () => { + let owner: User; + let authAgent: SuperAgentTest; + + beforeEach(async () => { + owner = await createOwnerWithApiKey(); + + authAgent = testServer.authAgentFor(owner); + mockInstance(GlobalConfig, { publicApi: { disabled: true } }); + }); + + test('POST /api-keys should 404', async () => { + await authAgent.post('/api-keys').expect(404); + }); + + test('GET /api-keys should 404', async () => { + await authAgent.get('/api-keys').expect(404); + }); + + test('DELETE /api-key/:id should 404', async () => { + await authAgent.delete(`/api-keys/${1}`).expect(404); + }); +}); + +describe('Owner shell', () => { + let ownerShell: User; + + beforeEach(async () => { + ownerShell = await createUserShell('global:owner'); + }); + + test('POST /api-keys should create an api key', async () => { + const newApiKeyResponse = await testServer.authAgentFor(ownerShell).post('/api-keys'); + + const newApiKey = newApiKeyResponse.body.data as ApiKey; + + expect(newApiKeyResponse.statusCode).toBe(200); + expect(newApiKey).toBeDefined(); + + const newStoredApiKey = await Container.get(ApiKeyRepository).findOneByOrFail({ + userId: ownerShell.id, + }); + + expect(newStoredApiKey).toEqual({ + id: expect.any(String), + label: 'My API Key', + userId: ownerShell.id, + apiKey: newApiKey.apiKey, + createdAt: expect.any(Date), + updatedAt: expect.any(Date), + }); + }); + + test('GET /api-keys should fetch the api key redacted', async () => { + const newApiKeyResponse = await testServer.authAgentFor(ownerShell).post('/api-keys'); + + const retrieveAllApiKeysResponse = await testServer.authAgentFor(ownerShell).get('/api-keys'); + + expect(retrieveAllApiKeysResponse.statusCode).toBe(200); + + expect(retrieveAllApiKeysResponse.body.data[0]).toEqual({ + id: newApiKeyResponse.body.data.id, + label: 'My API Key', + userId: ownerShell.id, + apiKey: publicApiKeyService.redactApiKey(newApiKeyResponse.body.data.apiKey), + createdAt: expect.any(String), + updatedAt: expect.any(String), + }); + }); + + test('DELETE /api-keys/:id should delete the api key', async () => { + const newApiKeyResponse = await testServer.authAgentFor(ownerShell).post('/api-keys'); + + const 
deleteApiKeyResponse = await testServer + .authAgentFor(ownerShell) + .delete(`/api-keys/${newApiKeyResponse.body.data.id}`); + + const retrieveAllApiKeysResponse = await testServer.authAgentFor(ownerShell).get('/api-keys'); + + expect(deleteApiKeyResponse.body.data.success).toBe(true); + expect(retrieveAllApiKeysResponse.body.data.length).toBe(0); + }); +}); + +describe('Member', () => { + const memberPassword = randomValidPassword(); + let member: User; + + beforeEach(async () => { + member = await createUser({ + password: memberPassword, + role: 'global:member', + }); + await utils.setInstanceOwnerSetUp(true); + }); + + test('POST /api-keys should create an api key', async () => { + const newApiKeyResponse = await testServer.authAgentFor(member).post('/api-keys'); + + expect(newApiKeyResponse.statusCode).toBe(200); + expect(newApiKeyResponse.body.data.apiKey).toBeDefined(); + expect(newApiKeyResponse.body.data.apiKey).not.toBeNull(); + + const newStoredApiKey = await Container.get(ApiKeyRepository).findOneByOrFail({ + userId: member.id, + }); + + expect(newStoredApiKey).toEqual({ + id: expect.any(String), + label: 'My API Key', + userId: member.id, + apiKey: newApiKeyResponse.body.data.apiKey, + createdAt: expect.any(Date), + updatedAt: expect.any(Date), + }); + }); + + test('GET /api-keys should fetch the api key redacted', async () => { + const newApiKeyResponse = await testServer.authAgentFor(member).post('/api-keys'); + + const retrieveAllApiKeysResponse = await testServer.authAgentFor(member).get('/api-keys'); + + expect(retrieveAllApiKeysResponse.statusCode).toBe(200); + + expect(retrieveAllApiKeysResponse.body.data[0]).toEqual({ + id: newApiKeyResponse.body.data.id, + label: 'My API Key', + userId: member.id, + apiKey: publicApiKeyService.redactApiKey(newApiKeyResponse.body.data.apiKey), + createdAt: expect.any(String), + updatedAt: expect.any(String), + }); + + expect(newApiKeyResponse.body.data.apiKey).not.toEqual( + retrieveAllApiKeysResponse.body.data[0].apiKey, + ); + }); + + test('DELETE /api-keys/:id should delete the api key', async () => { + const newApiKeyResponse = await testServer.authAgentFor(member).post('/api-keys'); + + const deleteApiKeyResponse = await testServer + .authAgentFor(member) + .delete(`/api-keys/${newApiKeyResponse.body.data.id}`); + + const retrieveAllApiKeysResponse = await testServer.authAgentFor(member).get('/api-keys'); + + expect(deleteApiKeyResponse.body.data.success).toBe(true); + expect(retrieveAllApiKeysResponse.body.data.length).toBe(0); + }); +}); diff --git a/packages/cli/test/integration/commands/worker.cmd.test.ts b/packages/cli/test/integration/commands/worker.cmd.test.ts index 726c78537e..2326ed595a 100644 --- a/packages/cli/test/integration/commands/worker.cmd.test.ts +++ b/packages/cli/test/integration/commands/worker.cmd.test.ts @@ -5,14 +5,17 @@ import { BinaryDataService } from 'n8n-core'; import { Worker } from '@/commands/worker'; import config from '@/config'; import { MessageEventBus } from '@/eventbus/message-event-bus/message-event-bus'; -import { LogStreamingEventRelay } from '@/events/log-streaming-event-relay'; +import { LogStreamingEventRelay } from '@/events/relays/log-streaming.event-relay'; import { ExternalHooks } from '@/external-hooks'; import { ExternalSecretsManager } from '@/external-secrets/external-secrets-manager.ee'; import { License } from '@/license'; import { LoadNodesAndCredentials } from '@/load-nodes-and-credentials'; +import { Push } from '@/push'; +import { Publisher } from 
'@/scaling/pubsub/publisher.service'; +import { Subscriber } from '@/scaling/pubsub/subscriber.service'; import { ScalingService } from '@/scaling/scaling.service'; -import { OrchestrationHandlerWorkerService } from '@/services/orchestration/worker/orchestration.handler.worker.service'; import { OrchestrationWorkerService } from '@/services/orchestration/worker/orchestration.worker.service'; +import { Telemetry } from '@/telemetry'; import { setupTestCommand } from '@test-integration/utils/test-command'; import { mockInstance } from '../../shared/mocking'; @@ -26,15 +29,17 @@ const externalSecretsManager = mockInstance(ExternalSecretsManager); const license = mockInstance(License, { loadCertStr: async () => '' }); const messageEventBus = mockInstance(MessageEventBus); const logStreamingEventRelay = mockInstance(LogStreamingEventRelay); -const orchestrationHandlerWorkerService = mockInstance(OrchestrationHandlerWorkerService); const scalingService = mockInstance(ScalingService); const orchestrationWorkerService = mockInstance(OrchestrationWorkerService); +mockInstance(Publisher); +mockInstance(Subscriber); +mockInstance(Telemetry); +mockInstance(Push); const command = setupTestCommand(Worker); test('worker initializes all its components', async () => { const worker = await command.run(); - expect(worker.queueModeId).toBeDefined(); expect(worker.queueModeId).toContain('worker'); expect(worker.queueModeId.length).toBeGreaterThan(15); @@ -47,6 +52,5 @@ test('worker initializes all its components', async () => { expect(scalingService.setupWorker).toHaveBeenCalledTimes(1); expect(logStreamingEventRelay.init).toHaveBeenCalledTimes(1); expect(orchestrationWorkerService.init).toHaveBeenCalledTimes(1); - expect(orchestrationHandlerWorkerService.initWithOptions).toHaveBeenCalledTimes(1); expect(messageEventBus.send).toHaveBeenCalledTimes(1); }); diff --git a/packages/cli/test/integration/controllers/invitation/assertions.ts b/packages/cli/test/integration/controllers/invitation/assertions.ts index 3e24a53222..daa40586f2 100644 --- a/packages/cli/test/integration/controllers/invitation/assertions.ts +++ b/packages/cli/test/integration/controllers/invitation/assertions.ts @@ -10,7 +10,6 @@ export function assertReturnedUserProps(user: User) { expect(user.personalizationAnswers).toBeNull(); expect(user.password).toBeUndefined(); expect(user.isPending).toBe(false); - expect(user.apiKey).not.toBeDefined(); expect(user.globalScopes).toBeDefined(); expect(user.globalScopes).not.toHaveLength(0); } diff --git a/packages/cli/test/integration/credentials/credentials.api.ee.test.ts b/packages/cli/test/integration/credentials/credentials.api.ee.test.ts index b1c0bfab75..5428cafbd4 100644 --- a/packages/cli/test/integration/credentials/credentials.api.ee.test.ts +++ b/packages/cli/test/integration/credentials/credentials.api.ee.test.ts @@ -3,6 +3,7 @@ import { Container } from 'typedi'; import config from '@/config'; import type { Project } from '@/databases/entities/project'; +import type { ProjectRole } from '@/databases/entities/project-relation'; import type { User } from '@/databases/entities/user'; import { ProjectRepository } from '@/databases/repositories/project.repository'; import { SharedCredentialsRepository } from '@/databases/repositories/shared-credentials.repository'; @@ -1118,18 +1119,6 @@ describe('PUT /:credentialId/transfer', () => { .expect(400); }); - test('cannot transfer into a personal project', async () => { - const credential = await saveCredential(randomCredentialPayload(), { - user: 
member, - }); - - await testServer - .authAgentFor(member) - .put(`/credentials/${credential.id}/transfer`) - .send({ destinationProjectId: memberPersonalProject.id }) - .expect(400); - }); - test('cannot transfer somebody elses credential', async () => { const destinationProject = await createTeamProject('Destination Project', member); @@ -1158,187 +1147,139 @@ describe('PUT /:credentialId/transfer', () => { .expect(404); }); - test('project:editors cannot transfer credentials', async () => { - // - // ARRANGE - // - const sourceProject = await createTeamProject('Source Project'); - await linkUserToProject(member, sourceProject, 'project:editor'); - - const credential = await saveCredential(randomCredentialPayload(), { - project: sourceProject, - }); - - const destinationProject = await createTeamProject('Destination Project', member); - - // - // ACT & ASSERT - // - await testServer - .authAgentFor(member) - .put(`/credentials/${credential.id}/transfer`) - .send({ destinationProjectId: destinationProject.id }) - .expect(403); - }); - - test('transferring from a personal project to a team project severs all sharings', async () => { - // - // ARRANGE - // - const credential = await saveCredential(randomCredentialPayload(), { user: member }); - - // these sharings should be deleted by the transfer - await shareCredentialWithUsers(credential, [anotherMember, owner]); - - const destinationProject = await createTeamProject('Destination Project', member); - - // - // ACT - // - const response = await testServer - .authAgentFor(member) - .put(`/credentials/${credential.id}/transfer`) - .send({ destinationProjectId: destinationProject.id }) - .expect(200); - - // - // ASSERT - // - expect(response.body).toEqual({}); - - const allSharings = await getCredentialSharings(credential); - expect(allSharings).toHaveLength(1); - expect(allSharings[0]).toMatchObject({ - projectId: destinationProject.id, - credentialsId: credential.id, - role: 'credential:owner', - }); - }); - - test('can transfer from team to another team project', async () => { - // - // ARRANGE - // - const sourceProject = await createTeamProject('Team Project 1', member); - const credential = await saveCredential(randomCredentialPayload(), { - project: sourceProject, - }); - - const destinationProject = await createTeamProject('Team Project 2', member); - - // - // ACT - // - const response = await testServer - .authAgentFor(member) - .put(`/credentials/${credential.id}/transfer`) - .send({ destinationProjectId: destinationProject.id }) - .expect(200); - - // - // ASSERT - // - expect(response.body).toEqual({}); - - const allSharings = await getCredentialSharings(credential); - expect(allSharings).toHaveLength(1); - expect(allSharings[0]).toMatchObject({ - projectId: destinationProject.id, - credentialsId: credential.id, - role: 'credential:owner', - }); - }); - - test.each([ - ['owners', () => owner], - ['admins', () => admin], - ])( - '%s can always transfer from any personal or team project into any team project', - async (_name, actor) => { + test.each(['project:editor', 'project:viewer'])( + '%ss cannot transfer credentials', + async (projectRole) => { // // ARRANGE // - const sourceProject = await createTeamProject('Source Project', member); - const teamCredential = await saveCredential(randomCredentialPayload(), { + const sourceProject = await createTeamProject('Source Project'); + await linkUserToProject(member, sourceProject, projectRole); + + const credential = await saveCredential(randomCredentialPayload(), { project: 
sourceProject, }); - const personalCredential = await saveCredential(randomCredentialPayload(), { user: member }); - const destinationProject = await createTeamProject('Destination Project', member); // + // ACT & ASSERT + // + await testServer + .authAgentFor(member) + .put(`/credentials/${credential.id}/transfer`) + .send({ destinationProjectId: destinationProject.id }) + .expect(403); + }, + ); + + test.each< + [ + // user role + 'owners' | 'admins', + // source project type + 'team' | 'personal', + // destination project type + 'team' | 'personal', + // actor + () => User, + // source project + () => Promise | Project, + // destination project + () => Promise | Project, + ] + >([ + // owner + [ + 'owners', + 'team', + 'team', + () => owner, + async () => await createTeamProject('Source Project'), + async () => await createTeamProject('Destination Project'), + ], + [ + 'owners', + 'team', + 'personal', + () => owner, + async () => await createTeamProject('Source Project'), + () => memberPersonalProject, + ], + [ + 'owners', + 'personal', + 'team', + () => owner, + () => memberPersonalProject, + async () => await createTeamProject('Destination Project'), + ], + + // admin + [ + 'admins', + 'team', + 'team', + () => admin, + async () => await createTeamProject('Source Project'), + async () => await createTeamProject('Destination Project'), + ], + [ + 'admins', + 'team', + 'personal', + () => admin, + async () => await createTeamProject('Source Project'), + () => memberPersonalProject, + ], + [ + 'admins', + 'personal', + 'team', + () => admin, + () => memberPersonalProject, + async () => await createTeamProject('Destination Project'), + ], + ])( + '%s can always transfer from a %s project to a %s project', + async ( + _roleName, + _sourceProjectName, + _destinationProjectName, + getUser, + getSourceProject, + getDestinationProject, + ) => { + // ARRANGE + const user = getUser(); + const sourceProject = await getSourceProject(); + const destinationProject = await getDestinationProject(); + + const credential = await saveCredential(randomCredentialPayload(), { + project: sourceProject, + }); + // ACT - // - const response1 = await testServer - .authAgentFor(actor()) - .put(`/credentials/${teamCredential.id}/transfer`) - .send({ destinationProjectId: destinationProject.id }) - .expect(200); - const response2 = await testServer - .authAgentFor(actor()) - .put(`/credentials/${personalCredential.id}/transfer`) + const response = await testServer + .authAgentFor(user) + .put(`/credentials/${credential.id}/transfer`) .send({ destinationProjectId: destinationProject.id }) .expect(200); - // // ASSERT - // - expect(response1.body).toEqual({}); - expect(response2.body).toEqual({}); + expect(response.body).toEqual({}); { - const allSharings = await getCredentialSharings(teamCredential); + const allSharings = await getCredentialSharings(credential); expect(allSharings).toHaveLength(1); expect(allSharings[0]).toMatchObject({ projectId: destinationProject.id, - credentialsId: teamCredential.id, - role: 'credential:owner', - }); - } - - { - const allSharings = await getCredentialSharings(personalCredential); - expect(allSharings).toHaveLength(1); - expect(allSharings[0]).toMatchObject({ - projectId: destinationProject.id, - credentialsId: personalCredential.id, + credentialsId: credential.id, role: 'credential:owner', }); } }, ); - - test.each([ - ['owners', () => owner], - ['admins', () => admin], - ])('%s cannot transfer into personal projects', async (_name, actor) => { - // - // ARRANGE - // - const 
sourceProject = await createTeamProject('Source Project', member); - const teamCredential = await saveCredential(randomCredentialPayload(), { - project: sourceProject, - }); - - const personalCredential = await saveCredential(randomCredentialPayload(), { user: member }); - - const destinationProject = anotherMemberPersonalProject; - - // - // ACT & ASSERT - // - await testServer - .authAgentFor(actor()) - .put(`/credentials/${teamCredential.id}/transfer`) - .send({ destinationProjectId: destinationProject.id }) - .expect(400); - await testServer - .authAgentFor(actor()) - .put(`/credentials/${personalCredential.id}/transfer`) - .send({ destinationProjectId: destinationProject.id }) - .expect(400); - }); }); function validateMainCredentialData(credential: ListQuery.Credentials.WithOwnedByAndSharedWith) { diff --git a/packages/cli/test/integration/deduplication/deduplication-helper.test.ts b/packages/cli/test/integration/deduplication/deduplication-helper.test.ts new file mode 100644 index 0000000000..2859bb363c --- /dev/null +++ b/packages/cli/test/integration/deduplication/deduplication-helper.test.ts @@ -0,0 +1,532 @@ +import { DataDeduplicationService } from 'n8n-core'; +import type { ICheckProcessedContextData, INodeTypeData } from 'n8n-workflow'; +import type { IDeduplicationOutput, INode, DeduplicationItemTypes } from 'n8n-workflow'; +import { Workflow } from 'n8n-workflow'; + +import { getDataDeduplicationService } from '@/deduplication'; +import { LoadNodesAndCredentials } from '@/load-nodes-and-credentials'; +import { NodeTypes } from '@/node-types'; +import { mockInstance } from '@test/mocking'; +import { createWorkflow } from '@test-integration/db/workflows'; + +import * as testDb from '../shared/test-db'; + +let workflow: Workflow; + +jest.mock('../../../src/telemetry'); + +const MOCK_NODE_TYPES_DATA = mockNodeTypesData(['set']); +mockInstance(LoadNodesAndCredentials, { + loaded: { + nodes: MOCK_NODE_TYPES_DATA, + credentials: {}, + }, +}); +function mockNodeTypesData( + nodeNames: string[], + options?: { + addTrigger?: boolean; + }, +) { + return nodeNames.reduce((acc, nodeName) => { + return ( + (acc[`n8n-nodes-base.${nodeName}`] = { + sourcePath: '', + type: { + description: { + displayName: nodeName, + name: nodeName, + group: [], + description: '', + version: 1, + defaults: {}, + inputs: [], + outputs: [], + properties: [], + }, + trigger: options?.addTrigger ? 
async () => undefined : undefined, + }, + }), + acc + ); + }, {}); +} +const node: INode = { + id: 'uuid-1234', + parameters: {}, + name: 'test', + type: 'test.set', + typeVersion: 1, + position: [0, 0], +}; + +beforeAll(async () => { + await testDb.init(); + + const nodeTypes = mockInstance(NodeTypes); + const workflowEntityOriginal = await createWorkflow(); + + workflow = new Workflow({ + id: workflowEntityOriginal.id, + nodes: [node], + connections: {}, + active: false, + nodeTypes, + }); + + const dataDeduplicationService = getDataDeduplicationService(); + await DataDeduplicationService.init(dataDeduplicationService); +}); + +beforeEach(async () => { + await testDb.truncate(['ProcessedData']); +}); + +afterAll(async () => { + await testDb.terminate(); +}); + +describe('Deduplication.DeduplicationHelper', () => { + test('Deduplication (mode: entries): DeduplicationHelper should record and check data correctly', async () => { + const context = 'node'; + const contextData: ICheckProcessedContextData = { + workflow, + node, + }; + + let processedData: IDeduplicationOutput; + + processedData = await DataDeduplicationService.getInstance().checkProcessedAndRecord( + ['a', 'b'], + context, + contextData, + { mode: 'entries' }, + ); + + // 'a' & 'b' got only checked before, so still has to be new + expect(processedData).toEqual({ new: ['a', 'b'], processed: [] }); + + processedData = await DataDeduplicationService.getInstance().checkProcessedAndRecord( + ['a', 'b', 'c', 'd'], + context, + contextData, + { mode: 'entries' }, + ); + + // 'a' & 'b' got recorded before, 'c' only checked before and 'd' has never been seen + expect(processedData).toEqual({ new: ['c', 'd'], processed: ['a', 'b'] }); + + await DataDeduplicationService.getInstance().removeProcessed(['b', 'd'], context, contextData, { + mode: 'entries', + }); + }); + + test('Deduplication (mode: entries): DeduplicationHelper different contexts should not interfere with each other', async () => { + const contextData: ICheckProcessedContextData = { + workflow, + node, + }; + + let processedData: IDeduplicationOutput; + + // Add data with context "node" + processedData = await DataDeduplicationService.getInstance().checkProcessedAndRecord( + ['a', 'b'], + 'node', + contextData, + { mode: 'entries' }, + ); + + // No data exists yet for context "node" so has to be new + expect(processedData).toEqual({ new: ['a', 'b'], processed: [] }); + + // Add data with context "workflow" + processedData = await DataDeduplicationService.getInstance().checkProcessedAndRecord( + ['a', 'b', 'c'], + 'workflow', + contextData, + { mode: 'entries' }, + ); + + // No data exists yet for context 'workflow' so has to be new + expect(processedData).toEqual({ new: ['a', 'b', 'c'], processed: [] }); + + await DataDeduplicationService.getInstance().removeProcessed(['a'], 'node', contextData, { + mode: 'entries', + }); + + processedData = await DataDeduplicationService.getInstance().checkProcessedAndRecord( + ['a', 'b', 'c'], + 'node', + contextData, + { mode: 'entries' }, + ); + + // 'a' got removed for the context 'node' and 'c' never got saved, so only 'b' should be known + expect(processedData).toEqual({ new: ['a', 'c'], processed: ['b'] }); + + await DataDeduplicationService.getInstance().removeProcessed(['b'], 'workflow', contextData, { + mode: 'entries', + }); + + processedData = await DataDeduplicationService.getInstance().checkProcessedAndRecord( + ['a', 'b', 'c', 'd'], + 'workflow', + contextData, + { mode: 'entries' }, + ); + + // 'b' got removed for the context 'workflow' and 'd' never got saved, so both are new + // 'a' and 'c' should be known + expect(processedData).toEqual({ new: ['b', 'd'], processed: ['a', 'c'] }); + }); + + test('Deduplication (mode: entries): DeduplicationHelper check maxEntries', async () => { + const contextData: ICheckProcessedContextData = { + workflow, + node, + }; + + let processedData: IDeduplicationOutput; + + processedData = await DataDeduplicationService.getInstance().checkProcessedAndRecord( + ['0', '1', '2', '3'], + 'node', + contextData, + { mode: 'entries', maxEntries: 5 }, + ); + + // All data should be new + expect(processedData).toEqual({ new: ['0', '1', '2', '3'], processed: [] }); + + // Add more data with context "node" + processedData = await DataDeduplicationService.getInstance().checkProcessedAndRecord( + ['4', '5', '6'], + 'node', + contextData, + { mode: 'entries', maxEntries: 5 }, + ); + + // All given data should be new + expect(processedData).toEqual({ new: ['4', '5', '6'], processed: [] }); + + // This should not make a difference, removing an item which does not exist + await DataDeduplicationService.getInstance().removeProcessed(['a'], 'node', contextData, { + mode: 'entries', + }); + + processedData = await DataDeduplicationService.getInstance().checkProcessedAndRecord( + ['0', '1', '2', '3', '4', '5', '6', '7'], + 'node', + contextData, + { mode: 'entries', maxEntries: 5 }, + ); + + // '7' should be new, and '0' and '1' as well, because they have been pruned since at most 5 entries get saved + expect(processedData).toEqual({ new: ['0', '1', '7'], processed: ['2', '3', '4', '5', '6'] }); + });
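The "latest" modes exercised in the next block behave like a watermark rather than a set: only values beyond the highest value seen so far count as new, and the watermark then advances. A hedged sketch of that contract, mirroring what the tests below assert while ignoring persistence and contexts (this is not the service's real implementation):

```ts
// Watermark-style dedup: 'latestIncrementalKey' compares numbers and
// 'latestDate' compares dates via their timestamps. Values at or below
// the previous maximum are treated as already processed.
function checkLatest(items: number[], previousMax?: number) {
  const threshold = previousMax ?? -Infinity;
  return {
    new: items.filter((item) => item > threshold),
    processed: items.filter((item) => item <= threshold),
    max: Math.max(threshold, ...items),
  };
}

let result = checkLatest([2, 3]);
console.log(result); // { new: [2, 3], processed: [], max: 3 }

result = checkLatest([2, 3, 4, 5], result.max);
console.log(result); // { new: [4, 5], processed: [2, 3], max: 5 }
```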
context 'workflow' and 'd' never got saved, so both should be new, + // while 'a' and 'c' should be known + expect(processedData).toEqual({ new: ['b', 'd'], processed: ['a', 'c'] }); + }); + + test('Deduplication (mode: entries): DeduplicationHelper check maxEntries', async () => { + const contextData: ICheckProcessedContextData = { + workflow, + node, + }; + + let processedData: IDeduplicationOutput; + + processedData = await DataDeduplicationService.getInstance().checkProcessedAndRecord( + ['0', '1', '2', '3'], + 'node', + contextData, + { mode: 'entries', maxEntries: 5 }, + ); + + // All data should be new + expect(processedData).toEqual({ new: ['0', '1', '2', '3'], processed: [] }); + + // Add more data with context "node" + processedData = await DataDeduplicationService.getInstance().checkProcessedAndRecord( + ['4', '5', '6'], + 'node', + contextData, + { mode: 'entries', maxEntries: 5 }, + ); + + // All given data should be new + expect(processedData).toEqual({ new: ['4', '5', '6'], processed: [] }); + + // This should not make a difference, removing an item which does not exist + await DataDeduplicationService.getInstance().removeProcessed(['a'], 'node', contextData, { + mode: 'entries', + }); + + processedData = await DataDeduplicationService.getInstance().checkProcessedAndRecord( + ['0', '1', '2', '3', '4', '5', '6', '7'], + 'node', + contextData, + { mode: 'entries', maxEntries: 5 }, + ); + + // '7' should be new, and so should '0' and '1', because they were pruned (at most 5 entries get saved) + expect(processedData).toEqual({ new: ['0', '1', '7'], processed: ['2', '3', '4', '5', '6'] }); + }); + + describe('Deduplication (mode: latestIncrementalKey): DeduplicationHelper should record and check data correctly', () => { + const tests: Array<{ + description: string; + data: Array<{ + operation: 'checkProcessedAndRecord'; + input: DeduplicationItemTypes[]; + output: IDeduplicationOutput; + }>; + }> = [ + { + description: 'dates', + data: [ + { + operation: 'checkProcessedAndRecord', + input: [new Date('2022-01-02').toISOString(), new Date('2022-01-03').toISOString()], + output: { + new: [new Date('2022-01-02').toISOString(), new Date('2022-01-03').toISOString()], + processed: [], + }, + }, + { + operation: 'checkProcessedAndRecord', + input: [ + new Date('2022-01-02').toISOString(), + new Date('2022-01-03').toISOString(), + new Date('2022-01-04').toISOString(), + new Date('2022-01-05').toISOString(), + ], + output: { + new: [new Date('2022-01-04').toISOString(), new Date('2022-01-05').toISOString()], + processed: [ + new Date('2022-01-02').toISOString(), + new Date('2022-01-03').toISOString(), + ], + }, + }, + ], + }, + { + description: 'numbers', + data: [ + { + operation: 'checkProcessedAndRecord', + input: [2, 3], + output: { new: [2, 3], processed: [] }, + }, + { + operation: 'checkProcessedAndRecord', + input: [2, 3, 4, 5], + output: { new: [4, 5], processed: [2, 3] }, + }, + ], + }, + ]; + + for (const testData of tests) { + test(testData.description, async () => { + const context = 'node'; + const contextData: ICheckProcessedContextData = { + workflow, + node, + }; + const mode = testData.description === 'dates' ?
'latestDate' : 'latestIncrementalKey'; + + let processedData: IDeduplicationOutput; + + for (const data of testData.data) { + processedData = await DataDeduplicationService.getInstance()[data.operation]( + data.input, + context, + contextData, + { mode }, + ); + + expect(processedData).toEqual(data.output); + } + }); + } + }); + + test('removeProcessed should throw error for latest modes', async () => { + const contextData: ICheckProcessedContextData = { + workflow, + node, + }; + + await expect( + DataDeduplicationService.getInstance().removeProcessed(['2022-01-01'], 'node', contextData, { + mode: 'latestDate', + }), + ).rejects.toThrow('Removing processed data is not possible in mode "latest"'); + + await expect( + DataDeduplicationService.getInstance().removeProcessed([1], 'node', contextData, { + mode: 'latestIncrementalKey', + }), + ).rejects.toThrow('Removing processed data is not possible in mode "latest"'); + }); + + test('clearAllProcessedItems should delete all processed items for workflow scope', async () => { + const contextData: ICheckProcessedContextData = { + workflow, + }; + + // First, add some data + await DataDeduplicationService.getInstance().checkProcessedAndRecord( + ['a', 'b', 'c'], + 'workflow', + contextData, + { mode: 'entries' }, + ); + + // Clear all processed items + await DataDeduplicationService.getInstance().clearAllProcessedItems('workflow', contextData, { + mode: 'entries', + }); + + // Check that all items are now considered new + const processedData = await DataDeduplicationService.getInstance().checkProcessedAndRecord( + ['a', 'b', 'c'], + 'workflow', + contextData, + { mode: 'entries' }, + ); + + expect(processedData).toEqual({ new: ['a', 'b', 'c'], processed: [] }); + }); + + test('clearAllProcessedItems should delete all processed items for node scope', async () => { + const contextData: ICheckProcessedContextData = { + workflow, + node, + }; + + // First, add some data + await DataDeduplicationService.getInstance().checkProcessedAndRecord( + ['a', 'b', 'c'], + 'node', + contextData, + { mode: 'entries' }, + ); + + // Clear all processed items + await DataDeduplicationService.getInstance().clearAllProcessedItems('node', contextData, { + mode: 'entries', + }); + + // Check that all items are now considered new + const processedData = await DataDeduplicationService.getInstance().checkProcessedAndRecord( + ['a', 'b', 'c'], + 'node', + contextData, + { mode: 'entries' }, + ); + + expect(processedData).toEqual({ new: ['a', 'b', 'c'], processed: [] }); + }); + + test('clearAllProcessedItems should not clear workflow processed items when clearing node scope', async () => { + const contextDataWorkflow: ICheckProcessedContextData = { + workflow, + }; + + const contextDataNode: ICheckProcessedContextData = { + workflow, + node, + }; + + // Add data for workflow scope + await DataDeduplicationService.getInstance().checkProcessedAndRecord( + ['a', 'b', 'c'], + 'workflow', + contextDataWorkflow, + { mode: 'entries' }, + ); + + // Add data for node scope + await DataDeduplicationService.getInstance().checkProcessedAndRecord( + ['d', 'e', 'f'], + 'node', + contextDataNode, + { mode: 'entries' }, + ); + + // Clear all processed items for node scope + await DataDeduplicationService.getInstance().clearAllProcessedItems('node', contextDataNode, { + mode: 'entries', + }); + + // Ensure workflow processed items are still intact + const processedDataWorkflow = + await DataDeduplicationService.getInstance().checkProcessedAndRecord( + ['a', 'b', 'c'], + 'workflow', + 
contextDataWorkflow, + { mode: 'entries' }, + ); + + // Workflow items should still be considered processed + expect(processedDataWorkflow).toEqual({ new: [], processed: ['a', 'b', 'c'] }); + + // Ensure node processed items have been cleared + const processedDataNode = await DataDeduplicationService.getInstance().checkProcessedAndRecord( + ['d', 'e', 'f'], + 'node', + contextDataNode, + { mode: 'entries' }, + ); + + // Node items should be considered new + expect(processedDataNode).toEqual({ new: ['d', 'e', 'f'], processed: [] }); + }); + + test('clearAllProcessedItems should not clear node processed items when clearing workflow scope', async () => { + const contextDataWorkflow: ICheckProcessedContextData = { + workflow, + }; + + const contextDataNode: ICheckProcessedContextData = { + workflow, + node, + }; + + // Add data for workflow scope + await DataDeduplicationService.getInstance().checkProcessedAndRecord( + ['a', 'b', 'c'], + 'workflow', + contextDataWorkflow, + { mode: 'entries' }, + ); + + // Add data for node scope + await DataDeduplicationService.getInstance().checkProcessedAndRecord( + ['d', 'e', 'f'], + 'node', + contextDataNode, + { mode: 'entries' }, + ); + + // Clear all processed items for workflow scope + await DataDeduplicationService.getInstance().clearAllProcessedItems( + 'workflow', + contextDataWorkflow, + { + mode: 'entries', + }, + ); + + // Ensure node processed items are still intact + const processedDataNode = await DataDeduplicationService.getInstance().checkProcessedAndRecord( + ['d', 'e', 'f'], + 'node', + contextDataNode, + { mode: 'entries' }, + ); + + // Node items should still be considered processed + expect(processedDataNode).toEqual({ new: [], processed: ['d', 'e', 'f'] }); + + // Ensure workflow processed items have been cleared + const processedDataWorkflow = + await DataDeduplicationService.getInstance().checkProcessedAndRecord( + ['a', 'b', 'c'], + 'workflow', + contextDataWorkflow, + { mode: 'entries' }, + ); + + // Workflow items should be considered new + expect(processedDataWorkflow).toEqual({ new: ['a', 'b', 'c'], processed: [] }); + }); + + test('getProcessedDataCount should return correct count for different modes', async () => { + const contextData: ICheckProcessedContextData = { + workflow, + node, + }; + + // Test for 'entries' mode + await DataDeduplicationService.getInstance().checkProcessedAndRecord( + ['a', 'b', 'c'], + 'node', + contextData, + { mode: 'entries' }, + ); + + const entriesCount = await DataDeduplicationService.getInstance().getProcessedDataCount( + 'node', + contextData, + { mode: 'entries' }, + ); + + expect(entriesCount).toBe(3); + + // Test for other modes (should return 0) + const latestCount = await DataDeduplicationService.getInstance().getProcessedDataCount( + 'node', + contextData, + { mode: 'latestDate' }, + ); + + expect(latestCount).toBe(0); + }); +}); diff --git a/packages/cli/test/integration/execution.service.integration.test.ts b/packages/cli/test/integration/execution.service.integration.test.ts index 05061e536e..22d0d65754 100644 --- a/packages/cli/test/integration/execution.service.integration.test.ts +++ b/packages/cli/test/integration/execution.service.integration.test.ts @@ -6,6 +6,7 @@ import { ExecutionRepository } from '@/databases/repositories/execution.reposito import { WorkflowRepository } from '@/databases/repositories/workflow.repository'; import { ExecutionService } from '@/executions/execution.service'; import type { ExecutionSummaries } from '@/executions/execution.types'; +import { 
createTeamProject } from '@test-integration/db/projects'; import { annotateExecution, createAnnotationTags, createExecution } from './shared/db/executions'; import { createWorkflow } from './shared/db/workflows'; @@ -69,6 +70,7 @@ describe('ExecutionService', () => { mode: expect.any(String), retryOf: null, status: expect.any(String), + createdAt: expect.any(String), startedAt: expect.any(String), stoppedAt: expect.any(String), waitTill: null, @@ -294,6 +296,37 @@ describe('ExecutionService', () => { }); }); + test('should filter executions by `projectId`', async () => { + const firstProject = await createTeamProject(); + const secondProject = await createTeamProject(); + + const firstWorkflow = await createWorkflow(undefined, firstProject); + const secondWorkflow = await createWorkflow(undefined, secondProject); + + await createExecution({ status: 'success' }, firstWorkflow); + await createExecution({ status: 'success' }, firstWorkflow); + await createExecution({ status: 'success' }, secondWorkflow); // to filter out + + const query: ExecutionSummaries.RangeQuery = { + kind: 'range', + range: { limit: 20 }, + accessibleWorkflowIds: [firstWorkflow.id], + projectId: firstProject.id, + }; + + const output = await executionService.findRangeWithCount(query); + + expect(output).toEqual({ + count: 2, + estimated: false, + results: expect.arrayContaining([ + expect.objectContaining({ workflowId: firstWorkflow.id }), + expect.objectContaining({ workflowId: firstWorkflow.id }), + // execution for workflow in second project was filtered out + ]), + }); + }); + test('should exclude executions by inaccessible `workflowId`', async () => { const accessibleWorkflow = await createWorkflow(); const inaccessibleWorkflow = await createWorkflow(); @@ -478,6 +511,7 @@ describe('ExecutionService', () => { mode: expect.any(String), retryOf: null, status: expect.any(String), + createdAt: expect.any(String), startedAt: expect.any(String), stoppedAt: expect.any(String), waitTill: null, diff --git a/packages/cli/test/integration/me.api.test.ts b/packages/cli/test/integration/me.api.test.ts index df9b7c48b6..a29f158a32 100644 --- a/packages/cli/test/integration/me.api.test.ts +++ b/packages/cli/test/integration/me.api.test.ts @@ -1,5 +1,4 @@ import { GlobalConfig } from '@n8n/config'; -import { IsNull } from '@n8n/typeorm'; import type { IPersonalizationSurveyAnswersV4 } from 'n8n-workflow'; import { Container } from 'typedi'; import validator from 'validator'; @@ -10,8 +9,8 @@ import { UserRepository } from '@/databases/repositories/user.repository'; import { mockInstance } from '@test/mocking'; import { SUCCESS_RESPONSE_BODY } from './shared/constants'; -import { addApiKey, createOwner, createUser, createUserShell } from './shared/db/users'; -import { randomApiKey, randomEmail, randomName, randomValidPassword } from './shared/random'; +import { createUser, createUserShell } from './shared/db/users'; +import { randomEmail, randomName, randomValidPassword } from './shared/random'; import * as testDb from './shared/test-db'; import type { SuperAgentTest } from './shared/types'; import * as utils from './shared/utils/'; @@ -23,37 +22,12 @@ beforeEach(async () => { mockInstance(GlobalConfig, { publicApi: { disabled: false } }); }); -describe('When public API is disabled', () => { - let owner: User; - let authAgent: SuperAgentTest; - - beforeEach(async () => { - owner = await createOwner(); - await addApiKey(owner); - authAgent = testServer.authAgentFor(owner); - mockInstance(GlobalConfig, { publicApi: { disabled: true } 
}); - }); - - test('POST /me/api-key should 404', async () => { - await authAgent.post('/me/api-key').expect(404); - }); - - test('GET /me/api-key should 404', async () => { - await authAgent.get('/me/api-key').expect(404); - }); - - test('DELETE /me/api-key should 404', async () => { - await authAgent.delete('/me/api-key').expect(404); - }); -}); - describe('Owner shell', () => { let ownerShell: User; let authOwnerShellAgent: SuperAgentTest; beforeEach(async () => { ownerShell = await createUserShell('global:owner'); - await addApiKey(ownerShell); authOwnerShellAgent = testServer.authAgentFor(ownerShell); }); @@ -63,17 +37,8 @@ describe('Owner shell', () => { expect(response.statusCode).toBe(200); - const { - id, - email, - firstName, - lastName, - personalizationAnswers, - role, - password, - isPending, - apiKey, - } = response.body.data; + const { id, email, firstName, lastName, personalizationAnswers, role, password, isPending } = + response.body.data; expect(validator.isUUID(id)).toBe(true); expect(email).toBe(validPayload.email.toLowerCase()); @@ -83,7 +48,6 @@ describe('Owner shell', () => { expect(password).toBeUndefined(); expect(isPending).toBe(false); expect(role).toBe('global:owner'); - expect(apiKey).toBeUndefined(); const storedOwnerShell = await Container.get(UserRepository).findOneByOrFail({ id }); @@ -160,39 +124,6 @@ describe('Owner shell', () => { expect(storedShellOwner.personalizationAnswers).toEqual(validPayload); } }); - - test('POST /me/api-key should create an api key', async () => { - const response = await authOwnerShellAgent.post('/me/api-key'); - - expect(response.statusCode).toBe(200); - expect(response.body.data.apiKey).toBeDefined(); - expect(response.body.data.apiKey).not.toBeNull(); - - const storedShellOwner = await Container.get(UserRepository).findOneOrFail({ - where: { email: IsNull() }, - }); - - expect(storedShellOwner.apiKey).toEqual(response.body.data.apiKey); - }); - - test('GET /me/api-key should fetch the api key redacted', async () => { - const response = await authOwnerShellAgent.get('/me/api-key'); - - expect(response.statusCode).toBe(200); - expect(response.body.data.apiKey).not.toEqual(ownerShell.apiKey); - }); - - test('DELETE /me/api-key should delete the api key', async () => { - const response = await authOwnerShellAgent.delete('/me/api-key'); - - expect(response.statusCode).toBe(200); - - const storedShellOwner = await Container.get(UserRepository).findOneOrFail({ - where: { email: IsNull() }, - }); - - expect(storedShellOwner.apiKey).toBeNull(); - }); }); describe('Member', () => { @@ -204,10 +135,8 @@ describe('Member', () => { member = await createUser({ password: memberPassword, role: 'global:member', - apiKey: randomApiKey(), }); authMemberAgent = testServer.authAgentFor(member); - await utils.setInstanceOwnerSetUp(true); }); @@ -215,17 +144,8 @@ describe('Member', () => { for (const validPayload of VALID_PATCH_ME_PAYLOADS) { const response = await authMemberAgent.patch('/me').send(validPayload).expect(200); - const { - id, - email, - firstName, - lastName, - personalizationAnswers, - role, - password, - isPending, - apiKey, - } = response.body.data; + const { id, email, firstName, lastName, personalizationAnswers, role, password, isPending } = + response.body.data; expect(validator.isUUID(id)).toBe(true); expect(email).toBe(validPayload.email.toLowerCase()); @@ -235,7 +155,6 @@ describe('Member', () => { expect(password).toBeUndefined(); expect(isPending).toBe(false); expect(role).toBe('global:member'); - 
expect(apiKey).toBeUndefined(); const storedMember = await Container.get(UserRepository).findOneByOrFail({ id }); @@ -275,6 +194,7 @@ describe('Member', () => { }; const response = await authMemberAgent.patch('/me/password').send(validPayload); + expect(response.statusCode).toBe(200); expect(response.body).toEqual(SUCCESS_RESPONSE_BODY); @@ -314,35 +234,6 @@ describe('Member', () => { expect(storedAnswers).toEqual(validPayload); } }); - - test('POST /me/api-key should create an api key', async () => { - const response = await testServer.authAgentFor(member).post('/me/api-key'); - - expect(response.statusCode).toBe(200); - expect(response.body.data.apiKey).toBeDefined(); - expect(response.body.data.apiKey).not.toBeNull(); - - const storedMember = await Container.get(UserRepository).findOneByOrFail({ id: member.id }); - - expect(storedMember.apiKey).toEqual(response.body.data.apiKey); - }); - - test('GET /me/api-key should fetch the api key redacted', async () => { - const response = await testServer.authAgentFor(member).get('/me/api-key'); - - expect(response.statusCode).toBe(200); - expect(response.body.data.apiKey).not.toEqual(member.apiKey); - }); - - test('DELETE /me/api-key should delete the api key', async () => { - const response = await testServer.authAgentFor(member).delete('/me/api-key'); - - expect(response.statusCode).toBe(200); - - const storedMember = await Container.get(UserRepository).findOneByOrFail({ id: member.id }); - - expect(storedMember.apiKey).toBeNull(); - }); }); describe('Owner', () => { diff --git a/packages/cli/test/integration/pruning.service.test.ts b/packages/cli/test/integration/pruning.service.test.ts index 990d0aec3a..c4d1957de0 100644 --- a/packages/cli/test/integration/pruning.service.test.ts +++ b/packages/cli/test/integration/pruning.service.test.ts @@ -8,7 +8,7 @@ import { TIME } from '@/constants'; import type { ExecutionEntity } from '@/databases/entities/execution-entity'; import type { WorkflowEntity } from '@/databases/entities/workflow-entity'; import { ExecutionRepository } from '@/databases/repositories/execution.repository'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { PruningService } from '@/services/pruning.service'; import { diff --git a/packages/cli/test/integration/public-api/credentials.test.ts b/packages/cli/test/integration/public-api/credentials.test.ts index 7323de391e..5574d4f3bf 100644 --- a/packages/cli/test/integration/public-api/credentials.test.ts +++ b/packages/cli/test/integration/public-api/credentials.test.ts @@ -7,8 +7,8 @@ import { SharedCredentialsRepository } from '@/databases/repositories/shared-cre import { createTeamProject } from '@test-integration/db/projects'; import { affixRoleToSaveCredential, createCredentials } from '../shared/db/credentials'; -import { addApiKey, createUser, createUserShell } from '../shared/db/users'; -import { randomApiKey, randomName } from '../shared/random'; +import { createMemberWithApiKey, createOwnerWithApiKey } from '../shared/db/users'; +import { randomName } from '../shared/random'; import * as testDb from '../shared/test-db'; import type { CredentialPayload, SaveCredentialFunction } from '../shared/types'; import type { SuperAgentTest } from '../shared/types'; @@ -24,8 +24,8 @@ let saveCredential: SaveCredentialFunction; const testServer = utils.setupTestServer({ endpointGroups: ['publicApi'] }); beforeAll(async () => { - owner = await addApiKey(await createUserShell('global:owner')); - member = await createUser({ role: 
'global:member', apiKey: randomApiKey() }); + owner = await createOwnerWithApiKey(); + member = await createMemberWithApiKey(); authOwnerAgent = testServer.publicApiAgentFor(owner); authMemberAgent = testServer.publicApiAgentFor(member); @@ -156,10 +156,7 @@ describe('DELETE /credentials/:id', () => { }); test('should delete owned cred for member but leave others untouched', async () => { - const anotherMember = await createUser({ - role: 'global:member', - apiKey: randomApiKey(), - }); + const anotherMember = await createMemberWithApiKey(); const savedCredential = await saveCredential(dbCredential(), { user: member }); const notToBeChangedCredential = await saveCredential(dbCredential(), { user: member }); diff --git a/packages/cli/test/integration/public-api/executions.test.ts b/packages/cli/test/integration/public-api/executions.test.ts index 019f69adc5..13324ed55c 100644 --- a/packages/cli/test/integration/public-api/executions.test.ts +++ b/packages/cli/test/integration/public-api/executions.test.ts @@ -12,13 +12,12 @@ import { createSuccessfulExecution, createWaitingExecution, } from '../shared/db/executions'; -import { createUser } from '../shared/db/users'; +import { createMemberWithApiKey, createOwnerWithApiKey } from '../shared/db/users'; import { createManyWorkflows, createWorkflow, shareWorkflowWithUsers, } from '../shared/db/workflows'; -import { randomApiKey } from '../shared/random'; import * as testDb from '../shared/test-db'; import type { SuperAgentTest } from '../shared/types'; import * as utils from '../shared/utils/'; @@ -36,9 +35,9 @@ mockInstance(Telemetry); const testServer = utils.setupTestServer({ endpointGroups: ['publicApi'] }); beforeAll(async () => { - owner = await createUser({ role: 'global:owner', apiKey: randomApiKey() }); - user1 = await createUser({ role: 'global:member', apiKey: randomApiKey() }); - user2 = await createUser({ role: 'global:member', apiKey: randomApiKey() }); + owner = await createOwnerWithApiKey(); + user1 = await createMemberWithApiKey(); + user2 = await createMemberWithApiKey(); // TODO: mock BinaryDataService instead await utils.initBinaryDataService(); diff --git a/packages/cli/test/integration/public-api/projects.test.ts b/packages/cli/test/integration/public-api/projects.test.ts index 2bc8e9346b..f815d9d07b 100644 --- a/packages/cli/test/integration/public-api/projects.test.ts +++ b/packages/cli/test/integration/public-api/projects.test.ts @@ -2,7 +2,7 @@ import { FeatureNotLicensedError } from '@/errors/feature-not-licensed.error'; import { Telemetry } from '@/telemetry'; import { mockInstance } from '@test/mocking'; import { createTeamProject, getProjectByNameOrFail } from '@test-integration/db/projects'; -import { createMember, createOwner } from '@test-integration/db/users'; +import { createMemberWithApiKey, createOwnerWithApiKey } from '@test-integration/db/users'; import { setupTestServer } from '@test-integration/utils'; import * as testDb from '../shared/test-db'; @@ -26,7 +26,7 @@ describe('Projects in Public API', () => { */ testServer.license.setQuota('quota:maxTeamProjects', -1); testServer.license.enable('feat:projectRole:admin'); - const owner = await createOwner({ withApiKey: true }); + const owner = await createOwnerWithApiKey(); const projects = await Promise.all([ createTeamProject(), createTeamProject(), @@ -53,15 +53,10 @@ describe('Projects in Public API', () => { }); it('if not authenticated, should reject', async () => { - /** - * Arrange - */ - const owner = await createOwner({ withApiKey: false }); - /** * 
Act */ - const response = await testServer.publicApiAgentFor(owner).get('/projects'); + const response = await testServer.publicApiAgentWithoutApiKey().get('/projects'); /** * Assert @@ -74,7 +69,7 @@ describe('Projects in Public API', () => { /** * Arrange */ - const owner = await createOwner({ withApiKey: true }); + const owner = await createOwnerWithApiKey(); /** * Act @@ -97,12 +92,12 @@ describe('Projects in Public API', () => { */ testServer.license.setQuota('quota:maxTeamProjects', -1); testServer.license.enable('feat:projectRole:admin'); - const owner = await createMember({ withApiKey: true }); + const member = await createMemberWithApiKey(); /** * Act */ - const response = await testServer.publicApiAgentFor(owner).get('/projects'); + const response = await testServer.publicApiAgentFor(member).get('/projects'); /** * Assert @@ -119,7 +114,7 @@ describe('Projects in Public API', () => { */ testServer.license.setQuota('quota:maxTeamProjects', -1); testServer.license.enable('feat:projectRole:admin'); - const owner = await createOwner({ withApiKey: true }); + const owner = await createOwnerWithApiKey(); const projectPayload = { name: 'some-project' }; /** @@ -150,14 +145,13 @@ describe('Projects in Public API', () => { /** * Arrange */ - const owner = await createOwner({ withApiKey: false }); const projectPayload = { name: 'some-project' }; /** * Act */ const response = await testServer - .publicApiAgentFor(owner) + .publicApiAgentWithoutApiKey() .post('/projects') .send(projectPayload); @@ -172,7 +166,7 @@ describe('Projects in Public API', () => { /** * Arrange */ - const owner = await createOwner({ withApiKey: true }); + const owner = await createOwnerWithApiKey(); const projectPayload = { name: 'some-project' }; /** @@ -199,7 +193,7 @@ describe('Projects in Public API', () => { */ testServer.license.setQuota('quota:maxTeamProjects', -1); testServer.license.enable('feat:projectRole:admin'); - const member = await createMember({ withApiKey: true }); + const member = await createMemberWithApiKey(); const projectPayload = { name: 'some-project' }; /** @@ -225,7 +219,7 @@ describe('Projects in Public API', () => { */ testServer.license.setQuota('quota:maxTeamProjects', -1); testServer.license.enable('feat:projectRole:admin'); - const owner = await createOwner({ withApiKey: true }); + const owner = await createOwnerWithApiKey(); const project = await createTeamProject(); /** @@ -244,13 +238,14 @@ describe('Projects in Public API', () => { /** * Arrange */ - const owner = await createOwner({ withApiKey: false }); const project = await createTeamProject(); /** * Act */ - const response = await testServer.publicApiAgentFor(owner).delete(`/projects/${project.id}`); + const response = await testServer + .publicApiAgentWithoutApiKey() + .delete(`/projects/${project.id}`); /** * Assert @@ -263,7 +258,7 @@ describe('Projects in Public API', () => { /** * Arrange */ - const owner = await createOwner({ withApiKey: true }); + const owner = await createOwnerWithApiKey(); const project = await createTeamProject(); /** @@ -287,13 +282,13 @@ describe('Projects in Public API', () => { */ testServer.license.setQuota('quota:maxTeamProjects', -1); testServer.license.enable('feat:projectRole:admin'); - const member = await createMember({ withApiKey: true }); + const owner = await createMemberWithApiKey(); const project = await createTeamProject(); /** * Act */ - const response = await testServer.publicApiAgentFor(member).delete(`/projects/${project.id}`); + const response = await 
testServer.publicApiAgentFor(owner).delete(`/projects/${project.id}`); /** * Assert @@ -310,7 +305,7 @@ describe('Projects in Public API', () => { */ testServer.license.setQuota('quota:maxTeamProjects', -1); testServer.license.enable('feat:projectRole:admin'); - const owner = await createOwner({ withApiKey: true }); + const owner = await createOwnerWithApiKey(); const project = await createTeamProject('old-name'); /** @@ -332,14 +327,13 @@ describe('Projects in Public API', () => { /** * Arrange */ - const owner = await createOwner({ withApiKey: false }); const project = await createTeamProject(); /** * Act */ const response = await testServer - .publicApiAgentFor(owner) + .publicApiAgentWithoutApiKey() .put(`/projects/${project.id}`) .send({ name: 'new-name' }); @@ -354,7 +348,7 @@ describe('Projects in Public API', () => { /** * Arrange */ - const owner = await createOwner({ withApiKey: true }); + const owner = await createOwnerWithApiKey(); const project = await createTeamProject(); /** @@ -381,7 +375,7 @@ describe('Projects in Public API', () => { */ testServer.license.setQuota('quota:maxTeamProjects', -1); testServer.license.enable('feat:projectRole:admin'); - const member = await createMember({ withApiKey: true }); + const member = await createMemberWithApiKey(); const project = await createTeamProject(); /** diff --git a/packages/cli/test/integration/public-api/tags.test.ts b/packages/cli/test/integration/public-api/tags.test.ts index 776d79d368..c2e25cc3f6 100644 --- a/packages/cli/test/integration/public-api/tags.test.ts +++ b/packages/cli/test/integration/public-api/tags.test.ts @@ -4,8 +4,7 @@ import type { User } from '@/databases/entities/user'; import { TagRepository } from '@/databases/repositories/tag.repository'; import { createTag } from '../shared/db/tags'; -import { createUser } from '../shared/db/users'; -import { randomApiKey } from '../shared/random'; +import { createMemberWithApiKey, createOwnerWithApiKey } from '../shared/db/users'; import * as testDb from '../shared/test-db'; import type { SuperAgentTest } from '../shared/types'; import * as utils from '../shared/utils/'; @@ -18,15 +17,8 @@ let authMemberAgent: SuperAgentTest; const testServer = utils.setupTestServer({ endpointGroups: ['publicApi'] }); beforeAll(async () => { - owner = await createUser({ - role: 'global:owner', - apiKey: randomApiKey(), - }); - - member = await createUser({ - role: 'global:member', - apiKey: randomApiKey(), - }); + owner = await createOwnerWithApiKey(); + member = await createMemberWithApiKey(); }); beforeEach(async () => { diff --git a/packages/cli/test/integration/public-api/users.ee.test.ts b/packages/cli/test/integration/public-api/users.ee.test.ts index 04649403d9..08161a41d8 100644 --- a/packages/cli/test/integration/public-api/users.ee.test.ts +++ b/packages/cli/test/integration/public-api/users.ee.test.ts @@ -6,8 +6,13 @@ import { License } from '@/license'; import { createTeamProject, linkUserToProject } from '@test-integration/db/projects'; import { mockInstance } from '../../shared/mocking'; -import { createOwner, createUser, createUserShell } from '../shared/db/users'; -import { randomApiKey } from '../shared/random'; +import { + createMember, + createMemberWithApiKey, + createOwnerWithApiKey, + createUser, + createUserShell, +} from '../shared/db/users'; import * as testDb from '../shared/test-db'; import type { SuperAgentTest } from '../shared/types'; import * as utils from '../shared/utils/'; @@ -25,32 +30,23 @@ beforeEach(async () => { describe('With license 
unlimited quota:users', () => { describe('GET /users', () => { test('should fail due to missing API Key', async () => { - const owner = await createUser({ role: 'global:owner' }); - const authOwnerAgent = testServer.publicApiAgentFor(owner); + const authOwnerAgent = testServer.publicApiAgentWithoutApiKey(); await authOwnerAgent.get('/users').expect(401); }); test('should fail due to invalid API Key', async () => { - const owner = await createUser({ - role: 'global:owner', - apiKey: randomApiKey(), - }); - owner.apiKey = 'invalid-key'; - const authOwnerAgent = testServer.publicApiAgentFor(owner); + const authOwnerAgent = testServer.publicApiAgentWithApiKey('invalid-key'); await authOwnerAgent.get('/users').expect(401); }); test('should fail due to member trying to access owner only endpoint', async () => { - const member = await createUser({ apiKey: randomApiKey() }); + const member = await createMemberWithApiKey(); const authMemberAgent = testServer.publicApiAgentFor(member); await authMemberAgent.get('/users').expect(403); }); test('should return all users', async () => { - const owner = await createUser({ - role: 'global:owner', - apiKey: randomApiKey(), - }); + const owner = await createOwnerWithApiKey(); const authOwnerAgent = testServer.publicApiAgentFor(owner); @@ -92,10 +88,10 @@ describe('With license unlimited quota:users', () => { * Arrange */ const [owner, firstMember, secondMember, thirdMember] = await Promise.all([ - createOwner({ withApiKey: true }), - createUser({ role: 'global:member' }), - createUser({ role: 'global:member' }), - createUser({ role: 'global:member' }), + createOwnerWithApiKey(), + createMember(), + createMember(), + createMember(), ]); const [firstProject, secondProject] = await Promise.all([ @@ -130,40 +126,30 @@ describe('With license unlimited quota:users', () => { describe('GET /users/:id', () => { test('should fail due to missing API Key', async () => { - const owner = await createUser({ role: 'global:owner' }); - const authOwnerAgent = testServer.publicApiAgentFor(owner); + const owner = await createOwnerWithApiKey(); + const authOwnerAgent = testServer.publicApiAgentWithoutApiKey(); await authOwnerAgent.get(`/users/${owner.id}`).expect(401); }); test('should fail due to invalid API Key', async () => { - const owner = await createUser({ - role: 'global:owner', - apiKey: randomApiKey(), - }); - owner.apiKey = 'invalid-key'; - const authOwnerAgent = testServer.publicApiAgentFor(owner); + const owner = await createOwnerWithApiKey(); + const authOwnerAgent = testServer.publicApiAgentWithApiKey('invalid-key'); await authOwnerAgent.get(`/users/${owner.id}`).expect(401); }); test('should fail due to member trying to access owner only endpoint', async () => { - const member = await createUser({ apiKey: randomApiKey() }); + const member = await createMemberWithApiKey(); const authMemberAgent = testServer.publicApiAgentFor(member); await authMemberAgent.get(`/users/${member.id}`).expect(403); }); test('should return 404 for non-existing id ', async () => { - const owner = await createUser({ - role: 'global:owner', - apiKey: randomApiKey(), - }); + const owner = await createOwnerWithApiKey(); const authOwnerAgent = testServer.publicApiAgentFor(owner); await authOwnerAgent.get(`/users/${uuid()}`).expect(404); }); test('should return a pending user', async () => { - const owner = await createUser({ - role: 'global:owner', - apiKey: randomApiKey(), - }); + const owner = await createOwnerWithApiKey(); const { id: memberId } = await createUserShell('global:member'); 
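Taken together, these hunks migrate the public API tests from ad-hoc `createUser({ apiKey: randomApiKey() })` setups to dedicated helpers and purpose-built agents. A minimal sketch of the resulting pattern, assuming the helpers (`createOwnerWithApiKey`, `createMemberWithApiKey`) and agents (`publicApiAgentWithApiKey`, `publicApiAgentWithoutApiKey`) behave as introduced in this diff:

```ts
import { createMemberWithApiKey, createOwnerWithApiKey } from '../shared/db/users';
import * as utils from '../shared/utils/';

const testServer = utils.setupTestServer({ endpointGroups: ['publicApi'] });

test('GET /users enforces API-key auth and owner-only access', async () => {
	// Requests without a key, or with an invalid key, are rejected outright.
	await testServer.publicApiAgentWithoutApiKey().get('/users').expect(401);
	await testServer.publicApiAgentWithApiKey('invalid-key').get('/users').expect(401);

	// A member's key authenticates, but the endpoint is owner-only.
	const member = await createMemberWithApiKey();
	await testServer.publicApiAgentFor(member).get('/users').expect(403);

	// An owner's key passes both checks.
	const owner = await createOwnerWithApiKey();
	await testServer.publicApiAgentFor(owner).get('/users').expect(200);
});
```

The payoff of this pattern is that the API key lives in the `ApiKey` table rather than on the user row, so tests exercise the same lookup path as production.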
@@ -199,20 +185,13 @@ describe('With license unlimited quota:users', () => { describe('GET /users/:email', () => { test('with non-existing email should return 404', async () => { - const owner = await createUser({ - role: 'global:owner', - apiKey: randomApiKey(), - }); + const owner = await createOwnerWithApiKey(); const authOwnerAgent = testServer.publicApiAgentFor(owner); await authOwnerAgent.get('/users/jhondoe@gmail.com').expect(404); }); test('should return a user', async () => { - const owner = await createUser({ - role: 'global:owner', - apiKey: randomApiKey(), - }); - + const owner = await createOwnerWithApiKey(); const authOwnerAgent = testServer.publicApiAgentFor(owner); const response = await authOwnerAgent.get(`/users/${owner.email}`).expect(200); @@ -249,10 +228,7 @@ describe('With license without quota:users', () => { beforeEach(async () => { mockInstance(License, { getUsersLimit: jest.fn().mockReturnValue(null) }); - const owner = await createUser({ - role: 'global:owner', - apiKey: randomApiKey(), - }); + const owner = await createOwnerWithApiKey(); authOwnerAgent = testServer.publicApiAgentFor(owner); }); diff --git a/packages/cli/test/integration/public-api/users.test.ts b/packages/cli/test/integration/public-api/users.test.ts index 48003d838c..0abfee9b1f 100644 --- a/packages/cli/test/integration/public-api/users.test.ts +++ b/packages/cli/test/integration/public-api/users.test.ts @@ -1,7 +1,12 @@ import { FeatureNotLicensedError } from '@/errors/feature-not-licensed.error'; import { Telemetry } from '@/telemetry'; import { mockInstance } from '@test/mocking'; -import { createMember, createOwner, getUserById } from '@test-integration/db/users'; +import { + createMember, + createMemberWithApiKey, + createOwnerWithApiKey, + getUserById, +} from '@test-integration/db/users'; import { setupTestServer } from '@test-integration/utils'; import * as testDb from '../shared/test-db'; @@ -23,13 +28,12 @@ describe('Users in Public API', () => { /** * Arrange */ - const owner = await createOwner({ withApiKey: false }); const payload = { email: 'test@test.com', role: 'global:admin' }; /** * Act */ - const response = await testServer.publicApiAgentFor(owner).post('/users').send(payload); + const response = await testServer.publicApiAgentWithApiKey('').post('/users').send(payload); /** * Assert @@ -42,7 +46,7 @@ describe('Users in Public API', () => { * Arrange */ testServer.license.enable('feat:advancedPermissions'); - const member = await createMember({ withApiKey: true }); + const member = await createMemberWithApiKey(); const payload = [{ email: 'test@test.com', role: 'global:admin' }]; /** @@ -62,7 +66,8 @@ describe('Users in Public API', () => { * Arrange */ testServer.license.enable('feat:advancedPermissions'); - const owner = await createOwner({ withApiKey: true }); + const owner = await createOwnerWithApiKey(); + await createOwnerWithApiKey(); const payload = [{ email: 'test@test.com', role: 'global:admin' }]; /** @@ -99,13 +104,12 @@ describe('Users in Public API', () => { /** * Arrange */ - const owner = await createOwner({ withApiKey: false }); const member = await createMember(); /** * Act */ - const response = await testServer.publicApiAgentFor(owner).delete(`/users/${member.id}`); + const response = await testServer.publicApiAgentWithApiKey('').delete(`/users/${member.id}`); /** * Assert @@ -118,14 +122,14 @@ describe('Users in Public API', () => { * Arrange */ testServer.license.enable('feat:advancedPermissions'); - const firstMember = await createMember({ withApiKey: 
true }); + const member = await createMemberWithApiKey(); const secondMember = await createMember(); /** * Act */ const response = await testServer - .publicApiAgentFor(firstMember) + .publicApiAgentFor(member) .delete(`/users/${secondMember.id}`); /** @@ -140,7 +144,7 @@ describe('Users in Public API', () => { * Arrange */ testServer.license.enable('feat:advancedPermissions'); - const owner = await createOwner({ withApiKey: true }); + const owner = await createOwnerWithApiKey(); const member = await createMember(); /** @@ -161,13 +165,14 @@ describe('Users in Public API', () => { /** * Arrange */ - const owner = await createOwner({ withApiKey: false }); const member = await createMember(); /** * Act */ - const response = await testServer.publicApiAgentFor(owner).patch(`/users/${member.id}/role`); + const response = await testServer + .publicApiAgentWithApiKey('') + .patch(`/users/${member.id}/role`); /** * Assert @@ -179,7 +184,7 @@ describe('Users in Public API', () => { /** * Arrange */ - const owner = await createOwner({ withApiKey: true }); + const owner = await createOwnerWithApiKey(); const member = await createMember(); const payload = { newRoleName: 'global:admin' }; @@ -206,7 +211,7 @@ describe('Users in Public API', () => { * Arrange */ testServer.license.enable('feat:advancedPermissions'); - const firstMember = await createMember({ withApiKey: true }); + const member = await createMemberWithApiKey(); const secondMember = await createMember(); const payload = { newRoleName: 'global:admin' }; @@ -214,7 +219,7 @@ describe('Users in Public API', () => { * Act */ const response = await testServer - .publicApiAgentFor(firstMember) + .publicApiAgentFor(member) .patch(`/users/${secondMember.id}/role`) .send(payload); @@ -230,7 +235,7 @@ describe('Users in Public API', () => { * Arrange */ testServer.license.enable('feat:advancedPermissions'); - const owner = await createOwner({ withApiKey: true }); + const owner = await createOwnerWithApiKey(); const member = await createMember(); const payload = { newRoleName: 'invalid' }; @@ -253,7 +258,7 @@ describe('Users in Public API', () => { * Arrange */ testServer.license.enable('feat:advancedPermissions'); - const owner = await createOwner({ withApiKey: true }); + const owner = await createOwnerWithApiKey(); const member = await createMember(); const payload = { newRoleName: 'global:admin' }; diff --git a/packages/cli/test/integration/public-api/variables.test.ts b/packages/cli/test/integration/public-api/variables.test.ts index c7f6ba341c..61f75d4641 100644 --- a/packages/cli/test/integration/public-api/variables.test.ts +++ b/packages/cli/test/integration/public-api/variables.test.ts @@ -1,5 +1,5 @@ import { FeatureNotLicensedError } from '@/errors/feature-not-licensed.error'; -import { createOwner } from '@test-integration/db/users'; +import { createOwnerWithApiKey } from '@test-integration/db/users'; import { createVariable, getVariableOrFail } from '@test-integration/db/variables'; import { setupTestServer } from '@test-integration/utils'; @@ -22,7 +22,7 @@ describe('Variables in Public API', () => { * Arrange */ testServer.license.enable('feat:variables'); - const owner = await createOwner({ withApiKey: true }); + const owner = await createOwnerWithApiKey(); const variables = await Promise.all([createVariable(), createVariable(), createVariable()]); /** @@ -48,7 +48,8 @@ describe('Variables in Public API', () => { /** * Arrange */ - const owner = await createOwner({ withApiKey: true }); + + const owner = await createOwnerWithApiKey(); 
/** * Act @@ -72,7 +73,7 @@ describe('Variables in Public API', () => { * Arrange */ testServer.license.enable('feat:variables'); - const owner = await createOwner({ withApiKey: true }); + const owner = await createOwnerWithApiKey(); const variablePayload = { key: 'key', value: 'value' }; /** @@ -96,7 +97,7 @@ describe('Variables in Public API', () => { /** * Arrange */ - const owner = await createOwner({ withApiKey: true }); + const owner = await createOwnerWithApiKey(); const variablePayload = { key: 'key', value: 'value' }; /** @@ -124,7 +125,7 @@ describe('Variables in Public API', () => { * Arrange */ testServer.license.enable('feat:variables'); - const owner = await createOwner({ withApiKey: true }); + const owner = await createOwnerWithApiKey(); const variable = await createVariable(); /** @@ -145,7 +146,7 @@ describe('Variables in Public API', () => { /** * Arrange */ - const owner = await createOwner({ withApiKey: true }); + const owner = await createOwnerWithApiKey(); const variable = await createVariable(); /** diff --git a/packages/cli/test/integration/public-api/workflows.test.ts b/packages/cli/test/integration/public-api/workflows.test.ts index 5bb661eaa3..687b29da6a 100644 --- a/packages/cli/test/integration/public-api/workflows.test.ts +++ b/packages/cli/test/integration/public-api/workflows.test.ts @@ -17,9 +17,8 @@ import { createTeamProject } from '@test-integration/db/projects'; import { mockInstance } from '../../shared/mocking'; import { createTag } from '../shared/db/tags'; -import { createUser } from '../shared/db/users'; +import { createMemberWithApiKey, createOwnerWithApiKey } from '../shared/db/users'; import { createWorkflow, createWorkflowWithTrigger } from '../shared/db/workflows'; -import { randomApiKey } from '../shared/random'; import * as testDb from '../shared/test-db'; import type { SuperAgentTest } from '../shared/types'; import * as utils from '../shared/utils/'; @@ -40,18 +39,13 @@ const license = testServer.license; mockInstance(ExecutionService); beforeAll(async () => { - owner = await createUser({ - role: 'global:owner', - apiKey: randomApiKey(), - }); + owner = await createOwnerWithApiKey(); ownerPersonalProject = await Container.get(ProjectRepository).getPersonalProjectForUserOrFail( owner.id, ); - member = await createUser({ - role: 'global:member', - apiKey: randomApiKey(), - }); + member = await createMemberWithApiKey(); + memberPersonalProject = await Container.get(ProjectRepository).getPersonalProjectForUserOrFail( member.id, ); @@ -1518,6 +1512,10 @@ describe('PUT /workflows/:id/transfer', () => { const secondProject = await createTeamProject('second-project', member); const workflow = await createWorkflow({}, firstProject); + // Make data more similar to real world scenario by injecting additional records into the database + await createTeamProject('third-project', member); + await createWorkflow({}, firstProject); + /** * Act */ @@ -1529,6 +1527,13 @@ describe('PUT /workflows/:id/transfer', () => { * Assert */ expect(response.statusCode).toBe(204); + + const workflowsInProjectResponse = await authMemberAgent + .get(`/workflows?projectId=${secondProject.id}`) + .send(); + + expect(workflowsInProjectResponse.statusCode).toBe(200); + expect(workflowsInProjectResponse.body.data[0].id).toBe(workflow.id); }); test('if no destination project, should reject', async () => { diff --git a/packages/cli/test/integration/runners/task-runner-process.test.ts b/packages/cli/test/integration/runners/task-runner-process.test.ts new file mode 100644 index 
0000000000..f517ee6398 --- /dev/null +++ b/packages/cli/test/integration/runners/task-runner-process.test.ts @@ -0,0 +1,91 @@ +import { GlobalConfig } from '@n8n/config'; +import Container from 'typedi'; + +import { TaskRunnerService } from '@/runners/runner-ws-server'; +import { TaskBroker } from '@/runners/task-broker.service'; +import { TaskRunnerProcess } from '@/runners/task-runner-process'; +import { TaskRunnerServer } from '@/runners/task-runner-server'; +import { retryUntil } from '@test-integration/retry-until'; + +describe('TaskRunnerProcess', () => { + const authToken = 'token'; + const globalConfig = Container.get(GlobalConfig); + globalConfig.taskRunners.authToken = authToken; + globalConfig.taskRunners.port = 0; // Use any port + const taskRunnerServer = Container.get(TaskRunnerServer); + + const runnerProcess = Container.get(TaskRunnerProcess); + const taskBroker = Container.get(TaskBroker); + const taskRunnerService = Container.get(TaskRunnerService); + + beforeAll(async () => { + await taskRunnerServer.start(); + // Set the port to the actually used port + globalConfig.taskRunners.port = taskRunnerServer.port; + }); + + afterAll(async () => { + await taskRunnerServer.stop(); + }); + + afterEach(async () => { + await runnerProcess.stop(); + }); + + const getNumConnectedRunners = () => taskRunnerService.runnerConnections.size; + const getNumRegisteredRunners = () => taskBroker.getKnownRunners().size; + + it('should start and connect the task runner', async () => { + // Act + await runnerProcess.start(); + + // Assert + expect(runnerProcess.isRunning).toBeTruthy(); + + // Wait until the runner has connected + await retryUntil(() => expect(getNumConnectedRunners()).toBe(1)); + expect(getNumRegisteredRunners()).toBe(1); + }); + + it('should stop and disconnect the task runner', async () => { + // Arrange + await runnerProcess.start(); + + // Wait until the runner has connected + await retryUntil(() => expect(getNumConnectedRunners()).toBe(1)); + expect(getNumRegisteredRunners()).toBe(1); + + // Act + await runnerProcess.stop(); + + // Assert + // Wait until the runner has disconnected + await retryUntil(() => expect(getNumConnectedRunners()).toBe(0)); + + expect(runnerProcess.isRunning).toBeFalsy(); + expect(getNumRegisteredRunners()).toBe(0); + }); + + it('should restart the task runner if it exits', async () => { + // Arrange + await runnerProcess.start(); + + // Wait until the runner has connected + await retryUntil(() => expect(getNumConnectedRunners()).toBe(1)); + const processId = runnerProcess.pid; + + // Act + // @ts-expect-error private property + runnerProcess.process?.kill('SIGKILL'); + + // Assert + // Wait until the runner is running again + await retryUntil(() => expect(runnerProcess.isRunning).toBeTruthy()); + expect(runnerProcess.pid).not.toBe(processId); + + // Wait until the runner has connected again + await retryUntil(() => expect(getNumConnectedRunners()).toBe(1)); + expect(getNumConnectedRunners()).toBe(1); + expect(getNumRegisteredRunners()).toBe(1); + }); +}); diff --git a/packages/cli/test/integration/security-audit/credentials-risk-reporter.test.ts b/packages/cli/test/integration/security-audit/credentials-risk-reporter.test.ts index 8da1f3e1bf..4513beb6bb 100644 --- a/packages/cli/test/integration/security-audit/credentials-risk-reporter.test.ts +++ b/packages/cli/test/integration/security-audit/credentials-risk-reporter.test.ts @@ -159,6 +159,7 @@ test('should report credential in not recently executed workflow', async () => { const savedExecution =
await Container.get(ExecutionRepository).save({ finished: true, mode: 'manual', + createdAt: date, startedAt: date, stoppedAt: date, workflowId: workflow.id, @@ -227,6 +228,7 @@ test('should not report credentials in recently executed workflow', async () => const savedExecution = await Container.get(ExecutionRepository).save({ finished: true, mode: 'manual', + createdAt: date, startedAt: date, stoppedAt: date, workflowId: workflow.id, diff --git a/packages/cli/test/integration/shared/db/executions.ts b/packages/cli/test/integration/shared/db/executions.ts index dac3124681..4dd0b4fa76 100644 --- a/packages/cli/test/integration/shared/db/executions.ts +++ b/packages/cli/test/integration/shared/db/executions.ts @@ -4,7 +4,7 @@ import Container from 'typedi'; import type { ExecutionData } from '@/databases/entities/execution-data'; import type { ExecutionEntity } from '@/databases/entities/execution-entity'; import type { WorkflowEntity } from '@/databases/entities/workflow-entity'; -import { AnnotationTagRepository } from '@/databases/repositories/annotation-tag.repository'; +import { AnnotationTagRepository } from '@/databases/repositories/annotation-tag.repository.ee'; import { ExecutionDataRepository } from '@/databases/repositories/execution-data.repository'; import { ExecutionMetadataRepository } from '@/databases/repositories/execution-metadata.repository'; import { ExecutionRepository } from '@/databases/repositories/execution.repository'; @@ -39,6 +39,7 @@ export async function createExecution( const execution = await Container.get(ExecutionRepository).save({ finished: finished ?? true, mode: mode ?? 'manual', + createdAt: new Date(), startedAt: startedAt ?? new Date(), ...(workflow !== undefined && { workflowId: workflow.id }), stoppedAt: stoppedAt ?? 
new Date(), diff --git a/packages/cli/test/integration/shared/db/users.ts b/packages/cli/test/integration/shared/db/users.ts index 83192822f9..62f9f39a05 100644 --- a/packages/cli/test/integration/shared/db/users.ts +++ b/packages/cli/test/integration/shared/db/users.ts @@ -1,8 +1,10 @@ import { hash } from 'bcryptjs'; +import { randomString } from 'n8n-workflow'; import Container from 'typedi'; import { AuthIdentity } from '@/databases/entities/auth-identity'; import { type GlobalRole, type User } from '@/databases/entities/user'; +import { ApiKeyRepository } from '@/databases/repositories/api-key.repository'; import { AuthIdentityRepository } from '@/databases/repositories/auth-identity.repository'; import { AuthUserRepository } from '@/databases/repositories/auth-user.repository'; import { UserRepository } from '@/databases/repositories/user.repository'; @@ -79,19 +81,38 @@ export async function createUserWithMfaEnabled( }; } -export async function createOwner({ withApiKey } = { withApiKey: false }) { - if (withApiKey) { - return await addApiKey(await createUser({ role: 'global:owner' })); - } +const createApiKeyEntity = (user: User) => { + const apiKey = randomApiKey(); + return Container.get(ApiKeyRepository).create({ + userId: user.id, + label: randomString(10), + apiKey, + }); +}; +export const addApiKey = async (user: User) => { + return await Container.get(ApiKeyRepository).save(createApiKeyEntity(user)); +}; + +export async function createOwnerWithApiKey() { + const owner = await createOwner(); + const apiKey = await addApiKey(owner); + owner.apiKeys = [apiKey]; + return owner; +} + +export async function createMemberWithApiKey() { + const member = await createMember(); + const apiKey = await addApiKey(member); + member.apiKeys = [apiKey]; + return member; +} + +export async function createOwner() { return await createUser({ role: 'global:owner' }); } -export async function createMember({ withApiKey } = { withApiKey: false }) { - if (withApiKey) { - return await addApiKey(await createUser({ role: 'global:member' })); - } - +export async function createMember() { return await createUser({ role: 'global:member' }); } @@ -128,11 +149,6 @@ export async function createManyUsers( return result.map((result) => result.user); } -export async function addApiKey(user: User): Promise<User> { - user.apiKey = randomApiKey(); - return await Container.get(UserRepository).save(user); -} - export const getAllUsers = async () => await Container.get(UserRepository).find({ relations: ['authIdentities'], diff --git a/packages/cli/test/integration/shared/retry-until.ts b/packages/cli/test/integration/shared/retry-until.ts new file mode 100644 index 0000000000..f469149b31 --- /dev/null +++ b/packages/cli/test/integration/shared/retry-until.ts @@ -0,0 +1,32 @@ +/** + * Retries the given assertion until it passes or the timeout is reached + * + * @example + * await retryUntil( + * () => expect(service.someState).toBe(true) + * ); + */ +export const retryUntil = async ( + assertion: () => Promise<void> | void, + { interval = 20, timeout = 1000 } = {}, +) => { + return await new Promise((resolve, reject) => { + const startTime = Date.now(); + + const tryAgain = () => { + setTimeout(async () => { + try { + resolve(await assertion()); + } catch (error) { + if (Date.now() - startTime > timeout) { + reject(error); + } else { + tryAgain(); + } + } + }, interval); + }; + + tryAgain(); + }); +}; diff --git a/packages/cli/test/integration/shared/test-db.ts b/packages/cli/test/integration/shared/test-db.ts index
365bc81fa8..7faaa3f6eb 100644 --- a/packages/cli/test/integration/shared/test-db.ts +++ b/packages/cli/test/integration/shared/test-db.ts @@ -67,6 +67,7 @@ const repositories = [ 'Project', 'ProjectRelation', 'Role', + 'ProcessedData', 'Project', 'ProjectRelation', 'Settings', @@ -80,6 +81,7 @@ const repositories = [ 'WorkflowHistory', 'WorkflowStatistics', 'WorkflowTagMapping', + 'ApiKey', ] as const; /** @@ -87,9 +89,18 @@ const repositories = [ */ export async function truncate(names: Array<(typeof repositories)[number]>) { for (const name of names) { - const RepositoryClass: Class<Repository<object>> = - // eslint-disable-next-line n8n-local-rules/no-dynamic-import-template - (await import(`@/databases/repositories/${kebabCase(name)}.repository`))[`${name}Repository`]; + let RepositoryClass: Class<Repository<object>>; + + try { + RepositoryClass = (await import(`@/databases/repositories/${kebabCase(name)}.repository`))[ + `${name}Repository` + ]; + } catch (e) { + RepositoryClass = (await import(`@/databases/repositories/${kebabCase(name)}.repository.ee`))[ + `${name}Repository` + ]; + } + await Container.get(RepositoryClass).delete({}); } } diff --git a/packages/cli/test/integration/shared/types.ts b/packages/cli/test/integration/shared/types.ts index 66ca2d016f..8dc922dda2 100644 --- a/packages/cli/test/integration/shared/types.ts +++ b/packages/cli/test/integration/shared/types.ts @@ -40,7 +40,8 @@ type EndpointGroup = | 'debug' | 'project' | 'role' - | 'dynamic-node-parameters'; + | 'dynamic-node-parameters' + | 'apiKeys'; export interface SetupProps { endpointGroups?: EndpointGroup[]; @@ -55,6 +56,8 @@ export interface TestServer { httpServer: Server; authAgentFor: (user: User) => TestAgent; publicApiAgentFor: (user: User) => TestAgent; + publicApiAgentWithApiKey: (apiKey: string) => TestAgent; + publicApiAgentWithoutApiKey: () => TestAgent; authlessAgent: TestAgent; restlessAgent: TestAgent; license: LicenseMocker; diff --git a/packages/cli/test/integration/shared/utils/test-command.ts b/packages/cli/test/integration/shared/utils/test-command.ts index 82effd1818..d0737ddcc1 100644 --- a/packages/cli/test/integration/shared/utils/test-command.ts +++ b/packages/cli/test/integration/shared/utils/test-command.ts @@ -4,7 +4,7 @@ import type { Class } from 'n8n-core'; import type { BaseCommand } from '@/commands/base-command'; import { MessageEventBus } from '@/eventbus/message-event-bus/message-event-bus'; -import { TelemetryEventRelay } from '@/events/telemetry-event-relay'; +import { TelemetryEventRelay } from '@/events/relays/telemetry.event-relay'; import { mockInstance } from '@test/mocking'; import * as testDb from '../test-db'; diff --git a/packages/cli/test/integration/shared/utils/test-server.ts b/packages/cli/test/integration/shared/utils/test-server.ts index f16bbf9833..b69f21499a 100644 --- a/packages/cli/test/integration/shared/utils/test-server.ts +++ b/packages/cli/test/integration/shared/utils/test-server.ts @@ -11,7 +11,7 @@ import { AUTH_COOKIE_NAME } from '@/constants'; import type { User } from '@/databases/entities/user'; import { ControllerRegistry } from '@/decorators'; import { License } from '@/license'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { rawBodyReader, bodyParser } from '@/middlewares'; import { PostHogClient } from '@/posthog'; import { Push } from '@/push'; @@ -62,17 +62,30 @@ function createAgent( return agent; } -function publicApiAgent( +const userDoesNotHaveApiKey = (user: User) => { + return !user.apiKeys ||
!Array.isArray(user.apiKeys) || user.apiKeys.length === 0; }; + +const publicApiAgent = ( app: express.Application, - { user, version = 1 }: { user: User; version?: number }, -) { + { user, apiKey, version = 1 }: { user?: User; apiKey?: string; version?: number }, +) => { + if (user && apiKey) { + throw new Error('Cannot provide both user and API key'); + } + + if (user && userDoesNotHaveApiKey(user)) { + throw new Error('User does not have an API key'); + } + + const agentApiKey = apiKey ?? user?.apiKeys[0].apiKey; + const agent = request.agent(app); void agent.use(prefix(`${PUBLIC_API_REST_PATH_SEGMENT}/v${version}`)); - if (user.apiKey) { - void agent.set({ 'X-N8N-API-KEY': user.apiKey }); - } + if (!user && !apiKey) return agent; + void agent.set({ 'X-N8N-API-KEY': agentApiKey }); return agent; -} +}; export const setupTestServer = ({ endpointGroups, @@ -100,6 +113,8 @@ export const setupTestServer = ({ authlessAgent: createAgent(app), restlessAgent: createAgent(app, { auth: false, noRest: true }), publicApiAgentFor: (user) => publicApiAgent(app, { user }), + publicApiAgentWithApiKey: (apiKey) => publicApiAgent(app, { apiKey }), + publicApiAgentWithoutApiKey: () => publicApiAgent(app, {}), license: new LicenseMocker(), }; @@ -140,7 +155,7 @@ export const setupTestServer = ({ for (const group of endpointGroups) { switch (group) { case 'annotationTags': - await import('@/controllers/annotation-tags.controller'); + await import('@/controllers/annotation-tags.controller.ee'); break; case 'credentials': @@ -258,6 +273,10 @@ export const setupTestServer = ({ case 'dynamic-node-parameters': await import('@/controllers/dynamic-node-parameters.controller'); break; + + case 'apiKeys': + await import('@/controllers/api-keys.controller'); + break; } } diff --git a/packages/cli/test/integration/workflows/workflows.controller.ee.test.ts b/packages/cli/test/integration/workflows/workflows.controller.ee.test.ts index c8f2db889f..2002843bfe 100644 --- a/packages/cli/test/integration/workflows/workflows.controller.ee.test.ts +++ b/packages/cli/test/integration/workflows/workflows.controller.ee.test.ts @@ -5,6 +5,7 @@ import { v4 as uuid } from 'uuid'; import { ActiveWorkflowManager } from '@/active-workflow-manager'; import config from '@/config'; import type { Project } from '@/databases/entities/project'; +import type { ProjectRole } from '@/databases/entities/project-relation'; import type { User } from '@/databases/entities/user'; import { ProjectRepository } from '@/databases/repositories/project.repository'; import { WorkflowHistoryRepository } from '@/databases/repositories/workflow-history.repository'; @@ -1385,18 +1386,6 @@ describe('PUT /:workflowId/transfer', () => { .expect(400); }); - test('cannot transfer into a personal project', async () => { - const sourceProject = await createTeamProject('Team Project', member); - - const workflow = await createWorkflow({}, sourceProject); - - await testServer - .authAgentFor(member) - .put(`/workflows/${workflow.id}/transfer`) - .send({ destinationProjectId: memberPersonalProject.id }) - .expect(400); - }); - test('cannot transfer somebody elses workflow', async () => { const destinationProject = await createTeamProject('Team Project', member); @@ -1421,180 +1410,133 @@ describe('PUT /:workflowId/transfer', () => { .expect(404); }); - test('project:editors cannot transfer workflows', async () => { - // - // ARRANGE - // - const sourceProject = await createTeamProject(); - await linkUserToProject(member, sourceProject, 'project:editor'); - - const
workflow = await createWorkflow({}, sourceProject); - - const destinationProject = await createTeamProject(); - await linkUserToProject(member, destinationProject, 'project:admin'); - - // - // ACT & ASSERT - // - await testServer - .authAgentFor(member) - .put(`/workflows/${workflow.id}/transfer`) - .send({ destinationProjectId: destinationProject.id }) - .expect(403); - }); - - test('transferring from a personal project to a team project severs all sharings', async () => { - // - // ARRANGE - // - const workflow = await createWorkflow({}, member); - - // these sharings should be deleted by the transfer - await shareWorkflowWithUsers(workflow, [anotherMember, owner]); - - const destinationProject = await createTeamProject('Team Project', member); - - // - // ACT - // - const response = await testServer - .authAgentFor(member) - .put(`/workflows/${workflow.id}/transfer`) - .send({ destinationProjectId: destinationProject.id }) - .expect(200); - - // - // ASSERT - // - expect(response.body).toEqual({}); - - const allSharings = await getWorkflowSharing(workflow); - expect(allSharings).toHaveLength(1); - expect(allSharings[0]).toMatchObject({ - projectId: destinationProject.id, - workflowId: workflow.id, - role: 'workflow:owner', - }); - }); - - test('can transfer from team to another team project', async () => { - // - // ARRANGE - // - const sourceProject = await createTeamProject('Team Project 1', member); - const workflow = await createWorkflow({}, sourceProject); - - const destinationProject = await createTeamProject('Team Project 2', member); - - // - // ACT - // - const response = await testServer - .authAgentFor(member) - .put(`/workflows/${workflow.id}/transfer`) - .send({ destinationProjectId: destinationProject.id }) - .expect(200); - - // - // ASSERT - // - expect(response.body).toEqual({}); - - const allSharings = await getWorkflowSharing(workflow); - expect(allSharings).toHaveLength(1); - expect(allSharings[0]).toMatchObject({ - projectId: destinationProject.id, - workflowId: workflow.id, - role: 'workflow:owner', - }); - }); - - test.each([ - ['owners', () => owner], - ['admins', () => admin], - ])( - 'global %s can always transfer from any personal or team project into any team project', - async (_name, actor) => { + test.each(['project:editor', 'project:viewer'])( + '%ss cannot transfer workflows', + async (projectRole) => { // // ARRANGE // - const sourceProject = await createTeamProject('Source Project', member); - const teamWorkflow = await createWorkflow({}, sourceProject); + const sourceProject = await createTeamProject(); + await linkUserToProject(member, sourceProject, projectRole); - const personalWorkflow = await createWorkflow({}, member); + const workflow = await createWorkflow({}, sourceProject); - const destinationProject = await createTeamProject('Destination Project', member); + const destinationProject = await createTeamProject(); + await linkUserToProject(member, destinationProject, 'project:admin'); // - // ACT + // ACT & ASSERT // - const response1 = await testServer - .authAgentFor(actor()) - .put(`/workflows/${teamWorkflow.id}/transfer`) + await testServer + .authAgentFor(member) + .put(`/workflows/${workflow.id}/transfer`) .send({ destinationProjectId: destinationProject.id }) - .expect(200); - const response2 = await testServer - .authAgentFor(actor()) - .put(`/workflows/${personalWorkflow.id}/transfer`) - .send({ destinationProjectId: destinationProject.id }) - .expect(200); - - // - // ASSERT - // - expect(response1.body).toEqual({}); - 
expect(response2.body).toEqual({}); - - { - const allSharings = await getWorkflowSharing(teamWorkflow); - expect(allSharings).toHaveLength(1); - expect(allSharings[0]).toMatchObject({ - projectId: destinationProject.id, - workflowId: teamWorkflow.id, - role: 'workflow:owner', - }); - } - - { - const allSharings = await getWorkflowSharing(personalWorkflow); - expect(allSharings).toHaveLength(1); - expect(allSharings[0]).toMatchObject({ - projectId: destinationProject.id, - workflowId: personalWorkflow.id, - role: 'workflow:owner', - }); - } + .expect(403); }, ); - test.each([ - ['owners', () => owner], - ['admins', () => admin], - ])('global %s cannot transfer into personal projects', async (_name, actor) => { - // - // ARRANGE - // - const sourceProject = await createTeamProject('Source Project', member); - const teamWorkflow = await createWorkflow({}, sourceProject); + test.each< + [ + // user role + 'owners' | 'admins', + // source project type + 'team' | 'personal', + // destination project type + 'team' | 'personal', + // actor + () => User, + // source project + () => Promise | Project, + // destination project + () => Promise | Project, + ] + >([ + // owner + [ + 'owners', + 'team', + 'team', + () => owner, + async () => await createTeamProject('Source Project'), + async () => await createTeamProject('Destination Project'), + ], + [ + 'owners', + 'team', + 'personal', + () => owner, + async () => await createTeamProject('Source Project'), + () => memberPersonalProject, + ], + [ + 'owners', + 'personal', + 'team', + () => owner, + () => memberPersonalProject, + async () => await createTeamProject('Destination Project'), + ], - const personalWorkflow = await createWorkflow({}, member); + // admin + [ + 'admins', + 'team', + 'team', + () => admin, + async () => await createTeamProject('Source Project'), + async () => await createTeamProject('Destination Project'), + ], + [ + 'admins', + 'team', + 'personal', + () => admin, + async () => await createTeamProject('Source Project'), + () => memberPersonalProject, + ], + [ + 'admins', + 'personal', + 'team', + () => admin, + () => memberPersonalProject, + async () => await createTeamProject('Destination Project'), + ], + ])( + 'global %s can transfer workflows from a %s project to a %s project', + async ( + _roleName, + _sourceProjectName, + _destinationProjectName, + getActor, + getSourceProject, + getDestinationProject, + ) => { + // ARRANGE + const actor = getActor(); + const sourceProject = await getSourceProject(); + const destinationProject = await getDestinationProject(); + const workflow = await createWorkflow({}, sourceProject); - const destinationProject = anotherMemberPersonalProject; + // ACT + const response = await testServer + .authAgentFor(actor) + .put(`/workflows/${workflow.id}/transfer`) + .send({ destinationProjectId: destinationProject.id }) + .expect(200); - // - // ACT & ASSERT - // - await testServer - .authAgentFor(actor()) - .put(`/workflows/${teamWorkflow.id}/transfer`) - .send({ destinationProjectId: destinationProject.id }) - .expect(400); - await testServer - .authAgentFor(actor()) - .put(`/workflows/${personalWorkflow.id}/transfer`) - .send({ destinationProjectId: destinationProject.id }) - .expect(400); - }); + // ASSERT + expect(response.body).toEqual({}); + + const allSharings = await getWorkflowSharing(workflow); + expect(allSharings).toHaveLength(1); + expect(allSharings[0]).toMatchObject({ + projectId: destinationProject.id, + workflowId: workflow.id, + role: 'workflow:owner', + }); + }, + ); 
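A note on the test-helper changes threaded through this diff: `createOwnerWithApiKey()` / `createMemberWithApiKey()` persist an `ApiKey` row via `ApiKeyRepository` and attach it to the returned user, and the test server now exposes agents keyed by a raw API key rather than by user. A minimal sketch of how these pieces might combine in a public-API test; the `/workflows` endpoint and the expected status codes are illustrative assumptions, not taken from this diff:

```ts
import { createOwnerWithApiKey } from '../shared/db/users';
import type { TestServer } from '../shared/types';

// Assumes a server created with setupTestServer(...), as in the suites above.
declare const testServer: TestServer;

test('public API requires a valid API key', async () => {
	// Persists an ApiKey entity and attaches it to the returned user.
	const owner = await createOwnerWithApiKey();

	// Agent pre-configured with the X-N8N-API-KEY header for that key.
	await testServer
		.publicApiAgentWithApiKey(owner.apiKeys[0].apiKey)
		.get('/workflows')
		.expect(200); // expected statuses here are illustrative

	// Agent that deliberately sends no key at all.
	await testServer.publicApiAgentWithoutApiKey().get('/workflows').expect(401);
});
```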
test('removes and re-adds the workflow from the active workflow manager during the transfer', async () => { // diff --git a/packages/cli/test/shared/mocking.ts b/packages/cli/test/shared/mocking.ts index 60b712b115..099988a896 100644 --- a/packages/cli/test/shared/mocking.ts +++ b/packages/cli/test/shared/mocking.ts @@ -4,6 +4,8 @@ import type { Class } from 'n8n-core'; import type { DeepPartial } from 'ts-essentials'; import { Container } from 'typedi'; +import type { Logger } from '@/logging/logger.service'; + export const mockInstance = ( serviceClass: Class, data: DeepPartial | undefined = undefined, @@ -22,3 +24,6 @@ export const mockEntityManager = (entityClass: Class) => { Object.assign(entityManager, { connection: dataSource }); return entityManager; }; + +export const mockLogger = () => + mock({ withScope: jest.fn().mockReturnValue(mock()) }); diff --git a/packages/core/.eslintrc.js b/packages/core/.eslintrc.js index cd962bad5c..9bfae8a9eb 100644 --- a/packages/core/.eslintrc.js +++ b/packages/core/.eslintrc.js @@ -18,7 +18,6 @@ module.exports = { complexity: 'error', // TODO: Remove this - 'import/order': 'off', '@typescript-eslint/ban-ts-comment': ['error', { 'ts-ignore': true }], }, }; diff --git a/packages/core/bin/generate-ui-types b/packages/core/bin/generate-ui-types index 8cecb6b054..f73ca87a15 100755 --- a/packages/core/bin/generate-ui-types +++ b/packages/core/bin/generate-ui-types @@ -30,18 +30,6 @@ function findReferencedMethods(obj, refs = {}, latestName = '') { const loader = new PackageDirectoryLoader(packageDir); await loader.loadAll(); - const knownCredentials = loader.known.credentials; - const credentialTypes = Object.values(loader.credentialTypes).map((data) => { - const credentialType = data.type; - if ( - knownCredentials[credentialType.name].supportedNodes?.length > 0 && - credentialType.httpRequestNode - ) { - credentialType.httpRequestNode.hidden = true; - } - return credentialType; - }); - const loaderNodeTypes = Object.values(loader.nodeTypes); const definedMethods = loaderNodeTypes.reduce((acc, cur) => { @@ -76,6 +64,36 @@ function findReferencedMethods(obj, refs = {}, latestName = '') { }), ); + const knownCredentials = loader.known.credentials; + const credentialTypes = Object.values(loader.credentialTypes).map((data) => { + const credentialType = data.type; + const supportedNodes = knownCredentials[credentialType.name].supportedNodes ?? 
[]; + if (supportedNodes.length > 0 && credentialType.httpRequestNode) { + credentialType.httpRequestNode.hidden = true; + } + + credentialType.supportedNodes = supportedNodes; + + if (!credentialType.iconUrl && !credentialType.icon) { + for (const supportedNode of supportedNodes) { + const nodeType = loader.nodeTypes[supportedNode]?.type.description; + + if (!nodeType) continue; + if (nodeType.icon) { + credentialType.icon = nodeType.icon; + credentialType.iconColor = nodeType.iconColor; + break; + } + if (nodeType.iconUrl) { + credentialType.iconUrl = nodeType.iconUrl; + break; + } + } + } + + return credentialType; + }); + const referencedMethods = findReferencedMethods(nodeTypes); await Promise.all([ diff --git a/packages/core/package.json b/packages/core/package.json index 74e3483943..95cf23efa6 100644 --- a/packages/core/package.json +++ b/packages/core/package.json @@ -1,6 +1,6 @@ { "name": "n8n-core", - "version": "1.61.0", + "version": "1.63.0", "description": "Core functionality of n8n", "main": "dist/index", "types": "dist/index.d.ts", diff --git a/packages/core/src/ActiveWorkflows.ts b/packages/core/src/ActiveWorkflows.ts index bfc6319626..93e67488d5 100644 --- a/packages/core/src/ActiveWorkflows.ts +++ b/packages/core/src/ActiveWorkflows.ts @@ -1,5 +1,3 @@ -import { Service } from 'typedi'; - import type { IGetExecutePollFunctions, IGetExecuteTriggerFunctions, @@ -20,9 +18,10 @@ import { WorkflowActivationError, WorkflowDeactivationError, } from 'n8n-workflow'; +import { Service } from 'typedi'; -import { ScheduledTaskManager } from './ScheduledTaskManager'; import type { IWorkflowData } from './Interfaces'; +import { ScheduledTaskManager } from './ScheduledTaskManager'; @Service() export class ActiveWorkflows { diff --git a/packages/core/src/Agent/index.ts b/packages/core/src/Agent/index.ts new file mode 100644 index 0000000000..ed842d99ee --- /dev/null +++ b/packages/core/src/Agent/index.ts @@ -0,0 +1,61 @@ +import type { + IExecuteFunctions, + Workflow, + IRunExecutionData, + INodeExecutionData, + ITaskDataConnections, + INode, + IWorkflowExecuteAdditionalData, + WorkflowExecuteMode, + INodeParameters, + IExecuteData, + IDataObject, + Result, +} from 'n8n-workflow'; +import { createEnvProviderState } from 'n8n-workflow'; + +export const createAgentStartJob = ( + additionalData: IWorkflowExecuteAdditionalData, + inputData: ITaskDataConnections, + node: INode, + workflow: Workflow, + runExecutionData: IRunExecutionData, + runIndex: number, + activeNodeName: string, + connectionInputData: INodeExecutionData[], + siblingParameters: INodeParameters, + mode: WorkflowExecuteMode, + executeData?: IExecuteData, + defaultReturnRunIndex?: number, + selfData?: IDataObject, + contextNodeName?: string, +): IExecuteFunctions['startJob'] => { + return async function startJob( + this: IExecuteFunctions, + jobType: string, + settings: unknown, + itemIndex: number, + ): Promise> { + return await additionalData.startAgentJob( + additionalData, + jobType, + settings, + this, + inputData, + node, + workflow, + runExecutionData, + runIndex, + itemIndex, + activeNodeName, + connectionInputData, + siblingParameters, + mode, + createEnvProviderState(), + executeData, + defaultReturnRunIndex, + selfData, + contextNodeName, + ); + }; +}; diff --git a/packages/core/src/BinaryData/BinaryData.service.ts b/packages/core/src/BinaryData/BinaryData.service.ts index 20903aa71c..556a3176de 100644 --- a/packages/core/src/BinaryData/BinaryData.service.ts +++ b/packages/core/src/BinaryData/BinaryData.service.ts 
@@ -1,14 +1,14 @@ +import { BINARY_ENCODING } from 'n8n-workflow'; +import type { INodeExecutionData, IBinaryData } from 'n8n-workflow'; import { readFile, stat } from 'node:fs/promises'; import prettyBytes from 'pretty-bytes'; -import Container, { Service } from 'typedi'; -import { BINARY_ENCODING } from 'n8n-workflow'; -import { InvalidModeError } from '../errors/invalid-mode.error'; -import { areConfigModes, binaryToBuffer } from './utils'; - import type { Readable } from 'stream'; +import Container, { Service } from 'typedi'; + import type { BinaryData } from './types'; -import type { INodeExecutionData, IBinaryData } from 'n8n-workflow'; +import { areConfigModes, binaryToBuffer } from './utils'; import { InvalidManagerError } from '../errors/invalid-manager.error'; +import { InvalidModeError } from '../errors/invalid-mode.error'; @Service() export class BinaryDataService { diff --git a/packages/core/src/BinaryData/FileSystem.manager.ts b/packages/core/src/BinaryData/FileSystem.manager.ts index 5b7250d9eb..f49e6e5c02 100644 --- a/packages/core/src/BinaryData/FileSystem.manager.ts +++ b/packages/core/src/BinaryData/FileSystem.manager.ts @@ -1,13 +1,13 @@ +import { jsonParse } from 'n8n-workflow'; import { createReadStream } from 'node:fs'; import fs from 'node:fs/promises'; import path from 'node:path'; +import type { Readable } from 'stream'; import { v4 as uuid } from 'uuid'; -import { jsonParse } from 'n8n-workflow'; + +import type { BinaryData } from './types'; import { assertDir, doesNotExist } from './utils'; import { DisallowedFilepathError } from '../errors/disallowed-filepath.error'; - -import type { Readable } from 'stream'; -import type { BinaryData } from './types'; import { FileNotFoundError } from '../errors/file-not-found.error'; const EXECUTION_ID_EXTRACTOR = diff --git a/packages/core/src/BinaryData/ObjectStore.manager.ts b/packages/core/src/BinaryData/ObjectStore.manager.ts index 6f7bb3ef29..65827d4ef0 100644 --- a/packages/core/src/BinaryData/ObjectStore.manager.ts +++ b/packages/core/src/BinaryData/ObjectStore.manager.ts @@ -1,12 +1,12 @@ import fs from 'node:fs/promises'; +import type { Readable } from 'node:stream'; import { Service } from 'typedi'; import { v4 as uuid } from 'uuid'; + +import type { BinaryData } from './types'; import { binaryToBuffer } from './utils'; import { ObjectStoreService } from '../ObjectStore/ObjectStore.service.ee'; -import type { Readable } from 'node:stream'; -import type { BinaryData } from './types'; - @Service() export class ObjectStoreManager implements BinaryData.Manager { constructor(private readonly objectStoreService: ObjectStoreService) {} diff --git a/packages/core/src/BinaryData/utils.ts b/packages/core/src/BinaryData/utils.ts index c64cb4315b..bedda5be12 100644 --- a/packages/core/src/BinaryData/utils.ts +++ b/packages/core/src/BinaryData/utils.ts @@ -1,7 +1,8 @@ +import concatStream from 'concat-stream'; import fs from 'node:fs/promises'; import type { Readable } from 'node:stream'; + import type { BinaryData } from './types'; -import concatStream from 'concat-stream'; export const CONFIG_MODES = ['default', 'filesystem', 's3'] as const; diff --git a/packages/core/src/Cipher.ts b/packages/core/src/Cipher.ts index 4e1b649bed..d9ed93ddb6 100644 --- a/packages/core/src/Cipher.ts +++ b/packages/core/src/Cipher.ts @@ -1,5 +1,6 @@ -import { Service } from 'typedi'; import { createHash, createCipheriv, createDecipheriv, randomBytes } from 'crypto'; +import { Service } from 'typedi'; + import { InstanceSettings } from 
'./InstanceSettings'; // Data encrypted by CryptoJS always starts with these bytes diff --git a/packages/core/src/CreateNodeAsTool.ts b/packages/core/src/CreateNodeAsTool.ts index 21e1b6352a..1466f57d09 100644 --- a/packages/core/src/CreateNodeAsTool.ts +++ b/packages/core/src/CreateNodeAsTool.ts @@ -1,296 +1,435 @@ -/** - * @module NodeAsTool - * @description This module converts n8n nodes into LangChain tools by analyzing node parameters, - * identifying placeholders, and generating a Zod schema. It then creates a DynamicStructuredTool - * that can be used in LangChain workflows. - * - * General approach: - * 1. Recursively traverse node parameters to find placeholders, including in nested structures - * 2. Generate a Zod schema based on these placeholders, preserving the nested structure - * 3. Create a DynamicStructuredTool with the schema and a function that executes the n8n node - * - * Example: - * - Node parameters: - * { - * "inputText": "{{ '__PLACEHOLDER: Enter main text to process' }}", - * "options": { - * "language": "{{ '__PLACEHOLDER: Specify language' }}", - * "advanced": { - * "maxLength": "{{ '__PLACEHOLDER: Enter maximum length' }}" - * } - * } - * } - * - * - Generated Zod schema: - * z.object({ - * "inputText": z.string().describe("Enter main text to process"), - * "options__language": z.string().describe("Specify language"), - * "options__advanced__maxLength": z.string().describe("Enter maximum length") - * }).required() - * - * - Resulting tool can be called with: - * { - * "inputText": "Hello, world!", - * "options__language": "en", - * "options__advanced__maxLength": "100" - * } - * - * Note: Nested properties are flattened with double underscores in the schema, - * but the tool reconstructs the original nested structure when executing the node. - */ - import { DynamicStructuredTool } from '@langchain/core/tools'; -import { - NodeConnectionType, - type IExecuteFunctions, - type INodeParameters, - type INodeType, -} from 'n8n-workflow'; +import type { IExecuteFunctions, INodeParameters, INodeType } from 'n8n-workflow'; +import { jsonParse, NodeConnectionType, NodeOperationError } from 'n8n-workflow'; import { z } from 'zod'; -/** Represents a nested object structure */ -type NestedObject = { [key: string]: unknown }; - -/** - * Encodes a dot-notated key to a format safe for use as an object key. - * @param {string} key - The dot-notated key to encode. - * @returns {string} The encoded key. - */ -function encodeDotNotation(key: string): string { - // Replace dots with double underscores, then handle special case for '__value' for complicated params - return key.replace(/\./g, '__').replace('__value', ''); +type AllowedTypes = 'string' | 'number' | 'boolean' | 'json'; +interface FromAIArgument { + key: string; + description?: string; + type?: AllowedTypes; + defaultValue?: string | number | boolean | Record; } /** - * Decodes an encoded key back to its original dot-notated form. - * @param {string} key - The encoded key to decode. - * @returns {string} The decoded, dot-notated key. + * AIParametersParser + * + * This class encapsulates the logic for parsing node parameters, extracting $fromAI calls, + * generating Zod schemas, and creating LangChain tools. */ -function decodeDotNotation(key: string): string { - // Simply replace double underscores with dots - return key.replace(/__/g, '.'); -} +class AIParametersParser { + private ctx: IExecuteFunctions; -/** - * Recursively traverses an object to find placeholder values. 
- * @param {NestedObject} obj - The object to traverse. - * @param {string[]} path - The current path in the object. - * @param {Map} results - Map to store found placeholders. - * @returns {Map} Updated map of placeholders. - */ -function traverseObject( - obj: NestedObject, - path: string[] = [], - results: Map = new Map(), -): Map { - for (const [key, value] of Object.entries(obj)) { - const currentPath = [...path, key]; - const fullPath = currentPath.join('.'); + /** + * Constructs an instance of AIParametersParser. + * @param ctx The execution context. + */ + constructor(ctx: IExecuteFunctions) { + this.ctx = ctx; + } - if (typeof value === 'string' && value.startsWith("{{ '__PLACEHOLDER")) { - // Store placeholder values with their full path - results.set(encodeDotNotation(fullPath), value); - } else if (Array.isArray(value)) { - // Recursively traverse arrays - // eslint-disable-next-line @typescript-eslint/no-use-before-define - traverseArray(value, currentPath, results); - } else if (typeof value === 'object' && value !== null) { - // Recursively traverse nested objects, but only if they're not empty - if (Object.keys(value).length > 0) { - traverseObject(value as NestedObject, currentPath, results); - } + /** + * Generates a Zod schema based on the provided FromAIArgument placeholder. + * @param placeholder The FromAIArgument object containing key, type, description, and defaultValue. + * @returns A Zod schema corresponding to the placeholder's type and constraints. + */ + private generateZodSchema(placeholder: FromAIArgument): z.ZodTypeAny { + let schema: z.ZodTypeAny; + + switch (placeholder.type?.toLowerCase()) { + case 'string': + schema = z.string(); + break; + case 'number': + schema = z.number(); + break; + case 'boolean': + schema = z.boolean(); + break; + case 'json': + schema = z.record(z.any()); + break; + default: + schema = z.string(); + } + + if (placeholder.description) { + schema = schema.describe(`${schema.description ?? ''} ${placeholder.description}`.trim()); + } + + if (placeholder.defaultValue !== undefined) { + schema = schema.default(placeholder.defaultValue); + } + + return schema; + } + + /** + * Recursively traverses the nodeParameters object to find all $fromAI calls. + * @param payload The current object or value being traversed. + * @param collectedArgs The array collecting FromAIArgument objects. + */ + private traverseNodeParameters(payload: unknown, collectedArgs: FromAIArgument[]) { + if (typeof payload === 'string') { + const fromAICalls = this.extractFromAICalls(payload); + fromAICalls.forEach((call) => collectedArgs.push(call)); + } else if (Array.isArray(payload)) { + payload.forEach((item: unknown) => this.traverseNodeParameters(item, collectedArgs)); + } else if (typeof payload === 'object' && payload !== null) { + Object.values(payload).forEach((value) => this.traverseNodeParameters(value, collectedArgs)); } } - return results; -} + /** + * Extracts all $fromAI calls from a given string + * @param str The string to search for $fromAI calls. + * @returns An array of FromAIArgument objects. + * + * This method uses a regular expression to find the start of each $fromAI function call + * in the input string. It then employs a character-by-character parsing approach to + * accurately extract the arguments of each call, handling nested parentheses and quoted strings. + * + * The parsing process: + * 1. Finds the starting position of a $fromAI call using regex. + * 2. 
Iterates through characters, keeping track of parentheses depth and quote status. + * 3. Handles escaped characters within quotes to avoid premature quote closing. + * 4. Builds the argument string until the matching closing parenthesis is found. + * 5. Parses the extracted argument string into a FromAIArgument object. + * 6. Repeats the process for all $fromAI calls in the input string. + * + */ + private extractFromAICalls(str: string): FromAIArgument[] { + const args: FromAIArgument[] = []; + // Regular expression to match the start of a $fromAI function call + const pattern = /\$fromAI\s*\(\s*/gi; + let match: RegExpExecArray | null; -/** - * Recursively traverses an array to find placeholder values. - * @param {unknown[]} arr - The array to traverse. - * @param {string[]} path - The current path in the array. - * @param {Map} results - Map to store found placeholders. - */ -function traverseArray(arr: unknown[], path: string[], results: Map): void { - arr.forEach((item, index) => { - const currentPath = [...path, index.toString()]; - const fullPath = currentPath.join('.'); + while ((match = pattern.exec(str)) !== null) { + const startIndex = match.index + match[0].length; + let current = startIndex; + let inQuotes = false; + let quoteChar = ''; + let parenthesesCount = 1; + let argsString = ''; - if (typeof item === 'string' && item.startsWith("{{ '__PLACEHOLDER")) { - // Store placeholder values with their full path - results.set(encodeDotNotation(fullPath), item); - } else if (Array.isArray(item)) { - // Recursively traverse nested arrays - traverseArray(item, currentPath, results); - } else if (typeof item === 'object' && item !== null) { - // Recursively traverse nested objects - traverseObject(item as NestedObject, currentPath, results); - } - }); -} + // Parse the arguments string, handling nested parentheses and quotes + while (current < str.length && parenthesesCount > 0) { + const char = str[current]; -/** - * Builds a nested object structure from matching keys and their values. - * @param {string} baseKey - The base key to start building from. - * @param {string[]} matchingKeys - Array of matching keys. - * @param {Record} values - Object containing values for the keys. - * @returns {Record} The built nested object structure. 
- */ -function buildStructureFromMatches( - baseKey: string, - matchingKeys: string[], - values: Record<string, string>, -): Record<string, unknown> { - const result = {}; + if (inQuotes) { + // Handle characters inside quotes, including escaped characters + if (char === '\\' && current + 1 < str.length) { + argsString += char + str[current + 1]; + current += 2; + continue; + } - for (const matchingKey of matchingKeys) { - const decodedKey = decodeDotNotation(matchingKey); - // Extract the part of the key after the base key - const remainingPath = decodedKey - .slice(baseKey.length) - .split('.') - .filter((k) => k !== ''); - let current: Record<string, unknown> = result; + if (char === quoteChar) { + inQuotes = false; + quoteChar = ''; + } + argsString += char; + } else { + // Handle characters outside quotes + if (['"', "'", '`'].includes(char)) { + inQuotes = true; + quoteChar = char; + } else if (char === '(') { + parenthesesCount++; + } else if (char === ')') { + parenthesesCount--; + } - // Build the nested structure - for (let i = 0; i < remainingPath.length - 1; i++) { - if (!(remainingPath[i] in current)) { - current[remainingPath[i]] = {}; + // Only add characters if we're still inside the main parentheses + if (parenthesesCount > 0 || char !== ')') { + argsString += char; + } + } + + current++; + } + + // If parentheses are balanced, parse the arguments + if (parenthesesCount === 0) { + try { + const parsedArgs = this.parseArguments(argsString); + args.push(parsedArgs); + } catch (error) { + // If parsing fails, throw a NodeOperationError with details + throw new NodeOperationError( + this.ctx.getNode(), + `Failed to parse $fromAI arguments: ${argsString}: ${error}`, + ); + } + } else { + // Throw an error if parentheses are unbalanced + throw new NodeOperationError( + this.ctx.getNode(), + `Unbalanced parentheses while parsing $fromAI call: ${str.slice(startIndex)}`, + ); } - current = current[remainingPath[i]] as Record<string, unknown>; } - // Set the value at the deepest level - const lastKey = remainingPath[remainingPath.length - 1]; - current[lastKey ?? matchingKey] = values[matchingKey]; + return args; } - // If no nested structure was created, return the direct value - return Object.keys(result).length === 0 ? values[encodeDotNotation(baseKey)] : result; + /** + * Parses the arguments of a single $fromAI function call. + * @param argsString The string containing the function arguments. + * @returns A FromAIArgument object.
+ */ + private parseArguments(argsString: string): FromAIArgument { + // Split arguments by commas not inside quotes + const args: string[] = []; + let currentArg = ''; + let inQuotes = false; + let quoteChar = ''; + let escapeNext = false; + + for (let i = 0; i < argsString.length; i++) { + const char = argsString[i]; + + if (escapeNext) { + currentArg += char; + escapeNext = false; + continue; + } + + if (char === '\\') { + escapeNext = true; + continue; + } + + if (['"', "'", '`'].includes(char)) { + if (!inQuotes) { + inQuotes = true; + quoteChar = char; + currentArg += char; + } else if (char === quoteChar) { + inQuotes = false; + quoteChar = ''; + currentArg += char; + } else { + currentArg += char; + } + continue; + } + + if (char === ',' && !inQuotes) { + args.push(currentArg.trim()); + currentArg = ''; + continue; + } + + currentArg += char; + } + + if (currentArg) { + args.push(currentArg.trim()); + } + + // Remove surrounding quotes if present + const cleanArgs = args.map((arg) => { + const trimmed = arg.trim(); + if ( + (trimmed.startsWith("'") && trimmed.endsWith("'")) || + (trimmed.startsWith('`') && trimmed.endsWith('`')) || + (trimmed.startsWith('"') && trimmed.endsWith('"')) + ) { + return trimmed + .slice(1, -1) + .replace(/\\'/g, "'") + .replace(/\\`/g, '`') + .replace(/\\"/g, '"') + .replace(/\\\\/g, '\\'); + } + return trimmed; + }); + + const type = cleanArgs?.[2] || 'string'; + + if (!['string', 'number', 'boolean', 'json'].includes(type.toLowerCase())) { + throw new NodeOperationError(this.ctx.getNode(), `Invalid type: ${type}`); + } + + return { + key: cleanArgs[0] || '', + description: cleanArgs[1], + type: (cleanArgs?.[2] ?? 'string') as AllowedTypes, + defaultValue: this.parseDefaultValue(cleanArgs[3]), + }; + } + + /** + * Parses the default value, preserving its original type. + * @param value The default value as a string. + * @returns The parsed default value in its appropriate type. + */ + private parseDefaultValue( + value: string | undefined, + ): string | number | boolean | Record | undefined { + if (value === undefined || value === '') return undefined; + const lowerValue = value.toLowerCase(); + if (lowerValue === 'true') return true; + if (lowerValue === 'false') return false; + if (!isNaN(Number(value))) return Number(value); + try { + return jsonParse(value); + } catch { + return value; + } + } + + /** + * Generates a description for a node based on the provided parameters. + * @param node The node type. + * @param nodeParameters The parameters of the node. + * @returns A string description for the node. + */ + private getDescription(node: INodeType, nodeParameters: INodeParameters): string { + const manualDescription = nodeParameters.toolDescription as string; + + if (nodeParameters.descriptionType === 'auto') { + const resource = nodeParameters.resource as string; + const operation = nodeParameters.operation as string; + let description = node.description.description; + if (resource) { + description += `\n Resource: ${resource}`; + } + if (operation) { + description += `\n Operation: ${operation}`; + } + return description.trim(); + } + if (nodeParameters.descriptionType === 'manual') { + return manualDescription ?? node.description.description; + } + + return node.description.description; + } + + /** + * Creates a DynamicStructuredTool from a node. + * @param node The node type. + * @param nodeParameters The parameters of the node. + * @returns A DynamicStructuredTool instance. 
+ */ + public createTool(node: INodeType, nodeParameters: INodeParameters): DynamicStructuredTool { + const collectedArguments: FromAIArgument[] = []; + this.traverseNodeParameters(nodeParameters, collectedArguments); + + // Validate each collected argument + const nameValidationRegex = /^[a-zA-Z0-9_-]{1,64}$/; + const keyMap = new Map<string, FromAIArgument>(); + for (const argument of collectedArguments) { + if (argument.key.length === 0 || !nameValidationRegex.test(argument.key)) { + const isEmptyError = 'You must specify a key when using $fromAI()'; + const isInvalidError = `Parameter key \`${argument.key}\` is invalid`; + const error = new Error(argument.key.length === 0 ? isEmptyError : isInvalidError); + throw new NodeOperationError(this.ctx.getNode(), error, { + description: + 'Invalid parameter key, must be between 1 and 64 characters long and only contain letters, numbers, underscores, and hyphens', + }); + } + + if (keyMap.has(argument.key)) { + // If the key already exists in the Map + const existingArg = keyMap.get(argument.key)!; + + // Check if the existing argument has the same description and type + if ( + existingArg.description !== argument.description || + existingArg.type !== argument.type + ) { + // If not, throw an error for inconsistent duplicate keys + throw new NodeOperationError( + this.ctx.getNode(), + `Duplicate key '${argument.key}' found with different description or type`, + { + description: + 'Ensure all $fromAI() calls with the same key have consistent descriptions and types', + }, + ); + } + // If the duplicate key has consistent description and type, it's allowed (no action needed) + } else { + // If the key doesn't exist in the Map, add it + keyMap.set(argument.key, argument); + } + } + + // Remove duplicate keys, latest occurrence takes precedence + const uniqueArgsMap = collectedArguments.reduce((map, arg) => { + map.set(arg.key, arg); + return map; + }, new Map<string, FromAIArgument>()); + + const uniqueArguments = Array.from(uniqueArgsMap.values()); + + // Generate Zod schema from unique arguments + const schemaObj = uniqueArguments.reduce((acc: Record<string, z.ZodTypeAny>, placeholder) => { + acc[placeholder.key] = this.generateZodSchema(placeholder); + return acc; + }, {}); + + const schema = z.object(schemaObj).required(); + const description = this.getDescription(node, nodeParameters); + const nodeName = this.ctx.getNode().name.replace(/ /g, '_'); + const name = nodeName || node.description.name; + + const tool = new DynamicStructuredTool({ + name, + description, + schema, + func: async (functionArgs: z.infer<typeof schema>) => { + const { index } = this.ctx.addInputData(NodeConnectionType.AiTool, [ + [{ json: functionArgs }], + ]); + + try { + // Execute the node with the current context + const result = await node.execute?.bind(this.ctx)(); + + // Process and map the results + const mappedResults = result?.[0]?.flatMap((item) => item.json); + + // Add output data to the context + this.ctx.addOutputData(NodeConnectionType.AiTool, index, [ + [{ json: { response: mappedResults } }], + ]); + + // Return the stringified results + return JSON.stringify(mappedResults); + } catch (error) { + const nodeError = new NodeOperationError(this.ctx.getNode(), error as Error); + this.ctx.addOutputData(NodeConnectionType.AiTool, index, nodeError); + return 'Error during node execution: ' + nodeError.description; + } + }, + }); + + return tool; + } } /** - * Extracts the description from a placeholder string. - * @param {string} value - The placeholder string. - * @returns {string} The extracted description or a default message.
- */ -function extractPlaceholderDescription(value: string): string { - const match = value.match(/{{ '__PLACEHOLDER:\s*(.+?)\s*' }}/); - return match ? match[1] : 'No description provided'; -} - -/** - * Creates a DynamicStructuredTool from an n8n node. - * @param {INodeType} node - The n8n node to convert. - * @param {IExecuteFunctions} ctx - The execution context. - * @param {INodeParameters} nodeParameters - The node parameters. - * @returns {DynamicStructuredTool} The created tool. + * Converts node into LangChain tool by analyzing node parameters, + * identifying placeholders using the $fromAI function, and generating a Zod schema. It then creates + * a DynamicStructuredTool that can be used in LangChain workflows. + * + * @param ctx The execution context. + * @param node The node type. + * @param nodeParameters The parameters of the node. + * @returns An object containing the DynamicStructuredTool instance. */ export function createNodeAsTool( - node: INodeType, - ctx: IExecuteFunctions, - nodeParameters: INodeParameters, -): DynamicStructuredTool { - // Find all placeholder values in the node parameters - const placeholderValues = traverseObject(nodeParameters); - - // Generate Zod schema from placeholder values - const schemaObj: { [key: string]: z.ZodString } = {}; - for (const [key, value] of placeholderValues.entries()) { - const description = extractPlaceholderDescription(value); - schemaObj[key] = z.string().describe(description); - } - const schema = z.object(schemaObj).required(); - - // Get the tool description from node parameters or use the default - const toolDescription = ctx.getNodeParameter( - 'toolDescription', - 0, - node.description.description, - ) as string; - type GetNodeParameterMethod = IExecuteFunctions['getNodeParameter']; - - const tool = new DynamicStructuredTool({ - name: node.description.name, - description: toolDescription ? toolDescription : node.description.description, - schema, - func: async (functionArgs: z.infer) => { - // Create a proxy for ctx to soft-override parameters with values from the LLM - const ctxProxy = new Proxy(ctx, { - get(target: IExecuteFunctions, prop: string | symbol, receiver: unknown) { - if (prop === 'getNodeParameter') { - // Override getNodeParameter method - // eslint-disable-next-line @typescript-eslint/unbound-method - return new Proxy(target.getNodeParameter, { - apply( - targetMethod: GetNodeParameterMethod, - thisArg: unknown, - argumentsList: Parameters, - ): ReturnType { - const [key] = argumentsList; - if (typeof key !== 'string') { - // If key is not a string, use the original method - return Reflect.apply(targetMethod, thisArg, argumentsList); - } - - const encodedKey = encodeDotNotation(key); - // Check if the full key or any more specific key is a placeholder - const matchingKeys = Array.from(placeholderValues.keys()).filter((k) => - k.startsWith(encodedKey), - ); - - if (matchingKeys.length > 0) { - // If there are matching keys, build the structure using args - const res = buildStructureFromMatches(encodedKey, matchingKeys, functionArgs); - // Return either the specific value or the entire built structure - return res?.[decodeDotNotation(key)] ?? 
res; - } - - // If no placeholder is found, use the original function - return Reflect.apply(targetMethod, thisArg, argumentsList); - }, - }); - } - // eslint-disable-next-line @typescript-eslint/no-unsafe-return - return Reflect.get(target, prop, receiver); - }, - }); - - // Add input data to the context - ctxProxy.addInputData(NodeConnectionType.AiTool, [[{ json: functionArgs }]]); - - // Execute the node with the proxied context - const result = await node.execute?.bind(ctxProxy)(); - - // Process and map the results - const mappedResults = result?.[0]?.flatMap((item) => item.json); - - // Add output data to the context - ctxProxy.addOutputData(NodeConnectionType.AiTool, 0, [ - [{ json: { response: mappedResults } }], - ]); - - // Return the stringified results - return JSON.stringify(mappedResults); - }, - }); - - return tool; -} - -/** - * Asynchronously creates a DynamicStructuredTool from an n8n node. - * @param {IExecuteFunctions} ctx - The execution context. - * @param {INodeType} node - The n8n node to convert. - * @param {INodeParameters} nodeParameters - The node parameters. - * @returns {Promise<{response: DynamicStructuredTool}>} A promise that resolves to an object containing the created tool. - */ -export function getNodeAsTool( ctx: IExecuteFunctions, node: INodeType, nodeParameters: INodeParameters, ) { + const parser = new AIParametersParser(ctx); + return { - response: createNodeAsTool(node, ctx, nodeParameters), + response: parser.createTool(node, nodeParameters), }; } diff --git a/packages/core/src/Credentials.ts b/packages/core/src/Credentials.ts index 3210703a27..da6deb742c 100644 --- a/packages/core/src/Credentials.ts +++ b/packages/core/src/Credentials.ts @@ -1,6 +1,7 @@ -import { Container } from 'typedi'; import type { ICredentialDataDecryptedObject, ICredentialsEncrypted } from 'n8n-workflow'; import { ApplicationError, ICredentials, jsonParse } from 'n8n-workflow'; +import { Container } from 'typedi'; + import { Cipher } from './Cipher'; export class Credentials< diff --git a/packages/core/src/DirectoryLoader.ts b/packages/core/src/DirectoryLoader.ts index 717edd5359..a1401a8fb5 100644 --- a/packages/core/src/DirectoryLoader.ts +++ b/packages/core/src/DirectoryLoader.ts @@ -1,5 +1,4 @@ import glob from 'fast-glob'; -import { readFile } from 'fs/promises'; import type { CodexData, DocumentationLink, @@ -20,7 +19,10 @@ import { getVersionedNodeTypeAll, jsonParse, } from 'n8n-workflow'; +import { readFileSync } from 'node:fs'; +import { readFile } from 'node:fs/promises'; import * as path from 'path'; + import { loadClassInIsolation } from './ClassLoader'; import { CUSTOM_NODES_CATEGORY } from './Constants'; import type { n8n } from './Interfaces'; @@ -350,18 +352,11 @@ export class CustomDirectoryLoader extends DirectoryLoader { * e.g. /nodes-base or community packages. 
*/ export class PackageDirectoryLoader extends DirectoryLoader { - packageName = ''; + packageJson: n8n.PackageJson = this.readJSONSync('package.json'); - packageJson!: n8n.PackageJson; - - async readPackageJson() { - this.packageJson = await this.readJSON('package.json'); - this.packageName = this.packageJson.name; - } + packageName = this.packageJson.name; override async loadAll() { - await this.readPackageJson(); - const { n8n } = this.packageJson; if (!n8n) return; @@ -391,6 +386,17 @@ export class PackageDirectoryLoader extends DirectoryLoader { }); } + protected readJSONSync(file: string): T { + const filePath = this.resolvePath(file); + const fileString = readFileSync(filePath, 'utf8'); + + try { + return jsonParse(fileString); + } catch (error) { + throw new ApplicationError('Failed to parse JSON', { extra: { filePath } }); + } + } + protected async readJSON(file: string): Promise { const filePath = this.resolvePath(file); const fileString = await readFile(filePath, 'utf8'); @@ -408,8 +414,6 @@ export class PackageDirectoryLoader extends DirectoryLoader { */ export class LazyPackageDirectoryLoader extends PackageDirectoryLoader { override async loadAll() { - await this.readPackageJson(); - try { const knownNodes: typeof this.known.nodes = await this.readJSON('dist/known/nodes.json'); for (const nodeName in knownNodes) { diff --git a/packages/core/src/ExecutionMetadata.ts b/packages/core/src/ExecutionMetadata.ts index cc2743a56f..8466933e05 100644 --- a/packages/core/src/ExecutionMetadata.ts +++ b/packages/core/src/ExecutionMetadata.ts @@ -1,5 +1,6 @@ import type { IRunExecutionData } from 'n8n-workflow'; import { LoggerProxy as Logger } from 'n8n-workflow'; + import { InvalidExecutionMetadataError } from './errors/invalid-execution-metadata.error'; export const KV_LIMIT = 10; diff --git a/packages/core/src/InstanceSettings.ts b/packages/core/src/InstanceSettings.ts index 44f4b0c336..17ccf15def 100644 --- a/packages/core/src/InstanceSettings.ts +++ b/packages/core/src/InstanceSettings.ts @@ -1,8 +1,8 @@ -import path from 'path'; -import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'fs'; import { createHash, randomBytes } from 'crypto'; -import { Service } from 'typedi'; +import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'fs'; import { ApplicationError, jsonParse } from 'n8n-workflow'; +import path from 'path'; +import { Service } from 'typedi'; interface ReadOnlySettings { encryptionKey: string; diff --git a/packages/core/src/NodeExecuteFunctions.ts b/packages/core/src/NodeExecuteFunctions.ts index 0d37dde2b3..6cbef1e1b8 100644 --- a/packages/core/src/NodeExecuteFunctions.ts +++ b/packages/core/src/NodeExecuteFunctions.ts @@ -32,6 +32,7 @@ import { IncomingMessage, type IncomingHttpHeaders } from 'http'; import { Agent, type AgentOptions } from 'https'; import get from 'lodash/get'; import isEmpty from 'lodash/isEmpty'; +import merge from 'lodash/merge'; import pick from 'lodash/pick'; import { DateTime } from 'luxon'; import { extension, lookup } from 'mime-types'; @@ -101,6 +102,13 @@ import type { EnsureTypeOptions, SSHTunnelFunctions, SchedulingFunctions, + DeduplicationHelperFunctions, + IDeduplicationOutput, + IDeduplicationOutputItems, + ICheckProcessedOptions, + DeduplicationScope, + DeduplicationItemTypes, + ICheckProcessedContextData, AiEvent, } from 'n8n-workflow'; import { @@ -128,9 +136,13 @@ import clientOAuth1 from 'oauth-1.0a'; import path from 'path'; import { stringify } from 'qs'; import { Readable } from 'stream'; +import Container 
from 'typedi'; import url, { URL, URLSearchParams } from 'url'; +import { createAgentStartJob } from './Agent'; import { BinaryDataService } from './BinaryData/BinaryData.service'; +import type { BinaryData } from './BinaryData/types'; +import { binaryToBuffer } from './BinaryData/utils'; import { BINARY_DATA_STORAGE_PATH, BLOCK_FILE_ACCESS_TO_N8N_FILES, @@ -143,23 +155,20 @@ import { UM_EMAIL_TEMPLATES_INVITE, UM_EMAIL_TEMPLATES_PWRESET, } from './Constants'; -import { extractValue } from './ExtractValue'; -import type { ExtendedValidationResult, IResponseError } from './Interfaces'; +import { createNodeAsTool } from './CreateNodeAsTool'; +import { DataDeduplicationService } from './data-deduplication-service'; import { getAllWorkflowExecutionMetadata, getWorkflowExecutionMetadata, setAllWorkflowExecutionMetadata, setWorkflowExecutionMetadata, } from './ExecutionMetadata'; -import { getSecretsProxy } from './Secrets'; -import Container from 'typedi'; -import type { BinaryData } from './BinaryData/types'; -import merge from 'lodash/merge'; +import { extractValue } from './ExtractValue'; import { InstanceSettings } from './InstanceSettings'; +import type { ExtendedValidationResult, IResponseError } from './Interfaces'; import { ScheduledTaskManager } from './ScheduledTaskManager'; +import { getSecretsProxy } from './Secrets'; import { SSHClientsManager } from './SSHClientsManager'; -import { binaryToBuffer } from './BinaryData/utils'; -import { getNodeAsTool } from './CreateNodeAsTool'; axios.defaults.timeout = 300000; // Prevent axios from adding x-form-www-urlencoded headers by default @@ -1283,6 +1292,72 @@ async function prepareBinaryData( return await setBinaryDataBuffer(returnData, binaryData, workflowId, executionId); } +export async function checkProcessedAndRecord( + items: DeduplicationItemTypes[], + scope: DeduplicationScope, + contextData: ICheckProcessedContextData, + options: ICheckProcessedOptions, +): Promise { + return await DataDeduplicationService.getInstance().checkProcessedAndRecord( + items, + scope, + contextData, + options, + ); +} + +export async function checkProcessedItemsAndRecord( + key: string, + items: IDataObject[], + scope: DeduplicationScope, + contextData: ICheckProcessedContextData, + options: ICheckProcessedOptions, +): Promise { + return await DataDeduplicationService.getInstance().checkProcessedItemsAndRecord( + key, + items, + scope, + contextData, + options, + ); +} + +export async function removeProcessed( + items: DeduplicationItemTypes[], + scope: DeduplicationScope, + contextData: ICheckProcessedContextData, + options: ICheckProcessedOptions, +): Promise { + return await DataDeduplicationService.getInstance().removeProcessed( + items, + scope, + contextData, + options, + ); +} + +export async function clearAllProcessedItems( + scope: DeduplicationScope, + contextData: ICheckProcessedContextData, + options: ICheckProcessedOptions, +): Promise { + return await DataDeduplicationService.getInstance().clearAllProcessedItems( + scope, + contextData, + options, + ); +} +export async function getProcessedDataCount( + scope: DeduplicationScope, + contextData: ICheckProcessedContextData, + options: ICheckProcessedOptions, +): Promise { + return await DataDeduplicationService.getInstance().getProcessedDataCount( + scope, + contextData, + options, + ); +} function applyPaginationRequestData( requestData: IRequestOptions, paginationRequestData: PaginationOptions['request'], @@ -2851,7 +2926,7 @@ async function getInputConnectionData( if (!nodeType.supplyData) 
{ if (nodeType.description.outputs.includes(NodeConnectionType.AiTool)) { nodeType.supplyData = async function (this: IExecuteFunctions) { - return getNodeAsTool(this, nodeType, this.getNode().parameters); + return createNodeAsTool(this, nodeType, this.getNode().parameters); }; } else { throw new ApplicationError('Node does not have a `supplyData` method defined', { @@ -3452,6 +3527,52 @@ const getBinaryHelperFunctions = ( }, }); +const getCheckProcessedHelperFunctions = ( + workflow: Workflow, + node: INode, +): DeduplicationHelperFunctions => ({ + async checkProcessedAndRecord( + items: DeduplicationItemTypes[], + scope: DeduplicationScope, + options: ICheckProcessedOptions, + ): Promise { + return await checkProcessedAndRecord(items, scope, { node, workflow }, options); + }, + async checkProcessedItemsAndRecord( + propertyName: string, + items: IDataObject[], + scope: DeduplicationScope, + options: ICheckProcessedOptions, + ): Promise { + return await checkProcessedItemsAndRecord( + propertyName, + items, + scope, + { node, workflow }, + options, + ); + }, + async removeProcessed( + items: DeduplicationItemTypes[], + scope: DeduplicationScope, + options: ICheckProcessedOptions, + ): Promise { + return await removeProcessed(items, scope, { node, workflow }, options); + }, + async clearAllProcessedItems( + scope: DeduplicationScope, + options: ICheckProcessedOptions, + ): Promise { + return await clearAllProcessedItems(scope, { node, workflow }, options); + }, + async getProcessedDataCount( + scope: DeduplicationScope, + options: ICheckProcessedOptions, + ): Promise { + return await getProcessedDataCount(scope, { node, workflow }, options); + }, +}); + /** * Returns a copy of the items which only contains the json data and * of that only the defined properties @@ -3788,6 +3909,17 @@ export function getExecuteFunctions( additionalData.setExecutionStatus('waiting'); } }, + logNodeOutput(...args: unknown[]): void { + if (mode === 'manual') { + // @ts-expect-error `args` is spreadable + this.sendMessageToUI(...args); + return; + } + + if (process.env.CODE_ENABLE_STDOUT === 'true') { + console.log(`[Workflow "${this.getWorkflow().id}"][Node "${node.name}"]`, ...args); + } + }, sendMessageToUI(...args: any[]): void { if (mode !== 'manual') { return; @@ -3884,6 +4016,7 @@ export function getExecuteFunctions( ...getSSHTunnelFunctions(), ...getFileSystemHelperFunctions(node), ...getBinaryHelperFunctions(additionalData, workflow.id), + ...getCheckProcessedHelperFunctions(workflow, node), assertBinaryData: (itemIndex, propertyName) => assertBinaryData(inputData, node, itemIndex, propertyName, 0), getBinaryDataBuffer: async (itemIndex, propertyName) => @@ -3905,6 +4038,19 @@ export function getExecuteFunctions( }); }, getParentCallbackManager: () => additionalData.parentCallbackManager, + startJob: createAgentStartJob( + additionalData, + inputData, + node, + workflow, + runExecutionData, + runIndex, + node.name, + connectionInputData, + {}, + mode, + executeData, + ), }; })(workflow, runExecutionData, connectionInputData, inputData, node) as IExecuteFunctions; } diff --git a/packages/core/src/ObjectStore/ObjectStore.service.ee.ts b/packages/core/src/ObjectStore/ObjectStore.service.ee.ts index ddd21db799..0e4d8463df 100644 --- a/packages/core/src/ObjectStore/ObjectStore.service.ee.ts +++ b/packages/core/src/ObjectStore/ObjectStore.service.ee.ts @@ -1,12 +1,12 @@ -import { createHash } from 'node:crypto'; -import axios from 'axios'; -import { Service } from 'typedi'; import { sign } from 'aws4'; 
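The `supplyData` fallback just above is the hook that routes any AI-tool-capable node through the rewritten `CreateNodeAsTool.ts`. To make the transformation concrete, a rough sketch of what the `$fromAI` parser produces for a single placeholder; the parameter key and description are invented for illustration:

```ts
import { z } from 'zod';

// A node parameter authored in the editor might contain:
//   "={{ $fromAI('city', 'Name of the city to look up', 'string') }}"
//
// traverseNodeParameters() collects
//   { key: 'city', description: 'Name of the city to look up', type: 'string' }
// and generateZodSchema()/createTool() turn that into the tool's input schema,
// roughly equivalent to:
const schema = z
	.object({
		city: z.string().describe('Name of the city to look up'),
	})
	.required();

// The LLM supplies arguments matching this schema (e.g. { city: 'Berlin' })
// when it invokes the tool, and the node then executes with those values.
```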
-import { isStream, parseXml, writeBlockedMessage } from './utils'; -import { ApplicationError, LoggerProxy as Logger } from 'n8n-workflow'; - -import type { AxiosRequestConfig, AxiosResponse, InternalAxiosRequestConfig, Method } from 'axios'; import type { Request as Aws4Options, Credentials as Aws4Credentials } from 'aws4'; +import axios from 'axios'; +import type { AxiosRequestConfig, AxiosResponse, InternalAxiosRequestConfig, Method } from 'axios'; +import { ApplicationError, LoggerProxy as Logger } from 'n8n-workflow'; +import { createHash } from 'node:crypto'; +import type { Readable } from 'stream'; +import { Service } from 'typedi'; + import type { Bucket, ConfigSchemaCredentials, @@ -15,7 +15,7 @@ import type { RawListPage, RequestOptions, } from './types'; -import type { Readable } from 'stream'; +import { isStream, parseXml, writeBlockedMessage } from './utils'; import type { BinaryData } from '../BinaryData/types'; @Service() diff --git a/packages/core/src/ObjectStore/types.ts b/packages/core/src/ObjectStore/types.ts index 639ae2e6a2..d0b7ab0713 100644 --- a/packages/core/src/ObjectStore/types.ts +++ b/packages/core/src/ObjectStore/types.ts @@ -1,4 +1,5 @@ import type { AxiosResponseHeaders, ResponseType } from 'axios'; + import type { BinaryData } from '../BinaryData/types'; export type RawListPage = { diff --git a/packages/core/src/PartialExecutionUtils/DirectedGraph.ts b/packages/core/src/PartialExecutionUtils/DirectedGraph.ts index bd6cf81a2f..606f624d02 100644 --- a/packages/core/src/PartialExecutionUtils/DirectedGraph.ts +++ b/packages/core/src/PartialExecutionUtils/DirectedGraph.ts @@ -12,6 +12,11 @@ export type GraphConnection = { // fromName-outputType-outputIndex-inputIndex-toName type DirectedGraphKey = `${string}-${NodeConnectionType}-${number}-${number}-${string}`; +type RemoveNodeBaseOptions = { + reconnectConnections: boolean; + skipConnectionFn?: (connection: GraphConnection) => boolean; +}; + /** * Represents a directed graph as an adjacency list, e.g. one list for the * vertices and one list for the edges. @@ -67,6 +72,80 @@ export class DirectedGraph { return this; } + /** + * Removes a node from the graph. + * + * By default it will also remove all connections that use that node and + * return nothing. + * + * If you pass `{ reconnectConnections: true }` it will rewire all + * connections making sure all parent nodes are connected to all child nodes + * and return the new connections. 
+ */
+	removeNode(
+		node: INode,
+		options?: { reconnectConnections: true } & RemoveNodeBaseOptions,
+	): GraphConnection[];
+	removeNode(
+		node: INode,
+		options?: { reconnectConnections: false } & RemoveNodeBaseOptions,
+	): undefined;
+	removeNode(
+		node: INode,
+		options: RemoveNodeBaseOptions = { reconnectConnections: false },
+	): undefined | GraphConnection[] {
+		if (options.reconnectConnections) {
+			const incomingConnections = this.getDirectParentConnections(node);
+			const outgoingConnections = this.getDirectChildConnections(node);
+
+			const newConnections: GraphConnection[] = [];
+
+			for (const incomingConnection of incomingConnections) {
+				if (options.skipConnectionFn && options.skipConnectionFn(incomingConnection)) {
+					continue;
+				}
+
+				for (const outgoingConnection of outgoingConnections) {
+					if (options.skipConnectionFn && options.skipConnectionFn(outgoingConnection)) {
+						continue;
+					}
+
+					const newConnection = {
+						...incomingConnection,
+						to: outgoingConnection.to,
+						inputIndex: outgoingConnection.inputIndex,
+					};
+
+					newConnections.push(newConnection);
+				}
+			}
+
+			for (const [key, connection] of this.connections.entries()) {
+				if (connection.to === node || connection.from === node) {
+					this.connections.delete(key);
+				}
+			}
+
+			for (const newConnection of newConnections) {
+				this.connections.set(this.makeKey(newConnection), newConnection);
+			}
+
+			this.nodes.delete(node.name);
+
+			return newConnections;
+		} else {
+			for (const [key, connection] of this.connections.entries()) {
+				if (connection.to === node || connection.from === node) {
+					this.connections.delete(key);
+				}
+			}
+
+			this.nodes.delete(node.name);
+
+			return;
+		}
+	}
+
 	addConnection(connectionInput: {
 		from: INode;
 		to: INode;
@@ -108,7 +187,7 @@
 		return this;
 	}

-	getDirectChildren(node: INode) {
+	getDirectChildConnections(node: INode) {
 		const nodeExists = this.nodes.get(node.name) === node;
 		a.ok(nodeExists);

@@ -126,7 +205,7 @@
 	}

 	private getChildrenRecursive(node: INode, children: Set<INode>) {
-		const directChildren = this.getDirectChildren(node);
+		const directChildren = this.getDirectChildConnections(node);

 		for (const directChild of directChildren) {
 			// Break out if we found a cycle.
@@ -145,13 +224,13 @@
 	 * argument.
 	 *
 	 * If the node being passed in is a child of itself (e.g. is part of a
-	 * cylce), the return set will contain it as well.
+	 * cycle), the return set will contain it as well.
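+	 *
+	 * For example, in a hypothetical two-node cycle A → B → A, calling
+	 * `getChildren(A)` returns a set containing both A and B.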
 */
	getChildren(node: INode) {
		return this.getChildrenRecursive(node, new Set());
	}

-	getDirectParents(node: INode) {
+	getDirectParentConnections(node: INode) {
 		const nodeExists = this.nodes.get(node.name) === node;
 		a.ok(nodeExists);

@@ -168,6 +247,27 @@
 		return directParents;
 	}

+	private getParentConnectionsRecursive(node: INode, connections: Set<GraphConnection>) {
+		const parentConnections = this.getDirectParentConnections(node);
+
+		for (const connection of parentConnections) {
+			// break out of cycles
+			if (connections.has(connection)) {
+				continue;
+			}
+
+			connections.add(connection);
+
+			this.getParentConnectionsRecursive(connection.from, connections);
+		}
+
+		return connections;
+	}
+
+	getParentConnections(node: INode) {
+		return this.getParentConnectionsRecursive(node, new Set());
+	}
+
 	getConnection(
 		from: INode,
 		outputIndex: number,
diff --git a/packages/core/src/PartialExecutionUtils/__tests__/DirectedGraph.test.ts b/packages/core/src/PartialExecutionUtils/__tests__/DirectedGraph.test.ts
index 93df23de32..d6eedf416d 100644
--- a/packages/core/src/PartialExecutionUtils/__tests__/DirectedGraph.test.ts
+++ b/packages/core/src/PartialExecutionUtils/__tests__/DirectedGraph.test.ts
@@ -9,8 +9,10 @@
 // XX denotes that the node is disabled
 // PD denotes that the node has pinned data

-import { DirectedGraph } from '../DirectedGraph';
+import { NodeConnectionType } from 'n8n-workflow';
+
 import { createNodeData, defaultWorkflowParameter } from './helpers';
+import { DirectedGraph } from '../DirectedGraph';

 describe('DirectedGraph', () => {
 	// ┌─────┐    ┌─────┐   ┌─────┐
@@ -86,4 +88,256 @@
 			expect(children).toEqual(new Set([node1, node2, node3]));
 		});
 	});
+
+	describe('getParentConnections', () => {
+		// ┌─────┐    ┌─────┐    ┌─────┐    ┌─────┐
+		// │node1├──►│node2├──►│node3│──►│node4│
+		// └─────┘    └─────┘    └─────┘    └─────┘
+		test('returns all parent connections', () => {
+			// ARRANGE
+			const node1 = createNodeData({ name: 'Node1' });
+			const node2 = createNodeData({ name: 'Node2' });
+			const node3 = createNodeData({ name: 'Node3' });
+			const node4 = createNodeData({ name: 'Node4' });
+			const graph = new DirectedGraph()
+				.addNodes(node1, node2, node3, node4)
+				.addConnections(
+					{ from: node1, to: node2 },
+					{ from: node2, to: node3 },
+					{ from: node3, to: node4 },
+				);
+
+			// ACT
+			const connections = graph.getParentConnections(node3);
+
+			// ASSERT
+			const expectedConnections = graph.getConnections().filter((c) => c.to !== node4);
+			expect(connections.size).toBe(2);
+			expect(connections).toEqual(new Set(expectedConnections));
+		});
+
+		//    ┌─────┐     ┌─────┐    ┌─────┐
+		// ┌─►│node1├───►│node2├──►│node3├─┐
+		// │  └─────┘     └─────┘    └─────┘ │
+		// │                                 │
+		// └─────────────────────────────────┘
+		test('terminates when finding a cycle', () => {
+			// ARRANGE
+			const node1 = createNodeData({ name: 'Node1' });
+			const node2 = createNodeData({ name: 'Node2' });
+			const node3 = createNodeData({ name: 'Node3' });
+			const graph = new DirectedGraph()
+				.addNodes(node1, node2, node3)
+				.addConnections(
+					{ from: node1, to: node2 },
+					{ from: node2, to: node3 },
+					{ from: node3, to: node1 },
+				);
+
+			// ACT
+			const connections = graph.getParentConnections(node3);
+
+			// ASSERT
+			expect(connections.size).toBe(3);
+			expect(connections).toEqual(new Set(graph.getConnections()));
+		});
+	});
+
+	describe('removeNode', () => {
+		//                XX
+		//  ┌─────┐     ┌─────┐    ┌─────┐
+		//  │node0├───►│node1├──►│node2│
+		//  └─────┘     └─────┘    └─────┘
+		// turns into
+		//  ┌─────┐                ┌─────┐
+		//  │node0│                │node2│
+		//  └─────┘                └─────┘
+		test('remove node and all connections', () => {
+			// ARRANGE
+			const node0 = createNodeData({ name: 'node0' });
+			const node1 = createNodeData({ name: 'node1' });
+			const node2 = createNodeData({ name: 'node2' });
+			const graph = new DirectedGraph()
+				.addNodes(node0, node1, node2)
+				.addConnections({ from: node0, to: node1 }, { from: node0, to: node2 });
+
+			// ACT
+			graph.removeNode(node1);
+
+			// ASSERT
+			expect(graph).toEqual(
+				new DirectedGraph().addNodes(node0, node2).addConnections({ from: node0, to: node2 }),
+			);
+		});
+
+		//                XX
+		//  ┌─────┐     ┌─────┐    ┌─────┐
+		//  │node0├───►│node1├──►│node2│
+		//  └─────┘     └─────┘    └─────┘
+		// turns into
+		//  ┌─────┐     ┌─────┐
+		//  │node0├──►│node2│
+		//  └─────┘     └─────┘
+		test('remove node, but reconnect connections', () => {
+			// ARRANGE
+			const node0 = createNodeData({ name: 'node0' });
+			const node1 = createNodeData({ name: 'node1' });
+			const node2 = createNodeData({ name: 'node2' });
+			const graph = new DirectedGraph()
+				.addNodes(node0, node1, node2)
+				.addConnections({ from: node0, to: node1 }, { from: node1, to: node2 });
+
+			// ACT
+			const newConnections = graph.removeNode(node1, { reconnectConnections: true });
+
+			// ASSERT
+			expect(newConnections).toHaveLength(1);
+			expect(newConnections[0]).toEqual({
+				from: node0,
+				outputIndex: 0,
+				type: NodeConnectionType.Main,
+				inputIndex: 0,
+				to: node2,
+			});
+			expect(graph).toEqual(
+				new DirectedGraph().addNodes(node0, node2).addConnections({ from: node0, to: node2 }),
+			);
+		});
+
+		//                 XX
+		//  ┌─────┐      ┌─────┐      ┌─────┐
+		//  │     │o    o│     │o    o│     │
+		//  │     │o─┐  o│     │o    o│     │
+		//  │node0│o └►o│node1│o    o│node2│
+		//  │     │o    o│     │o─┐  o│     │
+		//  │     │o    o│     │o └►o│     │
+		//  └─────┘      └─────┘      └─────┘
+		// turns into
+		//  ┌─────┐                ┌─────┐
+		//  │     │o              o│     │
+		//  │     │o───────┐      o│     │
+		//  │node0│o       │      o│node2│
+		//  │     │o       │      o│     │
+		//  │     │o       └─────►o│     │
+		//  └─────┘                └─────┘
+		test('remove node, reconnect connections and retain the input indexes', () => {
+			// ARRANGE
+			const node0 = createNodeData({ name: 'node0' });
+			const node1 = createNodeData({ name: 'node1' });
+			const node2 = createNodeData({ name: 'node2' });
+			const graph = new DirectedGraph()
+				.addNodes(node0, node1, node2)
+				.addConnections(
+					{ from: node0, outputIndex: 1, inputIndex: 2, to: node1 },
+					{ from: node1, outputIndex: 3, inputIndex: 4, to: node2 },
+				);
+
+			// ACT
+			const newConnections = graph.removeNode(node1, { reconnectConnections: true });
+
+			// ASSERT
+			expect(newConnections).toHaveLength(1);
+			expect(newConnections[0]).toEqual({
+				from: node0,
+				outputIndex: 1,
+				type: NodeConnectionType.Main,
+				inputIndex: 4,
+				to: node2,
+			});
+			expect(graph).toEqual(
+				new DirectedGraph()
+					.addNodes(node0, node2)
+					.addConnections({ from: node0, outputIndex: 1, inputIndex: 4, to: node2 }),
+			);
+		});
+
+		//                 XX
+		//  ┌─────┐      ┌─────┐      ┌─────┐
+		//  │     │o    o│     │o     │     │
+		//  │     │o─┐  o│     │o     │     │
+		//  │node0│  └►o│node1│o  ┌►o│node2│
+		//  │     │     │     │o──┘   │     │
+		//  │     │     │     │       │     │
+		//  └─────┘      └─────┘      └─────┘
+		// turns into
+		//  ┌─────┐                ┌─────┐
+		//  │     │o               │     │
+		//  │     │o───────┐       │     │
+		//  │node0│        └─────►o│node2│
+		//  │     │                │     │
+		//  │     │                │     │
+		//  └─────┘                └─────┘
+		test('remove node, reconnect connections and retain the input indexes, even if the child has fewer inputs than the removed node had', () => {
+			// ARRANGE
+			const node0 = createNodeData({ name: 'node0' });
+			const node1 = createNodeData({ name: 'node1' });
+			const node2 = createNodeData({ name: 'node2' });
+			const graph = new DirectedGraph()
+				.addNodes(node0, node1, node2)
+				.addConnections(
+					{ from: node0,
+						outputIndex: 1, inputIndex: 2, to: node1 },
+					{ from: node1, outputIndex: 3, inputIndex: 0, to: node2 },
+				);
+
+			// ACT
+			const newConnections = graph.removeNode(node1, { reconnectConnections: true });
+
+			// ASSERT
+			const expectedGraph = new DirectedGraph()
+				.addNodes(node0, node2)
+				.addConnections({ from: node0, outputIndex: 1, inputIndex: 0, to: node2 });
+			expect(newConnections).toHaveLength(1);
+			expect(newConnections).toEqual(expectedGraph.getConnections());
+			expect(graph).toEqual(expectedGraph);
+		});
+
+		//  ┌─────┐                ┌──────┐
+		//  │left0├─┐    XX    ┌─►│right0│
+		//  └─────┘ │ ┌──────┐ │   └──────┘
+		//          ├─►│center├──┤
+		//  ┌─────┐ │ └──────┘ │   ┌──────┐
+		//  │left1├─┘          └─►│right1│
+		//  └─────┘                └──────┘
+		// turns into
+		//
+		//  ┌─────┐                ┌──────┐
+		//  │left0├─┐          ┌─►│right0│
+		//  └─────┘ │          │   └──────┘
+		//          ├───────────┤
+		//  ┌─────┐ │          │   ┌──────┐
+		//  │left1├─┘          └─►│right1│
+		//  └─────┘                └──────┘
+		test('remove node, reconnect connections and multiplex them', () => {
+			// ARRANGE
+			const left0 = createNodeData({ name: 'left0' });
+			const left1 = createNodeData({ name: 'left1' });
+			const center = createNodeData({ name: 'center' });
+			const right0 = createNodeData({ name: 'right0' });
+			const right1 = createNodeData({ name: 'right1' });
+			const graph = new DirectedGraph()
+				.addNodes(left0, left1, center, right0, right1)
+				.addConnections(
+					{ from: left0, to: center },
+					{ from: left1, to: center },
+					{ from: center, to: right0 },
+					{ from: center, to: right1 },
+				);
+
+			// ACT
+			const newConnections = graph.removeNode(center, { reconnectConnections: true });
+
+			// ASSERT
+			const expectedGraph = new DirectedGraph()
+				.addNodes(left0, left1, right0, right1)
+				.addConnections(
+					{ from: left0, to: right0 },
+					{ from: left0, to: right1 },
+					{ from: left1, to: right0 },
+					{ from: left1, to: right1 },
+				);
+			expect(newConnections).toHaveLength(4);
+			expect(newConnections).toEqual(expectedGraph.getConnections());
+			expect(graph).toEqual(expectedGraph);
+		});
+	});
 });
diff --git a/packages/core/src/PartialExecutionUtils/__tests__/cleanRunData.test.ts b/packages/core/src/PartialExecutionUtils/__tests__/cleanRunData.test.ts
index fabfae0ee3..bf37ec7636 100644
--- a/packages/core/src/PartialExecutionUtils/__tests__/cleanRunData.test.ts
+++ b/packages/core/src/PartialExecutionUtils/__tests__/cleanRunData.test.ts
@@ -1,7 +1,8 @@
 import type { IRunData } from 'n8n-workflow';
+
+import { createNodeData, toITaskData } from './helpers';
 import { cleanRunData } from '../cleanRunData';
 import { DirectedGraph } from '../DirectedGraph';
-import { createNodeData, toITaskData } from './helpers';

 describe('cleanRunData', () => {
 	// ┌─────┐    ┌─────┐   ┌─────┐
diff --git a/packages/core/src/PartialExecutionUtils/__tests__/findStartNodes.test.ts b/packages/core/src/PartialExecutionUtils/__tests__/findStartNodes.test.ts
index c830833d8d..57022d862c 100644
--- a/packages/core/src/PartialExecutionUtils/__tests__/findStartNodes.test.ts
+++ b/packages/core/src/PartialExecutionUtils/__tests__/findStartNodes.test.ts
@@ -10,9 +10,10 @@
 // PD denotes that the node has pinned data

 import { type IPinData, type IRunData } from 'n8n-workflow';
+
 import { createNodeData, toITaskData } from './helpers';
-import { findStartNodes, isDirty } from '../findStartNodes';
 import { DirectedGraph } from '../DirectedGraph';
+import { findStartNodes, isDirty } from '../findStartNodes';

 describe('isDirty', () => {
 	test("if the node has pinned data it's not dirty", () => {
@@ -45,7 +46,7 @@ describe('findStartNodes', () => {
 		const node =
createNodeData({ name: 'Basic Node' }); const graph = new DirectedGraph().addNode(node); - const startNodes = findStartNodes(graph, node, node); + const startNodes = findStartNodes({ graph, trigger: node, destination: node }); expect(startNodes).toHaveLength(1); expect(startNodes[0]).toEqual(node); @@ -64,7 +65,7 @@ describe('findStartNodes', () => { // if the trigger has no run data { - const startNodes = findStartNodes(graph, trigger, destination); + const startNodes = findStartNodes({ graph, trigger, destination }); expect(startNodes).toHaveLength(1); expect(startNodes[0]).toEqual(trigger); @@ -76,7 +77,7 @@ describe('findStartNodes', () => { [trigger.name]: [toITaskData([{ data: { value: 1 } }])], }; - const startNodes = findStartNodes(graph, trigger, destination, runData); + const startNodes = findStartNodes({ graph, trigger, destination, runData }); expect(startNodes).toHaveLength(1); expect(startNodes[0]).toEqual(destination); @@ -111,7 +112,7 @@ describe('findStartNodes', () => { }; // ACT - const startNodes = findStartNodes(graph, trigger, node, runData); + const startNodes = findStartNodes({ graph, trigger, destination: node, runData }); // ASSERT expect(startNodes).toHaveLength(1); @@ -152,7 +153,7 @@ describe('findStartNodes', () => { { // ACT - const startNodes = findStartNodes(graph, trigger, node4); + const startNodes = findStartNodes({ graph, trigger, destination: node4 }); // ASSERT expect(startNodes).toHaveLength(1); @@ -171,7 +172,7 @@ describe('findStartNodes', () => { }; // ACT - const startNodes = findStartNodes(graph, trigger, node4, runData); + const startNodes = findStartNodes({ graph, trigger, destination: node4, runData }); // ASSERT expect(startNodes).toHaveLength(1); @@ -200,8 +201,13 @@ describe('findStartNodes', () => { ); // ACT - const startNodes = findStartNodes(graph, trigger, node, { - [trigger.name]: [toITaskData([{ data: { value: 1 }, outputIndex: 0 }])], + const startNodes = findStartNodes({ + graph, + trigger, + destination: node, + runData: { + [trigger.name]: [toITaskData([{ data: { value: 1 }, outputIndex: 0 }])], + }, }); // ASSERT @@ -230,8 +236,13 @@ describe('findStartNodes', () => { ); // ACT - const startNodes = findStartNodes(graph, trigger, node, { - [trigger.name]: [toITaskData([{ data: { value: 1 }, outputIndex: 1 }])], + const startNodes = findStartNodes({ + graph, + trigger, + destination: node, + runData: { + [trigger.name]: [toITaskData([{ data: { value: 1 }, outputIndex: 1 }])], + }, }); // ASSERT @@ -260,13 +271,18 @@ describe('findStartNodes', () => { ); // ACT - const startNodes = findStartNodes(graph, trigger, node, { - [trigger.name]: [ - toITaskData([ - { data: { value: 1 }, outputIndex: 0 }, - { data: { value: 1 }, outputIndex: 1 }, - ]), - ], + const startNodes = findStartNodes({ + graph, + trigger, + destination: node, + runData: { + [trigger.name]: [ + toITaskData([ + { data: { value: 1 }, outputIndex: 0 }, + { data: { value: 1 }, outputIndex: 1 }, + ]), + ], + }, }); // ASSERT @@ -296,10 +312,15 @@ describe('findStartNodes', () => { ); // ACT - const startNodes = findStartNodes(graph, trigger, node3, { - [trigger.name]: [toITaskData([{ data: { value: 1 }, outputIndex: 0 }])], - [node1.name]: [toITaskData([{ data: { value: 1 }, outputIndex: 0 }])], - [node2.name]: [toITaskData([{ data: { value: 1 }, outputIndex: 0 }])], + const startNodes = findStartNodes({ + graph, + trigger, + destination: node3, + runData: { + [trigger.name]: [toITaskData([{ data: { value: 1 }, outputIndex: 0 }])], + [node1.name]: [toITaskData([{ data: 
{ value: 1 }, outputIndex: 0 }])], + [node2.name]: [toITaskData([{ data: { value: 1 }, outputIndex: 0 }])], + }, }); // ASSERT @@ -328,9 +349,14 @@ describe('findStartNodes', () => { ); // ACT - const startNodes = findStartNodes(graph, node1, node2, { - [trigger.name]: [toITaskData([{ data: { value: 1 } }])], - [node1.name]: [toITaskData([{ data: { value: 1 }, outputIndex: 1 }])], + const startNodes = findStartNodes({ + graph, + trigger: node1, + destination: node2, + runData: { + [trigger.name]: [toITaskData([{ data: { value: 1 } }])], + [node1.name]: [toITaskData([{ data: { value: 1 }, outputIndex: 1 }])], + }, }); // ASSERT @@ -363,7 +389,7 @@ describe('findStartNodes', () => { const pinData: IPinData = {}; // ACT - const startNodes = findStartNodes(graph, trigger, node2, runData, pinData); + const startNodes = findStartNodes({ graph, trigger, destination: node2, runData, pinData }); // ASSERT expect(startNodes).toHaveLength(1); diff --git a/packages/core/src/PartialExecutionUtils/__tests__/findSubgraph.test.ts b/packages/core/src/PartialExecutionUtils/__tests__/findSubgraph.test.ts index d82f73e9e3..fceb22da06 100644 --- a/packages/core/src/PartialExecutionUtils/__tests__/findSubgraph.test.ts +++ b/packages/core/src/PartialExecutionUtils/__tests__/findSubgraph.test.ts @@ -9,11 +9,13 @@ // XX denotes that the node is disabled // PD denotes that the node has pinned data +import { NodeConnectionType } from 'n8n-workflow'; + +import { createNodeData } from './helpers'; import { DirectedGraph } from '../DirectedGraph'; import { findSubgraph } from '../findSubgraph'; -import { createNodeData } from './helpers'; -describe('findSubgraph2', () => { +describe('findSubgraph', () => { // ►► // ┌───────┐ ┌───────────┐ // │trigger├────►│destination│ @@ -26,7 +28,7 @@ describe('findSubgraph2', () => { .addNodes(trigger, destination) .addConnections({ from: trigger, to: destination }); - const subgraph = findSubgraph(graph, destination, trigger); + const subgraph = findSubgraph({ graph, destination, trigger }); expect(subgraph).toEqual(graph); }); @@ -48,7 +50,7 @@ describe('findSubgraph2', () => { { from: ifNode, to: noOp, outputIndex: 1 }, ); - const subgraph = findSubgraph(graph, noOp, ifNode); + const subgraph = findSubgraph({ graph, destination: noOp, trigger: ifNode }); expect(subgraph).toEqual(graph); }); @@ -68,7 +70,7 @@ describe('findSubgraph2', () => { .addNodes(trigger, destination, node) .addConnections({ from: trigger, to: destination }, { from: destination, to: node }); - const subgraph = findSubgraph(graph, destination, trigger); + const subgraph = findSubgraph({ graph, destination, trigger }); expect(subgraph).toEqual( new DirectedGraph() @@ -83,6 +85,12 @@ describe('findSubgraph2', () => { // │trigger│ │disabled├─────►│destination│ // │ ├────────►│ │ └───────────┘ // └───────┘ └────────┘ + // turns into + // ┌───────┐ ►► + // │ │ ┌───────────┐ + // │trigger├─────►│destination│ + // │ │ └───────────┘ + // └───────┘ test('skip disabled nodes', () => { const trigger = createNodeData({ name: 'trigger' }); const disabled = createNodeData({ name: 'disabled', disabled: true }); @@ -92,7 +100,7 @@ describe('findSubgraph2', () => { .addNodes(trigger, disabled, destination) .addConnections({ from: trigger, to: disabled }, { from: disabled, to: destination }); - const subgraph = findSubgraph(graph, destination, trigger); + const subgraph = findSubgraph({ graph, destination, trigger }); expect(subgraph).toEqual( new DirectedGraph() @@ -101,6 +109,40 @@ describe('findSubgraph2', () => { ); }); + 
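+
+	// A minimal sketch of how these helpers are meant to compose (hypothetical
+	// `workflow`, `destination`, `trigger` and `runData`; mirrors the call
+	// sites in WorkflowExecute further below):
+	//
+	//   const graph = DirectedGraph.fromWorkflow(workflow);
+	//   const subgraph = findSubgraph({ graph, destination, trigger });
+	//   const startNodes = findStartNodes({ graph: subgraph, trigger, destination, runData });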
// XX XX + // ┌───────┐ ┌─────┐ ┌─────┐ ┌───────────┐ + // │trigger├────►│node1├────►│node2├────►│destination│ + // └───────┘ └─────┘ └─────┘ └───────────┘ + // turns into + // ┌───────┐ ┌───────────┐ + // │trigger├────►│destination│ + // └───────┘ └───────────┘ + test('skip multiple disabled nodes', () => { + // ARRANGE + const trigger = createNodeData({ name: 'trigger' }); + const disabledNode1 = createNodeData({ name: 'disabledNode1', disabled: true }); + const disabledNode2 = createNodeData({ name: 'disabledNode2', disabled: true }); + const destination = createNodeData({ name: 'destination' }); + + const graph = new DirectedGraph() + .addNodes(trigger, disabledNode1, disabledNode2, destination) + .addConnections( + { from: trigger, to: disabledNode1 }, + { from: disabledNode1, to: disabledNode2 }, + { from: disabledNode2, to: destination }, + ); + + // ACT + const subgraph = findSubgraph({ graph, destination, trigger }); + + // ASSERT + expect(subgraph).toEqual( + new DirectedGraph() + .addNodes(trigger, destination) + .addConnections({ from: trigger, to: destination }), + ); + }); + // ►► // ┌───────┐ ┌─────┐ ┌─────┐ // │Trigger├───┬──►│Node1├───┬─►│Node2│ @@ -121,7 +163,7 @@ describe('findSubgraph2', () => { ); // ACT - const subgraph = findSubgraph(graph, node2, trigger); + const subgraph = findSubgraph({ graph, destination: node2, trigger }); // ASSERT expect(subgraph).toEqual(graph); @@ -145,7 +187,7 @@ describe('findSubgraph2', () => { .addConnections({ from: trigger, to: node1 }, { from: node2, to: node1 }); // ACT - const subgraph = findSubgraph(graph, node1, trigger); + const subgraph = findSubgraph({ graph, destination: node1, trigger }); // ASSERT expect(subgraph).toEqual( @@ -173,7 +215,7 @@ describe('findSubgraph2', () => { ); // ACT - const subgraph = findSubgraph(graph, destination, trigger); + const subgraph = findSubgraph({ graph, destination, trigger }); // ASSERT expect(subgraph).toEqual( @@ -182,4 +224,110 @@ describe('findSubgraph2', () => { .addConnections({ from: trigger, to: destination }), ); }); + + describe('root nodes', () => { + // ►► + // ┌───────┐ ┌───────────┐ + // │trigger├─────►│destination│ + // └───────┘ └──▲────────┘ + // │AiLanguageModel + // ┌┴──────┐ + // │aiModel│ + // └───────┘ + test('always retain connections that have a different type than `NodeConnectionType.Main`', () => { + // ARRANGE + const trigger = createNodeData({ name: 'trigger' }); + const destination = createNodeData({ name: 'destination' }); + const aiModel = createNodeData({ name: 'ai_model' }); + + const graph = new DirectedGraph() + .addNodes(trigger, destination, aiModel) + .addConnections( + { from: trigger, to: destination }, + { from: aiModel, type: NodeConnectionType.AiLanguageModel, to: destination }, + ); + + // ACT + const subgraph = findSubgraph({ graph, destination, trigger }); + + // ASSERT + expect(subgraph).toEqual(graph); + }); + + // This graph is not possible, it's only here to make sure `findSubgraph` + // does not follow non-Main connections. + // + // ┌────┐ ┌───────────┐ + // │root┼───►destination│ + // └──▲─┘ └───────────┘ + // │AiLanguageModel + // ┌┴──────┐ + // │aiModel│ + // └▲──────┘ + // ┌┴──────┐ + // │trigger│ + // └───────┘ + // turns into an empty graph, because there is no `Main` typed connection + // connecting destination and trigger. 
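+		// Put differently: the backwards walk in `findSubgraphRecursive` only
+		// follows `Main` connections, so starting from `destination` it never
+		// reaches `trigger` here and nothing is added to the subgraph.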
+ test('skip non-Main connection types', () => { + // ARRANGE + const trigger = createNodeData({ name: 'trigger' }); + const root = createNodeData({ name: 'root' }); + const aiModel = createNodeData({ name: 'aiModel' }); + const destination = createNodeData({ name: 'destination' }); + const graph = new DirectedGraph() + .addNodes(trigger, root, aiModel, destination) + .addConnections( + { from: trigger, to: aiModel }, + { from: aiModel, type: NodeConnectionType.AiLanguageModel, to: root }, + { from: root, to: destination }, + ); + + // ACT + const subgraph = findSubgraph({ graph, destination, trigger }); + + // ASSERT + expect(subgraph.getConnections()).toHaveLength(0); + expect(subgraph.getNodes().size).toBe(0); + }); + + // + // XX + // ┌───────┐ ┌────┐ ┌───────────┐ + // │trigger├───►root├───►destination│ + // └───────┘ └──▲─┘ └───────────┘ + // │AiLanguageModel + // ┌┴──────┐ + // │aiModel│ + // └───────┘ + // turns into + // ┌───────┐ ┌───────────┐ + // │trigger├────────────►destination│ + // └───────┘ └───────────┘ + test('skip disabled root nodes', () => { + // ARRANGE + const trigger = createNodeData({ name: 'trigger' }); + const root = createNodeData({ name: 'root', disabled: true }); + const aiModel = createNodeData({ name: 'ai_model' }); + const destination = createNodeData({ name: 'destination' }); + + const graph = new DirectedGraph() + .addNodes(trigger, root, aiModel, destination) + .addConnections( + { from: trigger, to: root }, + { from: aiModel, type: NodeConnectionType.AiLanguageModel, to: root }, + { from: root, to: destination }, + ); + + // ACT + const subgraph = findSubgraph({ graph, destination: root, trigger }); + + // ASSERT + expect(subgraph).toEqual( + new DirectedGraph() + .addNodes(trigger, destination) + .addConnections({ from: trigger, to: destination }), + ); + }); + }); }); diff --git a/packages/core/src/PartialExecutionUtils/__tests__/getSourceDataGroups.test.ts b/packages/core/src/PartialExecutionUtils/__tests__/getSourceDataGroups.test.ts index 737d0a2754..dffbe310d1 100644 --- a/packages/core/src/PartialExecutionUtils/__tests__/getSourceDataGroups.test.ts +++ b/packages/core/src/PartialExecutionUtils/__tests__/getSourceDataGroups.test.ts @@ -9,8 +9,9 @@ import type { IPinData } from 'n8n-workflow'; import { NodeConnectionType, type IRunData } from 'n8n-workflow'; -import { DirectedGraph } from '../DirectedGraph'; + import { createNodeData, toITaskData } from './helpers'; +import { DirectedGraph } from '../DirectedGraph'; import { getSourceDataGroups } from '../getSourceDataGroups'; describe('getSourceDataGroups', () => { diff --git a/packages/core/src/PartialExecutionUtils/__tests__/recreateNodeExecutionStack.test.ts b/packages/core/src/PartialExecutionUtils/__tests__/recreateNodeExecutionStack.test.ts index 42cbe1f5ff..a4bcac23a5 100644 --- a/packages/core/src/PartialExecutionUtils/__tests__/recreateNodeExecutionStack.test.ts +++ b/packages/core/src/PartialExecutionUtils/__tests__/recreateNodeExecutionStack.test.ts @@ -9,12 +9,14 @@ // XX denotes that the node is disabled // PD denotes that the node has pinned data -import { recreateNodeExecutionStack } from '@/PartialExecutionUtils/recreateNodeExecutionStack'; -import { type IPinData, type IRunData } from 'n8n-workflow'; import { AssertionError } from 'assert'; +import { type IPinData, type IRunData } from 'n8n-workflow'; + +import { recreateNodeExecutionStack } from '@/PartialExecutionUtils/recreateNodeExecutionStack'; + +import { createNodeData, toITaskData } from './helpers'; import { 
DirectedGraph } from '../DirectedGraph'; import { findSubgraph } from '../findSubgraph'; -import { createNodeData, toITaskData } from './helpers'; describe('recreateNodeExecutionStack', () => { // ►► @@ -30,7 +32,7 @@ describe('recreateNodeExecutionStack', () => { .addNodes(trigger, node) .addConnections({ from: trigger, to: node }); - const workflow = findSubgraph(graph, node, trigger); + const workflow = findSubgraph({ graph, destination: node, trigger }); const startNodes = [node]; const runData: IRunData = { [trigger.name]: [toITaskData([{ data: { value: 1 } }])], diff --git a/packages/core/src/PartialExecutionUtils/__tests__/toIConnections.test.ts b/packages/core/src/PartialExecutionUtils/__tests__/toIConnections.test.ts index a2524bf3ce..e5ea0e658a 100644 --- a/packages/core/src/PartialExecutionUtils/__tests__/toIConnections.test.ts +++ b/packages/core/src/PartialExecutionUtils/__tests__/toIConnections.test.ts @@ -1,4 +1,5 @@ import { NodeConnectionType } from 'n8n-workflow'; + import { createNodeData, toIConnections } from './helpers'; test('toIConnections', () => { diff --git a/packages/core/src/PartialExecutionUtils/__tests__/toITaskData.test.ts b/packages/core/src/PartialExecutionUtils/__tests__/toITaskData.test.ts index e255836339..fe9c3f132a 100644 --- a/packages/core/src/PartialExecutionUtils/__tests__/toITaskData.test.ts +++ b/packages/core/src/PartialExecutionUtils/__tests__/toITaskData.test.ts @@ -1,4 +1,5 @@ import { NodeConnectionType } from 'n8n-workflow'; + import { toITaskData } from './helpers'; test('toITaskData', function () { diff --git a/packages/core/src/PartialExecutionUtils/cleanRunData.ts b/packages/core/src/PartialExecutionUtils/cleanRunData.ts index 945dca1451..5d74a3575a 100644 --- a/packages/core/src/PartialExecutionUtils/cleanRunData.ts +++ b/packages/core/src/PartialExecutionUtils/cleanRunData.ts @@ -1,4 +1,5 @@ import type { INode, IRunData } from 'n8n-workflow'; + import type { DirectedGraph } from './DirectedGraph'; /** diff --git a/packages/core/src/PartialExecutionUtils/findStartNodes.ts b/packages/core/src/PartialExecutionUtils/findStartNodes.ts index 910045d709..a6165f6564 100644 --- a/packages/core/src/PartialExecutionUtils/findStartNodes.ts +++ b/packages/core/src/PartialExecutionUtils/findStartNodes.ts @@ -1,4 +1,5 @@ import type { INode, IPinData, IRunData } from 'n8n-workflow'; + import type { DirectedGraph } from './DirectedGraph'; import { getIncomingData } from './getIncomingData'; @@ -79,7 +80,7 @@ function findStartNodesRecursive( } // Recurse with every direct child that is part of the sub graph. - const outGoingConnections = graph.getDirectChildren(current); + const outGoingConnections = graph.getDirectChildConnections(current); for (const outGoingConnection of outGoingConnections) { const nodeRunData = getIncomingData( runData, @@ -130,13 +131,19 @@ function findStartNodesRecursive( * - stop following the branch, there is no start node on this branch * 4. Recurse with every direct child that is part of the sub graph */ -export function findStartNodes( - graph: DirectedGraph, - trigger: INode, - destination: INode, - runData: IRunData = {}, - pinData: IPinData = {}, -): INode[] { +export function findStartNodes(options: { + graph: DirectedGraph; + trigger: INode; + destination: INode; + runData?: IRunData; + pinData?: IPinData; +}): INode[] { + const graph = options.graph; + const trigger = options.trigger; + const destination = options.destination; + const runData = options.runData ?? {}; + const pinData = options.pinData ?? 
{};
+
 	const startNodes = findStartNodesRecursive(
 		graph,
 		trigger,
diff --git a/packages/core/src/PartialExecutionUtils/findSubgraph.ts b/packages/core/src/PartialExecutionUtils/findSubgraph.ts
index 2b1ceb2998..4d3bc4cc3f 100644
--- a/packages/core/src/PartialExecutionUtils/findSubgraph.ts
+++ b/packages/core/src/PartialExecutionUtils/findSubgraph.ts
@@ -1,4 +1,5 @@
-import type { INode } from 'n8n-workflow';
+import { NodeConnectionType, type INode } from 'n8n-workflow';
+
 import type { GraphConnection } from './DirectedGraph';
 import { DirectedGraph } from './DirectedGraph';

@@ -20,7 +21,7 @@ function findSubgraphRecursive(
 		return;
 	}

-	let parentConnections = graph.getDirectParents(current);
+	let parentConnections = graph.getDirectParentConnections(current);

 	// If the current node has no parents, don’t keep this branch.
 	if (parentConnections.length === 0) {
@@ -50,31 +51,31 @@ function findSubgraphRecursive(
 	// Take every incoming connection and connect it to every node that is
 	// connected to the current node’s first output
 	if (current.disabled) {
-		const incomingConnections = graph.getDirectParents(current);
-		const outgoingConnections = graph
-			.getDirectChildren(current)
-			// NOTE: When a node is disabled only the first output gets data
-			.filter((connection) => connection.outputIndex === 0);
+		// The last segment on the current branch is still pointing to the removed
+		// node, so let's remove it.
+		currentBranch.pop();

-		parentConnections = [];
-
-		for (const incomingConnection of incomingConnections) {
-			for (const outgoingConnection of outgoingConnections) {
-				const newConnection = {
-					...incomingConnection,
-					to: outgoingConnection.to,
-					inputIndex: outgoingConnection.inputIndex,
-				};
-
-				parentConnections.push(newConnection);
-				currentBranch.pop();
-				currentBranch.push(newConnection);
-			}
-		}
+		// The node is replaced by a set of new connections, connecting the parents
+		// and children of it directly. In the recursive call below we'll follow
+		// them further.
+		parentConnections = graph.removeNode(current, {
+			reconnectConnections: true,
+			// If the node has non-Main connections we don't want to rewire those.
+			// Otherwise we'd end up connecting AI utilities to nodes that don't
+			// support them.
+			skipConnectionFn: (c) => c.type !== NodeConnectionType.Main,
+		});
 	}

 	// Recurse on each parent.
 	for (const parentConnection of parentConnections) {
+		// Skip parents that are connected via non-Main connection types. They are
+		// only utility nodes for AI, are not part of the data or control flow,
+		// and can never lead to the trigger.
+		if (parentConnection.type !== NodeConnectionType.Main) {
+			continue;
+		}
+
 		findSubgraphRecursive(graph, destinationNode, parentConnection.from, trigger, newGraph, [
 			...currentBranch,
 			parentConnection,
@@ -99,15 +100,41 @@ function findSubgraphRecursive(
 * - take every incoming connection and connect it to every node that is
 *   connected to the current node’s first output
 * 6. Recurse on each parent
+ * 7. Re-add all connections that don't use the `Main` connection type.
+ *    These are used by so-called root nodes; they are not part of the data
+ *    flow in the graph but utility nodes, like the AI model used in a
+ *    LangChain node.
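+ *
+ *    For example (hypothetical nodes): with trigger → destination plus an AI
+ *    model connected to destination via an `AiLanguageModel` connection, the
+ *    backwards walk only follows trigger → destination, but this re-add step
+ *    keeps the model and its connection in the returned subgraph.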
*/ -export function findSubgraph( - graph: DirectedGraph, - destinationNode: INode, - trigger: INode, -): DirectedGraph { - const newGraph = new DirectedGraph(); +export function findSubgraph(options: { + graph: DirectedGraph; + destination: INode; + trigger: INode; +}): DirectedGraph { + const graph = options.graph; + const destination = options.destination; + const trigger = options.trigger; + const subgraph = new DirectedGraph(); - findSubgraphRecursive(graph, destinationNode, destinationNode, trigger, newGraph, []); + findSubgraphRecursive(graph, destination, destination, trigger, subgraph, []); - return newGraph; + // For each node in the subgraph, if it has parent connections of a type that + // is not `Main` in the input graph, add the connections and the nodes + // connected to it to the subgraph + // + // Without this all AI related workflows would not work when executed + // partially, because all utility nodes would be missing. + for (const node of subgraph.getNodes().values()) { + const parentConnections = graph.getParentConnections(node); + + for (const connection of parentConnections) { + if (connection.type === NodeConnectionType.Main) { + continue; + } + + subgraph.addNodes(connection.from, connection.to); + subgraph.addConnection(connection); + } + } + + return subgraph; } diff --git a/packages/core/src/PartialExecutionUtils/findTriggerForPartialExecution.ts b/packages/core/src/PartialExecutionUtils/findTriggerForPartialExecution.ts index baae6e7304..977e99c107 100644 --- a/packages/core/src/PartialExecutionUtils/findTriggerForPartialExecution.ts +++ b/packages/core/src/PartialExecutionUtils/findTriggerForPartialExecution.ts @@ -1,5 +1,5 @@ -import type { INode, Workflow } from 'n8n-workflow'; import * as assert from 'assert/strict'; +import type { INode, Workflow } from 'n8n-workflow'; function findAllParentTriggers(workflow: Workflow, destinationNodeName: string) { const parentNodes = workflow diff --git a/packages/core/src/PartialExecutionUtils/recreateNodeExecutionStack.ts b/packages/core/src/PartialExecutionUtils/recreateNodeExecutionStack.ts index b1e3334440..4926becb79 100644 --- a/packages/core/src/PartialExecutionUtils/recreateNodeExecutionStack.ts +++ b/packages/core/src/PartialExecutionUtils/recreateNodeExecutionStack.ts @@ -1,3 +1,4 @@ +import * as a from 'assert/strict'; import { NodeConnectionType, type IExecuteData, @@ -11,7 +12,6 @@ import { type IWaitingForExecutionSource, } from 'n8n-workflow'; -import * as a from 'assert/strict'; import type { DirectedGraph } from './DirectedGraph'; import { getIncomingData } from './getIncomingData'; import { getSourceDataGroups } from './getSourceDataGroups'; @@ -44,12 +44,12 @@ export function recreateNodeExecutionStack( // Validate invariants. // The graph needs to be free of disabled nodes. If it's not it hasn't been - // passed through findSubgraph2. + // passed through findSubgraph. for (const node of graph.getNodes().values()) { a.notEqual( node.disabled, true, - `Graph contains disabled nodes. This is not supported. Make sure to pass the graph through "findSubgraph2" before calling "recreateNodeExecutionStack". The node in question is "${node.name}"`, + `Graph contains disabled nodes. This is not supported. Make sure to pass the graph through "findSubgraph" before calling "recreateNodeExecutionStack". 
The node in question is "${node.name}"`, ); } @@ -64,7 +64,7 @@ export function recreateNodeExecutionStack( for (const startNode of startNodes) { const incomingStartNodeConnections = graph - .getDirectParents(startNode) + .getDirectParentConnections(startNode) .filter((c) => c.type === NodeConnectionType.Main); let incomingData: INodeExecutionData[][] = []; @@ -135,7 +135,7 @@ export function recreateNodeExecutionStack( // Check if the destinationNode has to be added as waiting // because some input data is already fully available const incomingDestinationNodeConnections = graph - .getDirectParents(destinationNode) + .getDirectParentConnections(destinationNode) .filter((c) => c.type === NodeConnectionType.Main); if (incomingDestinationNodeConnections !== undefined) { for (const connection of incomingDestinationNodeConnections) { diff --git a/packages/core/src/SSHClientsManager.ts b/packages/core/src/SSHClientsManager.ts index 78126f96e8..17046a26eb 100644 --- a/packages/core/src/SSHClientsManager.ts +++ b/packages/core/src/SSHClientsManager.ts @@ -1,7 +1,7 @@ -import { Service } from 'typedi'; -import { Client, type ConnectConfig } from 'ssh2'; -import { createHash } from 'node:crypto'; import type { SSHCredentials } from 'n8n-workflow'; +import { createHash } from 'node:crypto'; +import { Client, type ConnectConfig } from 'ssh2'; +import { Service } from 'typedi'; @Service() export class SSHClientsManager { diff --git a/packages/core/src/ScheduledTaskManager.ts b/packages/core/src/ScheduledTaskManager.ts index eb519a60a7..00396903a5 100644 --- a/packages/core/src/ScheduledTaskManager.ts +++ b/packages/core/src/ScheduledTaskManager.ts @@ -1,6 +1,7 @@ -import { Service } from 'typedi'; import { CronJob } from 'cron'; import type { CronExpression, Workflow } from 'n8n-workflow'; +import { Service } from 'typedi'; + import { InstanceSettings } from './InstanceSettings'; @Service() @@ -29,8 +30,9 @@ export class ScheduledTaskManager { deregisterCrons(workflowId: string) { const cronJobs = this.cronJobs.get(workflowId) ?? 
[]; - for (const cronJob of cronJobs) { - cronJob.stop(); + while (cronJobs.length) { + const cronJob = cronJobs.pop(); + if (cronJob) cronJob.stop(); } } diff --git a/packages/core/src/WorkflowExecute.ts b/packages/core/src/WorkflowExecute.ts index a10d8c530c..ec5963a54b 100644 --- a/packages/core/src/WorkflowExecute.ts +++ b/packages/core/src/WorkflowExecute.ts @@ -2,9 +2,9 @@ /* eslint-disable @typescript-eslint/no-unsafe-member-access */ /* eslint-disable @typescript-eslint/no-unsafe-assignment */ /* eslint-disable @typescript-eslint/prefer-nullish-coalescing */ +import * as assert from 'assert/strict'; import { setMaxListeners } from 'events'; -import PCancelable from 'p-cancelable'; - +import get from 'lodash/get'; import type { ExecutionBaseError, ExecutionStatus, @@ -46,11 +46,9 @@ import { sleep, ErrorReporterProxy, } from 'n8n-workflow'; -import get from 'lodash/get'; -import * as NodeExecuteFunctions from './NodeExecuteFunctions'; +import PCancelable from 'p-cancelable'; -import * as assert from 'assert/strict'; -import { recreateNodeExecutionStack } from './PartialExecutionUtils/recreateNodeExecutionStack'; +import * as NodeExecuteFunctions from './NodeExecuteFunctions'; import { DirectedGraph, findCycles, @@ -59,6 +57,7 @@ import { findTriggerForPartialExecution, } from './PartialExecutionUtils'; import { cleanRunData } from './PartialExecutionUtils/cleanRunData'; +import { recreateNodeExecutionStack } from './PartialExecutionUtils/recreateNodeExecutionStack'; export class WorkflowExecute { private status: ExecutionStatus = 'new'; @@ -88,7 +87,7 @@ export class WorkflowExecute { * Executes the given workflow. * * @param {Workflow} workflow The workflow to execute - * @param {INode[]} [startNodes] Node to start execution from + * @param {INode[]} [startNode] Node to start execution from * @param {string} [destinationNode] Node to stop execution at */ // IMPORTANT: Do not add "async" to this function, it will then convert the @@ -333,9 +332,9 @@ export class WorkflowExecute { 'a destinationNodeName is required for the new partial execution flow', ); - const destinationNode = workflow.getNode(destinationNodeName); + const destination = workflow.getNode(destinationNodeName); assert.ok( - destinationNode, + destination, `Could not find a node with the name ${destinationNodeName} in the workflow.`, ); @@ -349,11 +348,11 @@ export class WorkflowExecute { // 2. Find the Subgraph const graph = DirectedGraph.fromWorkflow(workflow); - const subgraph = findSubgraph(graph, destinationNode, trigger); + const subgraph = findSubgraph({ graph, destination, trigger }); const filteredNodes = subgraph.getNodes(); // 3. Find the Start Nodes - const startNodes = findStartNodes(subgraph, trigger, destinationNode, runData); + const startNodes = findStartNodes({ graph: subgraph, trigger, destination, runData }); // 4. Detect Cycles const cycles = findCycles(workflow); @@ -368,7 +367,7 @@ export class WorkflowExecute { // 7. Recreate Execution Stack const { nodeExecutionStack, waitingExecution, waitingExecutionSource } = - recreateNodeExecutionStack(subgraph, startNodes, destinationNode, runData, pinData ?? {}); + recreateNodeExecutionStack(subgraph, startNodes, destination, runData, pinData ?? {}); // 8. 
Execute

 		this.status = 'running';
@@ -1059,7 +1058,7 @@ export class WorkflowExecute {
 							this.runExecutionData.startData!.runNodeFilter.indexOf(executionNode.name) === -1
 						) {
 							// If filter is set and node is not on filter skip it, that avoids the problem that it executes
-							// leafs that are parallel to a selected destinationNode. Normally it would execute them because
+							// leaves that are parallel to a selected destinationNode. Normally it would execute them because
 							// they have the same parent and it executes all child nodes.
 							continue;
 						}
@@ -1760,7 +1759,7 @@ export class WorkflowExecute {
 									continue;
 								}
 							} else {
-								// A certain amout of inputs are required (amount of inputs)
+								// A certain amount of inputs are required (amount of inputs)
 								if (inputsWithData.length < requiredInputs) {
 									continue;
 								}
@@ -1818,7 +1817,7 @@ export class WorkflowExecute {
 							// Node to add did not get found, rather an empty one removed so continue with search
 							waitingNodes = Object.keys(this.runExecutionData.executionData!.waitingExecution);
 							// Set counter to start again from the beginning. Set it to -1 as it auto increments
-							// after run. So only like that will we end up again ot 0.
+							// after run. So only like that will we end up again at 0.
 							i = -1;
 						}
 					}
diff --git a/packages/core/src/data-deduplication-service.ts b/packages/core/src/data-deduplication-service.ts
new file mode 100644
index 0000000000..4b7a51fcc2
--- /dev/null
+++ b/packages/core/src/data-deduplication-service.ts
@@ -0,0 +1,124 @@
+import get from 'lodash/get';
+import type {
+	IDataDeduplicator,
+	ICheckProcessedOptions,
+	IDeduplicationOutput,
+	IDeduplicationOutputItems,
+	IDataObject,
+	DeduplicationScope,
+	DeduplicationItemTypes,
+	ICheckProcessedContextData,
+} from 'n8n-workflow';
+import * as assert from 'node:assert/strict';
+
+/**
+ * A singleton service responsible for data deduplication.
+ * This service wraps around the IDataDeduplicator interface and provides methods to handle
+ * deduplication-related operations such as checking, recording, and clearing processed data.
+ */
+export class DataDeduplicationService {
+	private static instance: DataDeduplicationService;
+
+	private deduplicator: IDataDeduplicator;
+
+	private constructor(deduplicator: IDataDeduplicator) {
+		this.deduplicator = deduplicator;
+	}
+
+	private assertDeduplicator() {
+		assert.ok(
+			this.deduplicator,
+			'Manager needs to be initialized before use. Make sure to call init()',
+		);
+	}
+
+	private static assertInstance() {
+		assert.ok(
+			DataDeduplicationService.instance,
+			'Instance needs to be initialized before use. Make sure to call init()',
+		);
+	}
+
+	private static assertSingleInstance() {
+		assert.ok(
+			!DataDeduplicationService.instance,
+			'Instance already initialized. Multiple initializations are not allowed.',
+		);
+	}
+
+	static async init(deduplicator: IDataDeduplicator): Promise<void> {
+		this.assertSingleInstance();
+		DataDeduplicationService.instance = new DataDeduplicationService(deduplicator);
+	}
+
+	static getInstance(): DataDeduplicationService {
+		this.assertInstance();
+		return DataDeduplicationService.instance;
+	}
+
+	async checkProcessedItemsAndRecord(
+		propertyName: string,
+		items: IDataObject[],
+		scope: DeduplicationScope,
+		contextData: ICheckProcessedContextData,
+		options: ICheckProcessedOptions,
+	): Promise<IDeduplicationOutputItems> {
+		this.assertDeduplicator();
+		let value;
+		const itemLookup = items.reduce((acc, cur, index) => {
+			value = JSON.stringify(get(cur, propertyName));
+			acc[value ? value.toString() : ''] = index;
+			return acc;
+		}, {});
+
+		const checkedItems = await this.deduplicator.checkProcessedAndRecord(
+			Object.keys(itemLookup),
+			scope,
+			contextData,
+			options,
+		);
+
+		return {
+			new: checkedItems.new.map((key) => items[itemLookup[key] as number]),
+			processed: checkedItems.processed.map((key) => items[itemLookup[key] as number]),
+		};
+	}
+
+	async checkProcessedAndRecord(
+		items: DeduplicationItemTypes[],
+		scope: DeduplicationScope,
+		contextData: ICheckProcessedContextData,
+		options: ICheckProcessedOptions,
+	): Promise<IDeduplicationOutput> {
+		this.assertDeduplicator();
+		return await this.deduplicator.checkProcessedAndRecord(items, scope, contextData, options);
+	}
+
+	async removeProcessed(
+		items: DeduplicationItemTypes[],
+		scope: DeduplicationScope,
+		contextData: ICheckProcessedContextData,
+		options: ICheckProcessedOptions,
+	): Promise<void> {
+		this.assertDeduplicator();
+		return await this.deduplicator.removeProcessed(items, scope, contextData, options);
+	}
+
+	async clearAllProcessedItems(
+		scope: DeduplicationScope,
+		contextData: ICheckProcessedContextData,
+		options: ICheckProcessedOptions,
+	): Promise<void> {
+		this.assertDeduplicator();
+		return await this.deduplicator.clearAllProcessedItems(scope, contextData, options);
+	}
+
+	async getProcessedDataCount(
+		scope: DeduplicationScope,
+		contextData: ICheckProcessedContextData,
+		options: ICheckProcessedOptions,
+	): Promise<number> {
+		this.assertDeduplicator();
+		return await this.deduplicator.getProcessedDataCount(scope, contextData, options);
+	}
+}
diff --git a/packages/core/src/errors/invalid-mode.error.ts b/packages/core/src/errors/invalid-mode.error.ts
index 348fbb410d..179582911a 100644
--- a/packages/core/src/errors/invalid-mode.error.ts
+++ b/packages/core/src/errors/invalid-mode.error.ts
@@ -1,4 +1,5 @@
 import { ApplicationError } from 'n8n-workflow';
+
 import { CONFIG_MODES } from '../BinaryData/utils';

 export class InvalidModeError extends ApplicationError {
diff --git a/packages/core/src/index.ts b/packages/core/src/index.ts
index c6b8450a4f..ebe240b51e 100644
--- a/packages/core/src/index.ts
+++ b/packages/core/src/index.ts
@@ -14,6 +14,7 @@ export { InstanceSettings, InstanceType } from './InstanceSettings';
 export * from './NodeExecuteFunctions';
 export * from './WorkflowExecute';
 export { NodeExecuteFunctions };
+export * from './data-deduplication-service';
 export * from './errors';
 export { ObjectStoreService } from './ObjectStore/ObjectStore.service.ee';
 export { BinaryData } from './BinaryData/types';
diff --git a/packages/core/test/BinaryData/utils.test.ts b/packages/core/test/BinaryData/utils.test.ts
index 95a138c00d..50a7f165df 100644
--- a/packages/core/test/BinaryData/utils.test.ts
+++ b/packages/core/test/BinaryData/utils.test.ts
@@ -1,5 +1,6 @@
 import { Readable } from 'node:stream';
 import { createGunzip } from 'node:zlib';
+
 import { binaryToBuffer } from '@/BinaryData/utils';

 describe('BinaryData/utils', () => {
diff --git a/packages/core/test/Cipher.test.ts b/packages/core/test/Cipher.test.ts
index 1b7c0de944..e3dfa609fa 100644
--- a/packages/core/test/Cipher.test.ts
+++ b/packages/core/test/Cipher.test.ts
@@ -1,6 +1,8 @@
 import Container from 'typedi';
-import { InstanceSettings } from '@/InstanceSettings';
+
 import { Cipher } from '@/Cipher';
+import { InstanceSettings } from '@/InstanceSettings';
+
 import { mockInstance } from './utils';

 describe('Cipher', () => {
diff --git a/packages/core/test/CreateNodeAsTool.test.ts b/packages/core/test/CreateNodeAsTool.test.ts
index
c4509e08be..5c485b9837 100644 --- a/packages/core/test/CreateNodeAsTool.test.ts +++ b/packages/core/test/CreateNodeAsTool.test.ts @@ -1,8 +1,9 @@ -import { createNodeAsTool } from '@/CreateNodeAsTool'; import type { IExecuteFunctions, INodeParameters, INodeType } from 'n8n-workflow'; -import { NodeConnectionType } from 'n8n-workflow'; +import { NodeConnectionType, NodeOperationError } from 'n8n-workflow'; import { z } from 'zod'; +import { createNodeAsTool } from '@/CreateNodeAsTool'; + jest.mock('@langchain/core/tools', () => ({ DynamicStructuredTool: jest.fn().mockImplementation((config) => ({ name: config.name, @@ -18,10 +19,12 @@ describe('createNodeAsTool', () => { let mockNodeParameters: INodeParameters; beforeEach(() => { + // Setup mock objects mockCtx = { getNodeParameter: jest.fn(), - addInputData: jest.fn(), + addInputData: jest.fn().mockReturnValue({ index: 0 }), addOutputData: jest.fn(), + getNode: jest.fn().mockReturnValue({ name: 'Test_Node' }), } as unknown as IExecuteFunctions; mockNode = { @@ -33,60 +36,456 @@ describe('createNodeAsTool', () => { } as unknown as INodeType; mockNodeParameters = { - param1: "{{ '__PLACEHOLDER: Test parameter' }}", + param1: "={{$fromAI('param1', 'Test parameter', 'string') }}", param2: 'static value', nestedParam: { - subParam: "{{ '__PLACEHOLDER: Nested parameter' }}", + subParam: "={{ $fromAI('subparam', 'Nested parameter', 'string') }}", }, + descriptionType: 'auto', + resource: 'testResource', + operation: 'testOperation', }; + jest.clearAllMocks(); }); - it('should create a DynamicStructuredTool with correct properties', () => { - const tool = createNodeAsTool(mockNode, mockCtx, mockNodeParameters); + describe('Tool Creation and Basic Properties', () => { + it('should create a DynamicStructuredTool with correct properties', () => { + const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; - expect(tool).toBeDefined(); - expect(tool.name).toBe('TestNode'); - expect(tool.description).toBe('Test node description'); - expect(tool.schema).toBeDefined(); + expect(tool).toBeDefined(); + expect(tool.name).toBe('Test_Node'); + expect(tool.description).toBe( + 'Test node description\n Resource: testResource\n Operation: testOperation', + ); + expect(tool.schema).toBeDefined(); + }); + + it('should use toolDescription if provided', () => { + mockNodeParameters.descriptionType = 'manual'; + mockNodeParameters.toolDescription = 'Custom tool description'; + + const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + + expect(tool.description).toBe('Custom tool description'); + }); }); - it('should use toolDescription if provided', () => { - const customDescription = 'Custom tool description'; - (mockCtx.getNodeParameter as jest.Mock).mockReturnValue(customDescription); + describe('Schema Creation and Parameter Handling', () => { + it('should create a schema based on fromAI arguments in nodeParameters', () => { + const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; - const tool = createNodeAsTool(mockNode, mockCtx, mockNodeParameters); + expect(tool.schema).toBeDefined(); + expect(tool.schema.shape).toHaveProperty('param1'); + expect(tool.schema.shape).toHaveProperty('subparam'); + expect(tool.schema.shape).not.toHaveProperty('param2'); + }); - expect(tool.description).toBe(customDescription); + it('should handle fromAI arguments correctly', () => { + const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + + 
expect(tool.schema.shape.param1).toBeInstanceOf(z.ZodString); + expect(tool.schema.shape.subparam).toBeInstanceOf(z.ZodString); + }); + + it('should handle default values correctly', () => { + mockNodeParameters = { + paramWithDefault: + "={{ $fromAI('paramWithDefault', 'Parameter with default', 'string', 'default value') }}", + numberWithDefault: + "={{ $fromAI('numberWithDefault', 'Number with default', 'number', 42) }}", + booleanWithDefault: + "={{ $fromAI('booleanWithDefault', 'Boolean with default', 'boolean', true) }}", + }; + + const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + + expect(tool.schema.shape.paramWithDefault.description).toBe('Parameter with default'); + expect(tool.schema.shape.numberWithDefault.description).toBe('Number with default'); + expect(tool.schema.shape.booleanWithDefault.description).toBe('Boolean with default'); + }); + + it('should handle nested parameters correctly', () => { + mockNodeParameters = { + topLevel: "={{ $fromAI('topLevel', 'Top level parameter', 'string') }}", + nested: { + level1: "={{ $fromAI('level1', 'Nested level 1', 'string') }}", + deeperNested: { + level2: "={{ $fromAI('level2', 'Nested level 2', 'number') }}", + }, + }, + }; + + const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + + expect(tool.schema.shape.topLevel).toBeInstanceOf(z.ZodString); + expect(tool.schema.shape.level1).toBeInstanceOf(z.ZodString); + expect(tool.schema.shape.level2).toBeInstanceOf(z.ZodNumber); + }); + + it('should handle array parameters correctly', () => { + mockNodeParameters = { + arrayParam: [ + "={{ $fromAI('item1', 'First item', 'string') }}", + "={{ $fromAI('item2', 'Second item', 'number') }}", + ], + }; + + const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + + expect(tool.schema.shape.item1).toBeInstanceOf(z.ZodString); + expect(tool.schema.shape.item2).toBeInstanceOf(z.ZodNumber); + }); }); - it('should create a schema based on placeholder values in nodeParameters', () => { - const tool = createNodeAsTool(mockNode, mockCtx, mockNodeParameters); + describe('Error Handling and Edge Cases', () => { + it('should handle error during node execution', async () => { + mockNode.execute = jest.fn().mockRejectedValue(new Error('Execution failed')); + const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; - expect(tool.schema).toBeDefined(); - expect(tool.schema.shape).toHaveProperty('param1'); - expect(tool.schema.shape).toHaveProperty('nestedParam__subParam'); - expect(tool.schema.shape).not.toHaveProperty('param2'); + const result = await tool.func({ param1: 'test value' }); + + expect(result).toContain('Error during node execution:'); + expect(mockCtx.addOutputData).toHaveBeenCalledWith( + NodeConnectionType.AiTool, + 0, + expect.any(NodeOperationError), + ); + }); + + it('should throw an error for invalid parameter names', () => { + mockNodeParameters.invalidParam = "$fromAI('invalid param', 'Invalid parameter', 'string')"; + + expect(() => createNodeAsTool(mockCtx, mockNode, mockNodeParameters)).toThrow( + 'Parameter key `invalid param` is invalid', + ); + }); + + it('should throw an error for $fromAI calls with unsupported types', () => { + mockNodeParameters = { + invalidTypeParam: + "={{ $fromAI('invalidType', 'Param with unsupported type', 'unsupportedType') }}", + }; + + expect(() => createNodeAsTool(mockCtx, mockNode, mockNodeParameters)).toThrow( + 'Invalid type: unsupportedType', + ); + }); + + it('should handle empty parameters and 
parameters with no fromAI calls', () => { + mockNodeParameters = { + param1: 'static value 1', + param2: 'static value 2', + }; + + const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + + expect(tool.schema.shape).toEqual({}); + }); }); - it('should handle nested parameters correctly', () => { - const tool = createNodeAsTool(mockNode, mockCtx, mockNodeParameters); + describe('Parameter Name and Description Handling', () => { + it('should accept parameter names with underscores and hyphens', () => { + mockNodeParameters = { + validName1: + "={{ $fromAI('param_name-1', 'Valid name with underscore and hyphen', 'string') }}", + validName2: "={{ $fromAI('param_name_2', 'Another valid name', 'number') }}", + }; - expect(tool.schema.shape.nestedParam__subParam).toBeInstanceOf(z.ZodString); + const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + + expect(tool.schema.shape['param_name-1']).toBeInstanceOf(z.ZodString); + expect(tool.schema.shape['param_name-1'].description).toBe( + 'Valid name with underscore and hyphen', + ); + + expect(tool.schema.shape.param_name_2).toBeInstanceOf(z.ZodNumber); + expect(tool.schema.shape.param_name_2.description).toBe('Another valid name'); + }); + + it('should throw an error for parameter names with invalid special characters', () => { + mockNodeParameters = { + invalidNameParam: + "={{ $fromAI('param@name!', 'Invalid name with special characters', 'string') }}", + }; + + expect(() => createNodeAsTool(mockCtx, mockNode, mockNodeParameters)).toThrow( + 'Parameter key `param@name!` is invalid', + ); + }); + + it('should throw an error for empty parameter name', () => { + mockNodeParameters = { + invalidNameParam: "={{ $fromAI('', 'Invalid name with special characters', 'string') }}", + }; + + expect(() => createNodeAsTool(mockCtx, mockNode, mockNodeParameters)).toThrow( + 'You must specify a key when using $fromAI()', + ); + }); + + it('should handle parameter names with exact and exceeding character limits', () => { + const longName = 'a'.repeat(64); + const tooLongName = 'a'.repeat(65); + mockNodeParameters = { + longNameParam: `={{ $fromAI('${longName}', 'Param with 64 character name', 'string') }}`, + }; + + const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + + expect(tool.schema.shape[longName]).toBeInstanceOf(z.ZodString); + expect(tool.schema.shape[longName].description).toBe('Param with 64 character name'); + + expect(() => + createNodeAsTool(mockCtx, mockNode, { + tooLongNameParam: `={{ $fromAI('${tooLongName}', 'Param with 65 character name', 'string') }}`, + }), + ).toThrow(`Parameter key \`${tooLongName}\` is invalid`); + }); + + it('should handle $fromAI calls with empty description', () => { + mockNodeParameters = { + emptyDescriptionParam: "={{ $fromAI('emptyDescription', '', 'number') }}", + }; + + const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + + expect(tool.schema.shape.emptyDescription).toBeInstanceOf(z.ZodNumber); + expect(tool.schema.shape.emptyDescription.description).toBeUndefined(); + }); + + it('should throw an error for calls with the same parameter but different descriptions', () => { + mockNodeParameters = { + duplicateParam1: "={{ $fromAI('duplicate', 'First duplicate', 'string') }}", + duplicateParam2: "={{ $fromAI('duplicate', 'Second duplicate', 'number') }}", + }; + + expect(() => createNodeAsTool(mockCtx, mockNode, mockNodeParameters)).toThrow( + "Duplicate key 'duplicate' found with different description or type", + 
); + }); + it('should throw an error for calls with the same parameter but different types', () => { + mockNodeParameters = { + duplicateParam1: "={{ $fromAI('duplicate', 'First duplicate', 'string') }}", + duplicateParam2: "={{ $fromAI('duplicate', 'First duplicate', 'number') }}", + }; + + expect(() => createNodeAsTool(mockCtx, mockNode, mockNodeParameters)).toThrow( + "Duplicate key 'duplicate' found with different description or type", + ); + }); }); - it('should create a function that wraps the node execution', async () => { - const tool = createNodeAsTool(mockNode, mockCtx, mockNodeParameters); + describe('Complex Parsing Scenarios', () => { + it('should correctly parse $fromAI calls with varying spaces, capitalization, and within template literals', () => { + mockNodeParameters = { + varyingSpacing1: "={{$fromAI('param1','Description1','string')}}", + varyingSpacing2: "={{ $fromAI ( 'param2' , 'Description2' , 'number' ) }}", + varyingSpacing3: "={{ $FROMai('param3', 'Description3', 'boolean') }}", + wrongCapitalization: "={{$fromai('param4','Description4','number')}}", + templateLiteralParam: + // eslint-disable-next-line n8n-local-rules/no-interpolation-in-regular-string + "={{ `Value is: ${$fromAI('templatedParam', 'Templated param description', 'string')}` }}", + }; - const result = await tool.func({ param1: 'test value', nestedParam__subParam: 'nested value' }); + const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; - expect(mockCtx.addInputData).toHaveBeenCalledWith(NodeConnectionType.AiTool, [ - [{ json: { param1: 'test value', nestedParam__subParam: 'nested value' } }], - ]); - expect(mockNode.execute).toHaveBeenCalled(); - expect(mockCtx.addOutputData).toHaveBeenCalledWith(NodeConnectionType.AiTool, 0, [ - [{ json: { response: [{ result: 'test' }] } }], - ]); - expect(result).toBe(JSON.stringify([{ result: 'test' }])); + expect(tool.schema.shape.param1).toBeInstanceOf(z.ZodString); + expect(tool.schema.shape.param1.description).toBe('Description1'); + + expect(tool.schema.shape.param2).toBeInstanceOf(z.ZodNumber); + expect(tool.schema.shape.param2.description).toBe('Description2'); + + expect(tool.schema.shape.param3).toBeInstanceOf(z.ZodBoolean); + expect(tool.schema.shape.param3.description).toBe('Description3'); + + expect(tool.schema.shape.param4).toBeInstanceOf(z.ZodNumber); + expect(tool.schema.shape.param4.description).toBe('Description4'); + + expect(tool.schema.shape.templatedParam).toBeInstanceOf(z.ZodString); + expect(tool.schema.shape.templatedParam.description).toBe('Templated param description'); + }); + + it('should correctly parse multiple $fromAI calls interleaved with regular text', () => { + mockNodeParameters = { + interleavedParams: + "={{ 'Start ' + $fromAI('param1', 'First param', 'string') + ' Middle ' + $fromAI('param2', 'Second param', 'number') + ' End' }}", + }; + + const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + + expect(tool.schema.shape.param1).toBeInstanceOf(z.ZodString); + expect(tool.schema.shape.param1.description).toBe('First param'); + + expect(tool.schema.shape.param2).toBeInstanceOf(z.ZodNumber); + expect(tool.schema.shape.param2.description).toBe('Second param'); + }); + + it('should correctly parse $fromAI calls with complex JSON default values', () => { + mockNodeParameters = { + complexJsonDefault: + '={{ $fromAI(\'complexJson\', \'Param with complex JSON default\', \'json\', \'{"nested": {"key": "value"}, "array": [1, 2, 3]}\') }}', + }; + + const tool = 
createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + + expect(tool.schema.shape.complexJson._def.innerType).toBeInstanceOf(z.ZodRecord); + expect(tool.schema.shape.complexJson.description).toBe('Param with complex JSON default'); + expect(tool.schema.shape.complexJson._def.defaultValue()).toEqual({ + nested: { key: 'value' }, + array: [1, 2, 3], + }); + }); + + it('should ignore $fromAI calls embedded in non-string node parameters', () => { + mockNodeParameters = { + numberParam: 42, + booleanParam: false, + objectParam: { + innerString: "={{ $fromAI('innerParam', 'Inner param', 'string') }}", + innerNumber: 100, + innerObject: { + deepParam: "={{ $fromAI('deepParam', 'Deep param', 'number') }}", + }, + }, + arrayParam: [ + "={{ $fromAI('arrayParam1', 'First array param', 'string') }}", + 200, + "={{ $fromAI('nestedArrayParam', 'Nested array param', 'boolean') }}", + ], + }; + + const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + + expect(tool.schema.shape.innerParam).toBeInstanceOf(z.ZodString); + expect(tool.schema.shape.innerParam.description).toBe('Inner param'); + + expect(tool.schema.shape.deepParam).toBeInstanceOf(z.ZodNumber); + expect(tool.schema.shape.deepParam.description).toBe('Deep param'); + + expect(tool.schema.shape.arrayParam1).toBeInstanceOf(z.ZodString); + expect(tool.schema.shape.arrayParam1.description).toBe('First array param'); + + expect(tool.schema.shape.nestedArrayParam).toBeInstanceOf(z.ZodBoolean); + expect(tool.schema.shape.nestedArrayParam.description).toBe('Nested array param'); + }); + }); + + describe('Escaping and Special Characters', () => { + it('should handle escaped single quotes in parameter names and descriptions', () => { + mockNodeParameters = { + escapedQuotesParam: + "={{ $fromAI('paramName', 'Description with \\'escaped\\' quotes', 'string') }}", + }; + + const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + + expect(tool.schema.shape.paramName).toBeInstanceOf(z.ZodString); + expect(tool.schema.shape.paramName.description).toBe("Description with 'escaped' quotes"); + }); + + it('should handle escaped double quotes in parameter names and descriptions', () => { + mockNodeParameters = { + escapedQuotesParam: + '={{ $fromAI("paramName", "Description with \\"escaped\\" quotes", "string") }}', + }; + + const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + + expect(tool.schema.shape.paramName).toBeInstanceOf(z.ZodString); + expect(tool.schema.shape.paramName.description).toBe('Description with "escaped" quotes'); + }); + + it('should handle escaped backslashes in parameter names and descriptions', () => { + mockNodeParameters = { + escapedBackslashesParam: + "={{ $fromAI('paramName', 'Description with \\\\ backslashes', 'string') }}", + }; + + const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + + expect(tool.schema.shape.paramName).toBeInstanceOf(z.ZodString); + expect(tool.schema.shape.paramName.description).toBe('Description with \\ backslashes'); + }); + + it('should handle mixed escaped characters in parameter names and descriptions', () => { + mockNodeParameters = { + mixedEscapesParam: + '={{ $fromAI(`paramName`, \'Description with \\\'mixed" characters\', "number") }}', + }; + + const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + + expect(tool.schema.shape.paramName).toBeInstanceOf(z.ZodNumber); + expect(tool.schema.shape.paramName.description).toBe('Description with \'mixed" characters'); + 
}); + }); + + describe('Edge Cases and Limitations', () => { + it('should ignore excess arguments in $fromAI calls beyond the fourth argument', () => { + mockNodeParameters = { + excessArgsParam: + "={{ $fromAI('excessArgs', 'Param with excess arguments', 'string', 'default', 'extraArg1', 'extraArg2') }}", + }; + + const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + + expect(tool.schema.shape.excessArgs._def.innerType).toBeInstanceOf(z.ZodString); + expect(tool.schema.shape.excessArgs.description).toBe('Param with excess arguments'); + expect(tool.schema.shape.excessArgs._def.defaultValue()).toBe('default'); + }); + + it('should correctly parse $fromAI calls with nested parentheses', () => { + mockNodeParameters = { + nestedParenthesesParam: + "={{ $fromAI('paramWithNested', 'Description with ((nested)) parentheses', 'string') }}", + }; + + const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + + expect(tool.schema.shape.paramWithNested).toBeInstanceOf(z.ZodString); + expect(tool.schema.shape.paramWithNested.description).toBe( + 'Description with ((nested)) parentheses', + ); + }); + + it('should handle $fromAI calls with very long descriptions', () => { + const longDescription = 'A'.repeat(1000); + mockNodeParameters = { + longParam: `={{ $fromAI('longParam', '${longDescription}', 'string') }}`, + }; + + const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + + expect(tool.schema.shape.longParam).toBeInstanceOf(z.ZodString); + expect(tool.schema.shape.longParam.description).toBe(longDescription); + }); + + it('should handle $fromAI calls with only some parameters', () => { + mockNodeParameters = { + partialParam1: "={{ $fromAI('partial1') }}", + partialParam2: "={{ $fromAI('partial2', 'Description only') }}", + partialParam3: "={{ $fromAI('partial3', '', 'number') }}", + }; + + const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + + expect(tool.schema.shape.partial1).toBeInstanceOf(z.ZodString); + expect(tool.schema.shape.partial2).toBeInstanceOf(z.ZodString); + expect(tool.schema.shape.partial3).toBeInstanceOf(z.ZodNumber); + }); + }); + + describe('Unicode and Internationalization', () => { + it('should handle $fromAI calls with unicode characters', () => { + mockNodeParameters = { + unicodeParam: "={{ $fromAI('unicodeParam', '🌈 Unicode parameter 你好', 'string') }}", + }; + + const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + + expect(tool.schema.shape.unicodeParam).toBeInstanceOf(z.ZodString); + expect(tool.schema.shape.unicodeParam.description).toBe('🌈 Unicode parameter 你好'); + }); }); }); diff --git a/packages/core/test/Credentials.test.ts b/packages/core/test/Credentials.test.ts index ada86a07b0..fa7be59267 100644 --- a/packages/core/test/Credentials.test.ts +++ b/packages/core/test/Credentials.test.ts @@ -1,6 +1,7 @@ -import { Container } from 'typedi'; import { mock } from 'jest-mock-extended'; import type { CredentialInformation } from 'n8n-workflow'; +import { Container } from 'typedi'; + import { Cipher } from '@/Cipher'; import { Credentials } from '@/Credentials'; import type { InstanceSettings } from '@/InstanceSettings'; diff --git a/packages/core/test/FileSystem.manager.test.ts b/packages/core/test/FileSystem.manager.test.ts index 7087242726..581974c0e9 100644 --- a/packages/core/test/FileSystem.manager.test.ts +++ b/packages/core/test/FileSystem.manager.test.ts @@ -1,9 +1,11 @@ -import path from 'node:path'; import fs from 'node:fs'; 
import fsp from 'node:fs/promises'; import { tmpdir } from 'node:os'; +import path from 'node:path'; + import { FileSystemManager } from '@/BinaryData/FileSystem.manager'; import { isStream } from '@/ObjectStore/utils'; + import { toFileId, toStream } from './utils'; jest.mock('fs'); diff --git a/packages/core/test/InstanceSettings.test.ts b/packages/core/test/InstanceSettings.test.ts index 414f875274..64b6840f2f 100644 --- a/packages/core/test/InstanceSettings.test.ts +++ b/packages/core/test/InstanceSettings.test.ts @@ -1,4 +1,5 @@ import fs from 'fs'; + import { InstanceSettings } from '@/InstanceSettings'; describe('InstanceSettings', () => { diff --git a/packages/core/test/NodeExecuteFunctions.test.ts b/packages/core/test/NodeExecuteFunctions.test.ts index 3af9c752f6..421b7cd247 100644 --- a/packages/core/test/NodeExecuteFunctions.test.ts +++ b/packages/core/test/NodeExecuteFunctions.test.ts @@ -1,20 +1,9 @@ -import type { SecureContextOptions } from 'tls'; -import { - cleanupParameterData, - copyInputItems, - ensureType, - getBinaryDataBuffer, - isFilePathBlocked, - parseIncomingMessage, - parseRequestObject, - proxyRequestToAxios, - removeEmptyBody, - setBinaryDataBuffer, -} from '@/NodeExecuteFunctions'; -import { DateTime } from 'luxon'; import { mkdtempSync, readFileSync } from 'fs'; import type { IncomingMessage } from 'http'; +import type { Agent } from 'https'; import { mock } from 'jest-mock-extended'; +import toPlainObject from 'lodash/toPlainObject'; +import { DateTime } from 'luxon'; import type { IBinaryData, IHttpRequestMethods, @@ -28,14 +17,26 @@ import type { WorkflowHooks, } from 'n8n-workflow'; import { ExpressionError } from 'n8n-workflow'; -import { BinaryDataService } from '@/BinaryData/BinaryData.service'; import nock from 'nock'; import { tmpdir } from 'os'; import { join } from 'path'; +import type { SecureContextOptions } from 'tls'; import Container from 'typedi'; -import type { Agent } from 'https'; -import toPlainObject from 'lodash/toPlainObject'; + +import { BinaryDataService } from '@/BinaryData/BinaryData.service'; import { InstanceSettings } from '@/InstanceSettings'; +import { + cleanupParameterData, + copyInputItems, + ensureType, + getBinaryDataBuffer, + isFilePathBlocked, + parseIncomingMessage, + parseRequestObject, + proxyRequestToAxios, + removeEmptyBody, + setBinaryDataBuffer, +} from '@/NodeExecuteFunctions'; const temporaryDir = mkdtempSync(join(tmpdir(), 'n8n')); diff --git a/packages/core/test/ObjectStore.manager.test.ts b/packages/core/test/ObjectStore.manager.test.ts index abc1f24c3a..f01e170213 100644 --- a/packages/core/test/ObjectStore.manager.test.ts +++ b/packages/core/test/ObjectStore.manager.test.ts @@ -1,9 +1,11 @@ -import fs from 'node:fs/promises'; import { mock } from 'jest-mock-extended'; +import fs from 'node:fs/promises'; + import { ObjectStoreManager } from '@/BinaryData/ObjectStore.manager'; import { ObjectStoreService } from '@/ObjectStore/ObjectStore.service.ee'; -import { isStream } from '@/ObjectStore/utils'; import type { MetadataResponseHeaders } from '@/ObjectStore/types'; +import { isStream } from '@/ObjectStore/utils'; + import { mockInstance, toFileId, toStream } from './utils'; jest.mock('fs/promises'); diff --git a/packages/core/test/ObjectStore.service.test.ts b/packages/core/test/ObjectStore.service.test.ts index d39f08e1e2..77936c20f0 100644 --- a/packages/core/test/ObjectStore.service.test.ts +++ b/packages/core/test/ObjectStore.service.test.ts @@ -1,6 +1,7 @@ import axios from 'axios'; -import { 
ObjectStoreService } from '@/ObjectStore/ObjectStore.service.ee'; import { Readable } from 'stream'; + +import { ObjectStoreService } from '@/ObjectStore/ObjectStore.service.ee'; import { writeBlockedMessage } from '@/ObjectStore/utils'; jest.mock('axios'); diff --git a/packages/core/test/SSHClientsManager.test.ts b/packages/core/test/SSHClientsManager.test.ts index a7ceabe9f9..132a54baef 100644 --- a/packages/core/test/SSHClientsManager.test.ts +++ b/packages/core/test/SSHClientsManager.test.ts @@ -1,5 +1,6 @@ -import { Client } from 'ssh2'; import type { SSHCredentials } from 'n8n-workflow'; +import { Client } from 'ssh2'; + import { SSHClientsManager } from '@/SSHClientsManager'; describe('SSHClientsManager', () => { diff --git a/packages/core/test/ScheduledTaskManager.test.ts b/packages/core/test/ScheduledTaskManager.test.ts index 15d5f7d487..5166240856 100644 --- a/packages/core/test/ScheduledTaskManager.test.ts +++ b/packages/core/test/ScheduledTaskManager.test.ts @@ -1,5 +1,5 @@ -import type { Workflow } from 'n8n-workflow'; import { mock } from 'jest-mock-extended'; +import type { Workflow } from 'n8n-workflow'; import type { InstanceSettings } from '@/InstanceSettings'; import { ScheduledTaskManager } from '@/ScheduledTaskManager'; @@ -56,8 +56,13 @@ describe('ScheduledTaskManager', () => { scheduledTaskManager.registerCron(workflow, everyMinute, onTick); scheduledTaskManager.registerCron(workflow, everyMinute, onTick); scheduledTaskManager.registerCron(workflow, everyMinute, onTick); + + expect(scheduledTaskManager.cronJobs.get(workflow.id)?.length).toBe(3); + scheduledTaskManager.deregisterCrons(workflow.id); + expect(scheduledTaskManager.cronJobs.get(workflow.id)?.length).toBe(0); + expect(onTick).not.toHaveBeenCalled(); jest.advanceTimersByTime(10 * 60 * 1000); // 10 minutes expect(onTick).not.toHaveBeenCalled(); diff --git a/packages/core/test/Validation.test.ts b/packages/core/test/Validation.test.ts index a19422a090..04ad3c134e 100644 --- a/packages/core/test/Validation.test.ts +++ b/packages/core/test/Validation.test.ts @@ -1,4 +1,5 @@ import type { IDataObject, INode, INodeType } from 'n8n-workflow'; + import { validateValueAgainstSchema } from '@/NodeExecuteFunctions'; describe('Validation', () => { diff --git a/packages/core/test/WorkflowExecute.test.ts b/packages/core/test/WorkflowExecute.test.ts index d14a4e3fd1..6d1927fb88 100644 --- a/packages/core/test/WorkflowExecute.test.ts +++ b/packages/core/test/WorkflowExecute.test.ts @@ -5,6 +5,7 @@ import { NodeExecutionOutput, Workflow, } from 'n8n-workflow'; + import { WorkflowExecute } from '@/WorkflowExecute'; import * as Helpers from './helpers'; diff --git a/packages/core/test/WorkflowExecutionMetadata.test.ts b/packages/core/test/WorkflowExecutionMetadata.test.ts index cdb50c9737..63d0892e6a 100644 --- a/packages/core/test/WorkflowExecutionMetadata.test.ts +++ b/packages/core/test/WorkflowExecutionMetadata.test.ts @@ -1,3 +1,6 @@ +import type { IRunExecutionData } from 'n8n-workflow'; + +import { InvalidExecutionMetadataError } from '@/errors/invalid-execution-metadata.error'; import { setWorkflowExecutionMetadata, setAllWorkflowExecutionMetadata, @@ -5,8 +8,6 @@ import { getWorkflowExecutionMetadata, getAllWorkflowExecutionMetadata, } from '@/ExecutionMetadata'; -import { InvalidExecutionMetadataError } from '@/errors/invalid-execution-metadata.error'; -import type { IRunExecutionData } from 'n8n-workflow'; describe('Execution Metadata functions', () => { test('setWorkflowExecutionMetadata will set a value', () => { 
diff --git a/packages/core/test/helpers/constants.ts b/packages/core/test/helpers/constants.ts index 70819478ad..f3de1c667c 100644 --- a/packages/core/test/helpers/constants.ts +++ b/packages/core/test/helpers/constants.ts @@ -5,6 +5,7 @@ import type { WorkflowTestData, } from 'n8n-workflow'; import { NodeConnectionType } from 'n8n-workflow'; + import { If } from '../../../nodes-base/dist/nodes/If/If.node'; import { Merge } from '../../../nodes-base/dist/nodes/Merge/Merge.node'; import { NoOp } from '../../../nodes-base/dist/nodes/NoOp/NoOp.node';
diff --git a/packages/core/test/helpers/index.ts b/packages/core/test/helpers/index.ts index 5f935ef850..5f0858ea41 100644 --- a/packages/core/test/helpers/index.ts +++ b/packages/core/test/helpers/index.ts @@ -1,8 +1,5 @@ -import path from 'path'; import { readdirSync, readFileSync } from 'fs'; - -const BASE_DIR = path.resolve(__dirname, '../../..'); - +import { mock } from 'jest-mock-extended'; import type { IDataObject, IDeferredPromise, @@ -17,11 +14,12 @@ import type { WorkflowTestData, INodeTypeData, } from 'n8n-workflow'; - import { ApplicationError, NodeHelpers, WorkflowHooks } from 'n8n-workflow'; +import path from 'path'; import { predefinedNodesTypes } from './constants'; -import { mock } from 'jest-mock-extended'; + +const BASE_DIR = path.resolve(__dirname, '../../..'); class NodeTypesClass implements INodeTypes { constructor(private nodeTypes: INodeTypeData = predefinedNodesTypes) {}
diff --git a/packages/core/test/utils.ts b/packages/core/test/utils.ts index 8895875240..7f4862cabd 100644 --- a/packages/core/test/utils.ts +++ b/packages/core/test/utils.ts @@ -1,8 +1,8 @@ -import { Container } from 'typedi'; import { mock } from 'jest-mock-extended'; import { Duplex } from 'stream'; - import type { DeepPartial } from 'ts-essentials'; +import { Container } from 'typedi'; + import type { Class } from '@/Interfaces'; export const mockInstance = <T>(
diff --git a/packages/design-system/package.json b/packages/design-system/package.json index 526a9e43c4..36fcf31528 100644 --- a/packages/design-system/package.json +++ b/packages/design-system/package.json @@ -1,6 +1,6 @@ { "name": "n8n-design-system", - "version": "1.51.0", + "version": "1.53.0", "main": "src/main.ts", "import": "src/main.ts", "scripts": {
diff --git a/packages/design-system/src/__tests__/render.ts b/packages/design-system/src/__tests__/render.ts new file mode 100644 index 0000000000..afe27e6855 --- /dev/null +++ b/packages/design-system/src/__tests__/render.ts @@ -0,0 +1,20 @@ +import { render } from '@testing-library/vue'; + +import { N8nPlugin } from 'n8n-design-system/plugin'; + +type Component = Parameters<typeof render>[0]; +type RenderOptions = Parameters<typeof render>[1]; + +export const createComponentRenderer = (component: Component) => (options: RenderOptions) => { + const mergedOptions: RenderOptions = { + ...options, + global: { + ...(options?.global ?? {}), + stubs: { + ...(options?.global?.stubs ?? {}), + }, + plugins: [N8nPlugin, ...(options?.global?.plugins ??
[])], + }, + }; + return render(component, mergedOptions); +};
diff --git a/packages/design-system/src/__tests__/setup.ts b/packages/design-system/src/__tests__/setup.ts index 3ad85f014a..6eb1c426fc 100644 --- a/packages/design-system/src/__tests__/setup.ts +++ b/packages/design-system/src/__tests__/setup.ts @@ -4,3 +4,11 @@ import { config } from '@vue/test-utils'; import { N8nPlugin } from 'n8n-design-system/plugin'; config.global.plugins = [N8nPlugin]; + +window.ResizeObserver = + window.ResizeObserver || + vi.fn().mockImplementation(() => ({ + disconnect: vi.fn(), + observe: vi.fn(), + unobserve: vi.fn(), + }));
diff --git a/packages/design-system/src/components/AskAssistantChat/__tests__/AskAssistantChat.spec.ts b/packages/design-system/src/components/AskAssistantChat/__tests__/AskAssistantChat.spec.ts index 8977df997a..b4b7d281a5 100644 --- a/packages/design-system/src/components/AskAssistantChat/__tests__/AskAssistantChat.spec.ts +++ b/packages/design-system/src/components/AskAssistantChat/__tests__/AskAssistantChat.spec.ts @@ -4,21 +4,26 @@ import { n8nHtml } from 'n8n-design-system/directives'; import AskAssistantChat from '../AskAssistantChat.vue'; +const stubs = ['n8n-avatar', 'n8n-button', 'n8n-icon', 'n8n-icon-button']; + describe('AskAssistantChat', () => { it('renders default placeholder chat correctly', () => { const { container } = render(AskAssistantChat, { props: { user: { firstName: 'Kobi', lastName: 'Dog' }, }, + global: { stubs }, }); expect(container).toMatchSnapshot(); }); + it('renders chat with messages correctly', () => { const { container } = render(AskAssistantChat, { global: { directives: { n8nHtml, }, + stubs, }, props: { user: { firstName: 'Kobi', lastName: 'Dog' }, @@ -92,12 +97,14 @@ describe('AskAssistantChat', () => { }); expect(container).toMatchSnapshot(); }); + it('renders streaming chat correctly', () => { const { container } = render(AskAssistantChat, { global: { directives: { n8nHtml, }, + stubs, }, props: { user: { firstName: 'Kobi', lastName: 'Dog' }, @@ -116,12 +123,14 @@ describe('AskAssistantChat', () => { }); expect(container).toMatchSnapshot(); }); + it('renders end of session chat correctly', () => { const { container } = render(AskAssistantChat, { global: { directives: { n8nHtml, }, + stubs, }, props: { user: { firstName: 'Kobi', lastName: 'Dog' }, @@ -146,12 +155,14 @@ describe('AskAssistantChat', () => { }); expect(container).toMatchSnapshot(); }); + it('renders message with code snippet', () => { const { container } = render(AskAssistantChat, { global: { directives: { n8nHtml, }, + stubs, }, props: { user: { firstName: 'Kobi', lastName: 'Dog' },
diff --git a/packages/design-system/src/components/AskAssistantChat/__tests__/__snapshots__/AskAssistantChat.spec.ts.snap b/packages/design-system/src/components/AskAssistantChat/__tests__/__snapshots__/AskAssistantChat.spec.ts.snap index b3cfeff42a..79b57d2d17 100644 --- a/packages/design-system/src/components/AskAssistantChat/__tests__/__snapshots__/AskAssistantChat.spec.ts.snap +++ b/packages/design-system/src/components/AskAssistantChat/__tests__/__snapshots__/AskAssistantChat.spec.ts.snap [snapshot hunks @@ -64 @@, @@ -400 @@, @@ -422 @@, and @@ -708 @@: rendered markup lost in extraction — the close-chat icon button and the "Replace my code", "Give me another solution", and "All good" action buttons are now emitted as stub elements]
[snapshot hunks @@ -742 @@ through @@ -1581 @@: rendered markup lost in extraction — the close-chat icon button and the send button next to the chat textarea are likewise stubbed in the "default placeholder", "end of session", "message with code snippet", and "streaming" snapshots]
diff --git a/packages/design-system/src/components/CodeDiff/__tests__/CodeDiff.spec.ts b/packages/design-system/src/components/CodeDiff/__tests__/CodeDiff.spec.ts [index line, imports, and the hunk that introduces the shared `stubs` list lost in extraction] it('renders code diff correctly', () => { const { container } = render(CodeDiff, { @@ -10,9 +12,11 @@ describe('CodeDiff', () => { content: "--- original.js\n+++ modified.js\n@@ -1,2 +1,2 @@\n-const SIGNING_SECRET = $input.first().json.slack_secret_signature;\n-const item = $('Webhook to call for Slack command').first();\n+const SIGNING_SECRET = items[0].json.slack_secret_signature;\n+const item = items[0];\n@@ -7,8 +7,6 @@\n}\n\n-const crypto = require('crypto');\n-\n const { binary: { data } } = item;\n\n if (\n@@ -22,7 +20,7 @@\n const rawBody = Buffer.from(data.data, 'base64').toString()\n \n // compute the ", }, + global: { stubs }, }); expect(container).toMatchSnapshot(); }); + it('renders replaced code diff correctly', () => { const { container } = render(CodeDiff, { props: { @@ -21,9 +25,11 @@ describe('CodeDiff', () => { '@@ -1,7 +1,6 @@\n-The Way that can be told of is not the eternal Way;\n-The name that can be named is not the eternal name.\nThe Nameless is the origin of Heaven and Earth;\n-The Named is the mother of all things.\n+The named is the mother of all things.\n+\nTherefore let there always be non-being,\nso we may see their subtlety,\nAnd let there always be being,\n@@ -9,3 +8,6 @@\n The two are the same,\n But after they are produced,\n they have different names.\n+They both may be called deep and profound.\n+Deeper and more profound,\n+The door of all subtleties!', replaced: true, }, + global: { stubs }, }); expect(container).toMatchSnapshot(); }); + it('renders replacing code diff correctly', () => { const { container } = render(CodeDiff, { props: { @@ -32,9 +38,11 @@ describe('CodeDiff', () => { '@@ -1,7 +1,6 @@\n-The Way that can be told of is not the eternal Way;\n-The name that can be named is not the eternal name.\nThe Nameless is the origin of Heaven and Earth;\n-The Named is the mother of all things.\n+The named is the mother of all things.\n+\nTherefore let there always be non-being,\nso we may see their subtlety,\nAnd let there always be being,\n@@ -9,3 +8,6 @@\n The two are the same,\n But after they are produced,\n they have different names.\n+They both may be called deep and profound.\n+Deeper and more profound,\n+The door of all subtleties!', replacing: true, }, + global: { stubs }, }); expect(container).toMatchSnapshot(); }); + it('renders error state correctly', () => { const { container } = render(CodeDiff, { props: { @@ -43,6 +51,7 @@ describe('CodeDiff', () => { '@@ -1,7 +1,6 @@\n-The Way that can be told of is not the eternal Way;\n-The name that can be named is not the eternal name.\nThe Nameless is the origin of Heaven and Earth;\n-The Named is the mother of all things.\n+The named is the mother of all things.\n+\nTherefore let there always be non-being,\nso we may see their subtlety,\nAnd let there always be being,\n@@ -9,3 +8,6 @@\n The two are the same,\n But after they are produced,\n they have different names.\n+They both may be called deep and profound.\n+Deeper and more profound,\n+The door of all subtleties!', error: true, }, + global: { stubs }, }); expect(container).toMatchSnapshot(); });
diff --git a/packages/design-system/src/components/CodeDiff/__tests__/__snapshots__/CodeDiff.spec.ts.snap b/packages/design-system/src/components/CodeDiff/__tests__/__snapshots__/CodeDiff.spec.ts.snap index 97fc21bf5a..0810f24767 100644 --- a/packages/design-system/src/components/CodeDiff/__tests__/__snapshots__/CodeDiff.spec.ts.snap +++ b/packages/design-system/src/components/CodeDiff/__tests__/__snapshots__/CodeDiff.spec.ts.snap
[snapshot hunks @@ -270 @@ through the "replacing" snapshot: rendered markup lost in extraction — in the "code diff", "error state", "replaced", and "replacing" snapshots the action buttons ("Replace my code", "Undo", "Replacing...") are now emitted as stub elements]
diff --git a/packages/design-system/src/components/N8nFormBox/__tests__/FormBox.test.ts b/packages/design-system/src/components/N8nFormBox/__tests__/FormBox.test.ts new file mode 100644 index 0000000000..a309e1aa40 --- /dev/null +++ b/packages/design-system/src/components/N8nFormBox/__tests__/FormBox.test.ts @@ -0,0 +1,57 @@ +import { createComponentRenderer } from '../../../__tests__/render'; +import FormBox from '../FormBox.vue'; + +const render = createComponentRenderer(FormBox); + +describe('FormBox', () => { + it('should render the component', () => { + const { container } = render({ + props: { + title: 'Title', + inputs: [ + { + name: 'name', + properties: { + label: 'Name', + type: 'text', + required: true, + showRequiredAsterisk: true, + validateOnBlur: false, + autocomplete: 'email', + capitalize: true, + labelSize: 'small', + tagSize: 'small', + }, + }, + { + name: 'email', + properties: { + label: 'Email', + type: 'email', + required: true, + showRequiredAsterisk: true, + validateOnBlur: false, + autocomplete: 'email', + capitalize: true, + labelSize: 'medium', + tagSize: 'medium', + }, + }, + { + name: 'password', + properties: { + label: 'Password', + type: 'password', + required: true, + showRequiredAsterisk: true, + validateOnBlur: false, + autocomplete: 'current-password', + capitalize: true, + }, + }, + ], + }, + }); + expect(container).toMatchSnapshot(); + }); +}); diff --git a/packages/design-system/src/components/N8nFormBox/__tests__/__snapshots__/FormBox.test.ts.snap b/packages/design-system/src/components/N8nFormBox/__tests__/__snapshots__/FormBox.test.ts.snap new file mode 100644 index 0000000000..8138b44b8c --- /dev/null +++ b/packages/design-system/src/components/N8nFormBox/__tests__/__snapshots__/FormBox.test.ts.snap @@ -0,0 +1,259 @@ +// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html + +exports[`FormBox > should render the component 1`] = ` +
+ [snapshot body (259 lines of rendered FormBox markup) lost in extraction — only the heading text "Title" survives; per FormBox.test.ts above, the snapshot covers a form box with Name, Email, and Password inputs]
+`;
diff --git a/packages/design-system/src/components/N8nFormInput/FormInput.vue b/packages/design-system/src/components/N8nFormInput/FormInput.vue index b1fbd4b6e3..b50868a303 100644 --- a/packages/design-system/src/components/N8nFormInput/FormInput.vue +++ b/packages/design-system/src/components/N8nFormInput/FormInput.vue @@ -49,7 +49,7 @@ export interface Props { inactiveLabel?: string; inactiveColor?: string; teleported?: boolean; - tagSize?: 'small' | 'medium'; + tagSize?: 'small' | 'medium' | 'large'; } const props = withDefaults(defineProps<Props>(), { @@ -59,7 +59,7 @@ const props = withDefaults(defineProps<Props>(), { showRequiredAsterisk: true, validateOnBlur: true, teleported: true, - tagSize: 'small', + tagSize: 'large', }); const emit = defineEmits<{
diff --git a/packages/design-system/src/components/N8nFormInputs/FormInputs.vue b/packages/design-system/src/components/N8nFormInputs/FormInputs.vue index 8a27b28137..261663de32 100644 --- a/packages/design-system/src/components/N8nFormInputs/FormInputs.vue +++ b/packages/design-system/src/components/N8nFormInputs/FormInputs.vue @@ -13,7 +13,6 @@ export type FormInputsProps = { columnView?: boolean; verticalSpacing?: '' | 'xs' | 's' | 'm' | 'l' | 'xl'; teleported?: boolean; - tagSize?: 'small' | 'medium'; }; type Value = string | number | boolean | null | undefined; @@ -24,7 +23,6 @@ const props = withDefaults(defineProps<FormInputsProps>(), { columnView: false, verticalSpacing: '', teleported: true, - tagSize: 'small', }); const emit = defineEmits<{ @@ -129,7 +127,6 @@ onMounted(() => { :data-test-id="input.name" :show-validation-warnings="showValidationWarnings" :teleported="teleported" - :tag-size="tagSize" @update:model-value="(value: Value) => onUpdateModelValue(input.name, value)" @validate="(value: boolean) => onValidate(input.name, value)" @enter="onSubmit"
diff --git a/packages/design-system/src/components/N8nInput/Input.vue b/packages/design-system/src/components/N8nInput/Input.vue index 39cda6ce30..776b0bf7c8 100644 --- a/packages/design-system/src/components/N8nInput/Input.vue +++ b/packages/design-system/src/components/N8nInput/Input.vue @@ -39,7 +39,7 @@ const props = withDefaults(defineProps(), { }); const resolvedSize = computed( - () => (props.size === 'xlarge' ? undefined : props.size) as ElementPlusSizePropType, + () => (props.size === 'medium' ? 'default' : props.size) as ElementPlusSizePropType, ); const classes = computed(() => {
diff --git a/packages/design-system/src/components/N8nInputLabel/InputLabel.vue b/packages/design-system/src/components/N8nInputLabel/InputLabel.vue index 4a89233bf1..e522579713 100644 --- a/packages/design-system/src/components/N8nInputLabel/InputLabel.vue +++ b/packages/design-system/src/components/N8nInputLabel/InputLabel.vue @@ -33,7 +33,14 @@ const addTargetBlank = (html: string) =>