diff --git a/CHANGELOG.md b/CHANGELOG.md index 219c7b1726..7c2f569bf6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,55 @@ +## [1.62.1](https://github.com/n8n-io/n8n/compare/n8n@1.61.0...n8n@1.62.1) (2024-10-02) + + +### Bug Fixes + +* **AI Agent Node:** Fix output parsing and empty tool input handling in AI Agent node ([#10970](https://github.com/n8n-io/n8n/issues/10970)) ([3a65bdc](https://github.com/n8n-io/n8n/commit/3a65bdc1f522932d463b4da0e67d29076887d06c)) +* **API:** Fix workflow project transfer ([#10651](https://github.com/n8n-io/n8n/issues/10651)) ([5f89e3a](https://github.com/n8n-io/n8n/commit/5f89e3a01c1bbb3589ff0464fd5bc991426f55dc)) +* **AwsS3 Node:** Fix search only using first input parameters ([#10998](https://github.com/n8n-io/n8n/issues/10998)) ([846cfde](https://github.com/n8n-io/n8n/commit/846cfde8dcaf7bf80f0a4ca7d65fc2a7b61d0e23)) +* **Chat Trigger Node:** Fix Allowed Origins parameter ([#11011](https://github.com/n8n-io/n8n/issues/11011)) ([b5f4afe](https://github.com/n8n-io/n8n/commit/b5f4afe12ec77f527080a4b7f812e12f9f73f8df)) +* **core:** Fix ownerless project case in statistics service ([#11051](https://github.com/n8n-io/n8n/issues/11051)) ([bdaadf1](https://github.com/n8n-io/n8n/commit/bdaadf10e058e2c0b1141289189d6526c030a2ca)) +* **core:** Handle Redis disconnects gracefully ([#11007](https://github.com/n8n-io/n8n/issues/11007)) ([cd91648](https://github.com/n8n-io/n8n/commit/cd916480c2d2b55f2215c72309dc432340fc3f30)) +* **core:** Prevent backend from loading duplicate copies of nodes packages ([#10979](https://github.com/n8n-io/n8n/issues/10979)) ([4584f22](https://github.com/n8n-io/n8n/commit/4584f22a9b16883779d8555cda309fd8bd113f6c)) +* **core:** Upgrade @n8n/typeorm to address a rare mutex release issue ([#10993](https://github.com/n8n-io/n8n/issues/10993)) ([2af0fbf](https://github.com/n8n-io/n8n/commit/2af0fbf52f0b404697f5148f81ad0035c9ffb6b9)) +* **editor:** Allow resources to move between personal and team projects 
([#10683](https://github.com/n8n-io/n8n/issues/10683)) ([136d491](https://github.com/n8n-io/n8n/commit/136d49132567558b7d27069c857c0e0bfee70ce2)) +* **editor:** Color scheme for a markdown code blocks in dark mode ([#11008](https://github.com/n8n-io/n8n/issues/11008)) ([b20d2eb](https://github.com/n8n-io/n8n/commit/b20d2eb403f71fe1dc21c92df118adcebef51ffe)) +* **editor:** Fix filter execution by "Queued" ([#10987](https://github.com/n8n-io/n8n/issues/10987)) ([819d20f](https://github.com/n8n-io/n8n/commit/819d20fa2eee314b88a7ce1c4db632afac514704)) +* **editor:** Fix performance issue in credentials list ([#10988](https://github.com/n8n-io/n8n/issues/10988)) ([7073ec6](https://github.com/n8n-io/n8n/commit/7073ec6fe5384cc8c50dcb242212999a1fbc9041)) +* **editor:** Fix schema view pill highlighting ([#10936](https://github.com/n8n-io/n8n/issues/10936)) ([1b973dc](https://github.com/n8n-io/n8n/commit/1b973dcd8dbce598e6ada490fd48fad52f7b4f3a)) +* **editor:** Fix workflow executions list page redirection ([#10981](https://github.com/n8n-io/n8n/issues/10981)) ([fe7d060](https://github.com/n8n-io/n8n/commit/fe7d0605681dc963f5e5d1607f9d40c5173e0f9f)) +* **editor:** Format action names properly when action is not defined ([#11030](https://github.com/n8n-io/n8n/issues/11030)) ([9c43fb3](https://github.com/n8n-io/n8n/commit/9c43fb301d1ccb82e42f46833e19587289803cd3)) +* **Elasticsearch Node:** Fix issue with self signed certificates not working ([#10954](https://github.com/n8n-io/n8n/issues/10954)) ([79622b5](https://github.com/n8n-io/n8n/commit/79622b5f267f2a4a53f3eb48e228939d6e3a9caa)) +* **Facebook Lead Ads Trigger Node:** Pagination fix in RLC ([#10956](https://github.com/n8n-io/n8n/issues/10956)) ([6322372](https://github.com/n8n-io/n8n/commit/632237261087ada0177b67922f9f48ca02ef1d9e)) +* **Github Document Loader Node:** Pass through apiUrl from credentials & fix log output ([#11049](https://github.com/n8n-io/n8n/issues/11049)) 
([a7af981](https://github.com/n8n-io/n8n/commit/a7af98183c47a5e215869c8269729b0fb2f318b5)) +* **Google Sheets Node:** Updating on row_number using automatic matching ([#10940](https://github.com/n8n-io/n8n/issues/10940)) ([ed91495](https://github.com/n8n-io/n8n/commit/ed91495ebc1e09b89533ffef4b775eaa0139f365)) +* **HTTP Request Tool Node:** Remove default user agent header ([#10971](https://github.com/n8n-io/n8n/issues/10971)) ([5a99e93](https://github.com/n8n-io/n8n/commit/5a99e93f8d2c66d7dbcef382478badd63bc4a0b5)) +* **Postgres Node:** Falsy query parameters ignored ([#10960](https://github.com/n8n-io/n8n/issues/10960)) ([4a63cff](https://github.com/n8n-io/n8n/commit/4a63cff5ec722c810e3ff2bd7b0bb1e32f7f403b)) +* **Respond to Webhook Node:** Node does not work with Wait node ([#10992](https://github.com/n8n-io/n8n/issues/10992)) ([2df5a5b](https://github.com/n8n-io/n8n/commit/2df5a5b649f8ba3b747782d6d5045820aa74955d)) +* **RSS Feed Trigger Node:** Fix regression on missing timestamps ([#10991](https://github.com/n8n-io/n8n/issues/10991)) ([d2bc076](https://github.com/n8n-io/n8n/commit/d2bc0760e2b5c977fcc683f0a0281f099a9c538d)) +* **Supabase Node:** Fix issue with delete not always working ([#10952](https://github.com/n8n-io/n8n/issues/10952)) ([1944b46](https://github.com/n8n-io/n8n/commit/1944b46fd472bb59552b5fbf7783168a622a2bd2)) +* **Text Classifier Node:** Default system prompt template ([#11018](https://github.com/n8n-io/n8n/issues/11018)) ([77fec19](https://github.com/n8n-io/n8n/commit/77fec195d92e0fe23c60552a72e8c030cf7e5e5c)) +* **Todoist Node:** Fix listSearch filter bug in Todoist Node ([#10989](https://github.com/n8n-io/n8n/issues/10989)) ([c4b3272](https://github.com/n8n-io/n8n/commit/c4b327248d7aa1352e8d6acec5627ff406aea3d4)) +* **Todoist Node:** Make Section Name optional in Move Task operation ([#10732](https://github.com/n8n-io/n8n/issues/10732)) ([799006a](https://github.com/n8n-io/n8n/commit/799006a3cce6abe210469c839ae392d0c1aec486)) + + +### 
Features + +* Add more context to support chat ([#11014](https://github.com/n8n-io/n8n/issues/11014)) ([8a30f92](https://github.com/n8n-io/n8n/commit/8a30f92156d6a4fe73113bd3cdfb751b8c9ce4b4)) +* Add Sysdig API credentials for SecOps ([#7033](https://github.com/n8n-io/n8n/issues/7033)) ([a8d1a1e](https://github.com/n8n-io/n8n/commit/a8d1a1ea854fb2c69643b0a5738440b389121ca3)) +* **core:** Filter executions by project ID in internal API ([#10976](https://github.com/n8n-io/n8n/issues/10976)) ([06d749f](https://github.com/n8n-io/n8n/commit/06d749ffa7ced503141d8b07e22c47d971eb1623)) +* **core:** Implement Dynamic Parameters within regular nodes used as AI Tools ([#10862](https://github.com/n8n-io/n8n/issues/10862)) ([ef5b7cf](https://github.com/n8n-io/n8n/commit/ef5b7cf9b77b653111eb5b1d9de8116c9f6b9f92)) +* **editor:** Do not show error for remote options when credentials aren't specified ([#10944](https://github.com/n8n-io/n8n/issues/10944)) ([9fc3699](https://github.com/n8n-io/n8n/commit/9fc3699beb0c150909889ed17740a5cd9e0461c3)) +* **editor:** Enable drag and drop in code editors (Code/SQL/HTML) ([#10888](https://github.com/n8n-io/n8n/issues/10888)) ([af9e227](https://github.com/n8n-io/n8n/commit/af9e227ad4848995b9d82c72f814dbf9d1de506f)) +* **editor:** Overhaul document title management ([#10999](https://github.com/n8n-io/n8n/issues/10999)) ([bb28956](https://github.com/n8n-io/n8n/commit/bb2895689fb006897bc244271aca6f0bfa1839b9)) +* **editor:** Remove execution annotation feature flag ([#11020](https://github.com/n8n-io/n8n/issues/11020)) ([e7199db](https://github.com/n8n-io/n8n/commit/e7199dbfccdbdf1c4273f916e3006ca610c230e9)) +* **editor:** Support node-creator actions for vector store nodes ([#11032](https://github.com/n8n-io/n8n/issues/11032)) ([72b70d9](https://github.com/n8n-io/n8n/commit/72b70d9d98daeba654baf6785ff1ae234c73c977)) +* **Google BigQuery Node:** Return numeric values as integers ([#10943](https://github.com/n8n-io/n8n/issues/10943)) 
([d7c1d24](https://github.com/n8n-io/n8n/commit/d7c1d24f74648740b2f425640909037ba06c5030)) +* **Invoice Ninja Node:** Add more query params to getAll requests ([#9238](https://github.com/n8n-io/n8n/issues/9238)) ([50b7238](https://github.com/n8n-io/n8n/commit/50b723836e70bbe405594f690b73057f9c33fbe4)) +* **Iterable Node:** Add support for EDC and USDC selection ([#10908](https://github.com/n8n-io/n8n/issues/10908)) ([0ca9c07](https://github.com/n8n-io/n8n/commit/0ca9c076ca51d313392e45c3b013f2e83aaea843)) +* **Question and Answer Chain Node:** Customize question and answer system prompt ([#10385](https://github.com/n8n-io/n8n/issues/10385)) ([08a27b3](https://github.com/n8n-io/n8n/commit/08a27b3148aac2282f64339ddc33ac7c90835d84)) + + + # [1.61.0](https://github.com/n8n-io/n8n/compare/n8n@1.60.0...n8n@1.61.0) (2024-09-25) diff --git a/cypress/composables/projects.ts b/cypress/composables/projects.ts index 84379088d1..da9c6fcc65 100644 --- a/cypress/composables/projects.ts +++ b/cypress/composables/projects.ts @@ -32,8 +32,6 @@ export const addProjectMember = (email: string, role?: string) => { } }; export const getResourceMoveModal = () => cy.getByTestId('project-move-resource-modal'); -export const getResourceMoveConfirmModal = () => - cy.getByTestId('project-move-resource-confirm-modal'); export const getProjectMoveSelect = () => cy.getByTestId('project-move-resource-modal-select'); export function createProject(name: string) { diff --git a/cypress/composables/workflow.ts b/cypress/composables/workflow.ts index 8d37d5f2ad..394a35af18 100644 --- a/cypress/composables/workflow.ts +++ b/cypress/composables/workflow.ts @@ -144,6 +144,12 @@ export function addToolNodeToParent(nodeName: string, parentNodeName: string) { export function addOutputParserNodeToParent(nodeName: string, parentNodeName: string) { addSupplementalNodeToParent(nodeName, 'ai_outputParser', parentNodeName); } +export function addVectorStoreNodeToParent(nodeName: string, parentNodeName: string) { + 
addSupplementalNodeToParent(nodeName, 'ai_vectorStore', parentNodeName); +} +export function addRetrieverNodeToParent(nodeName: string, parentNodeName: string) { + addSupplementalNodeToParent(nodeName, 'ai_retriever', parentNodeName); +} export function clickExecuteWorkflowButton() { getExecuteWorkflowButton().click(); diff --git a/cypress/e2e/1-workflows.cy.ts b/cypress/e2e/1-workflows.cy.ts index 6835346012..a6683bbee4 100644 --- a/cypress/e2e/1-workflows.cy.ts +++ b/cypress/e2e/1-workflows.cy.ts @@ -73,4 +73,28 @@ describe('Workflows', () => { WorkflowsPage.getters.newWorkflowButtonCard().should('be.visible'); }); + + it('should respect tag querystring filter when listing workflows', () => { + WorkflowsPage.getters.newWorkflowButtonCard().click(); + + cy.createFixtureWorkflow('Test_workflow_2.json', getUniqueWorkflowName('My New Workflow')); + + cy.visit(WorkflowsPage.url); + + WorkflowsPage.getters.createWorkflowButton().click(); + + cy.createFixtureWorkflow('Test_workflow_1.json', 'Empty State Card Workflow'); + + cy.visit(WorkflowsPage.url); + + WorkflowsPage.getters.workflowFilterButton().click(); + + WorkflowsPage.getters.workflowTagsDropdown().click(); + + WorkflowsPage.getters.workflowTagItem('some-tag-1').click(); + + cy.reload(); + + WorkflowsPage.getters.workflowCards().should('have.length', 1); + }); }); diff --git a/cypress/e2e/26-resource-locator.cy.ts b/cypress/e2e/26-resource-locator.cy.ts index 6e431690ad..124e322c2b 100644 --- a/cypress/e2e/26-resource-locator.cy.ts +++ b/cypress/e2e/26-resource-locator.cy.ts @@ -65,7 +65,7 @@ describe('Resource Locator', () => { }); it('should show appropriate errors when search filter is required', () => { - workflowPage.actions.addNodeToCanvas('Github', true, true, 'On Pull Request'); + workflowPage.actions.addNodeToCanvas('Github', true, true, 'On pull request'); ndv.getters.resourceLocator('owner').should('be.visible'); ndv.getters.resourceLocatorInput('owner').click(); 
ndv.getters.resourceLocatorErrorMessage().should('contain', NO_CREDENTIALS_MESSAGE); diff --git a/cypress/e2e/39-projects.cy.ts b/cypress/e2e/39-projects.cy.ts index 59ed6bcb84..4e3bb583df 100644 --- a/cypress/e2e/39-projects.cy.ts +++ b/cypress/e2e/39-projects.cy.ts @@ -1,5 +1,11 @@ import * as projects from '../composables/projects'; -import { INSTANCE_MEMBERS, MANUAL_TRIGGER_NODE_NAME, NOTION_NODE_NAME } from '../constants'; +import { + INSTANCE_ADMIN, + INSTANCE_MEMBERS, + INSTANCE_OWNER, + MANUAL_TRIGGER_NODE_NAME, + NOTION_NODE_NAME, +} from '../constants'; import { WorkflowsPage, WorkflowPage, @@ -481,44 +487,15 @@ describe('Projects', { disableAutoLogin: true }, () => { projects .getResourceMoveModal() .should('be.visible') - .find('button:contains("Next")') + .find('button:contains("Move workflow")') .should('be.disabled'); projects.getProjectMoveSelect().click(); getVisibleSelect() .find('li') - .should('have.length', 2) - .first() - .should('contain.text', 'Project 1') - .click(); - projects.getResourceMoveModal().find('button:contains("Next")').click(); - - projects - .getResourceMoveConfirmModal() - .should('be.visible') - .find('button:contains("Confirm")') - .should('be.disabled'); - - projects - .getResourceMoveConfirmModal() - .find('input[type="checkbox"]') - .first() - .parents('label') - .click(); - projects - .getResourceMoveConfirmModal() - .find('button:contains("Confirm")') - .should('be.disabled'); - projects - .getResourceMoveConfirmModal() - .find('input[type="checkbox"]') - .last() - .parents('label') - .click(); - projects - .getResourceMoveConfirmModal() - .find('button:contains("Confirm")') - .should('not.be.disabled') + .should('have.length', 5) + .filter(':contains("Project 1")') .click(); + projects.getResourceMoveModal().find('button:contains("Move workflow")').click(); workflowsPage.getters .workflowCards() @@ -526,9 +503,77 @@ describe('Projects', { disableAutoLogin: true }, () => { .filter(':contains("Owned by me")') 
.should('not.exist'); - // Move the credential from Project 1 to Project 2 + // Move the workflow from Project 1 to Project 2 projects.getMenuItems().first().click(); workflowsPage.getters.workflowCards().should('have.length', 2); + workflowsPage.getters.workflowCardActions('Workflow in Home project').click(); + workflowsPage.getters.workflowMoveButton().click(); + + projects + .getResourceMoveModal() + .should('be.visible') + .find('button:contains("Move workflow")') + .should('be.disabled'); + projects.getProjectMoveSelect().click(); + getVisibleSelect() + .find('li') + .should('have.length', 5) + .filter(':contains("Project 2")') + .click(); + projects.getResourceMoveModal().find('button:contains("Move workflow")').click(); + + // Move the workflow from Project 2 to a member user + projects.getMenuItems().last().click(); + workflowsPage.getters.workflowCards().should('have.length', 2); + workflowsPage.getters.workflowCardActions('Workflow in Home project').click(); + workflowsPage.getters.workflowMoveButton().click(); + + projects + .getResourceMoveModal() + .should('be.visible') + .find('button:contains("Move workflow")') + .should('be.disabled'); + projects.getProjectMoveSelect().click(); + getVisibleSelect() + .find('li') + .should('have.length', 5) + .filter(`:contains("${INSTANCE_MEMBERS[0].email}")`) + .click(); + + projects.getResourceMoveModal().find('button:contains("Move workflow")').click(); + workflowsPage.getters.workflowCards().should('have.length', 1); + + // Move the workflow from member user back to Home + projects.getHomeButton().click(); + workflowsPage.getters + .workflowCards() + .should('have.length', 3) + .filter(':has(.n8n-badge:contains("Project"))') + .should('have.length', 2); + workflowsPage.getters.workflowCardActions('Workflow in Home project').click(); + workflowsPage.getters.workflowMoveButton().click(); + + projects + .getResourceMoveModal() + .should('be.visible') + .find('button:contains("Move workflow")') + 
.should('be.disabled'); + projects.getProjectMoveSelect().click(); + getVisibleSelect() + .find('li') + .should('have.length', 5) + .filter(`:contains("${INSTANCE_OWNER.email}")`) + .click(); + + projects.getResourceMoveModal().find('button:contains("Move workflow")').click(); + workflowsPage.getters + .workflowCards() + .should('have.length', 3) + .filter(':contains("Owned by me")') + .should('have.length', 1); + + // Move the credential from Project 1 to Project 2 + projects.getMenuItems().first().click(); projects.getProjectTabCredentials().click(); credentialsPage.getters.credentialCards().should('have.length', 1); credentialsPage.getters.credentialCardActions('Credential in Project 1').click(); @@ -537,48 +582,162 @@ describe('Projects', { disableAutoLogin: true }, () => { projects .getResourceMoveModal() .should('be.visible') - .find('button:contains("Next")') + .find('button:contains("Move credential")') .should('be.disabled'); projects.getProjectMoveSelect().click(); getVisibleSelect() .find('li') - .should('have.length', 1) - .first() - .should('contain.text', 'Project 2') + .should('have.length', 5) + .filter(':contains("Project 2")') .click(); - projects.getResourceMoveModal().find('button:contains("Next")').click(); + projects.getResourceMoveModal().find('button:contains("Move credential")').click(); - projects - .getResourceMoveConfirmModal() - .should('be.visible') - .find('button:contains("Confirm")') - .should('be.disabled'); - - projects - .getResourceMoveConfirmModal() - .find('input[type="checkbox"]') - .first() - .parents('label') - .click(); - projects - .getResourceMoveConfirmModal() - .find('button:contains("Confirm")') - .should('be.disabled'); - projects - .getResourceMoveConfirmModal() - .find('input[type="checkbox"]') - .last() - .parents('label') - .click(); - projects - .getResourceMoveConfirmModal() - .find('button:contains("Confirm")') - .should('not.be.disabled') - .click(); 
credentialsPage.getters.credentialCards().should('not.have.length'); + + // Move the credential from Project 2 to admin user projects.getMenuItems().last().click(); projects.getProjectTabCredentials().click(); credentialsPage.getters.credentialCards().should('have.length', 2); + + credentialsPage.getters.credentialCardActions('Credential in Project 1').click(); + credentialsPage.getters.credentialMoveButton().click(); + + projects + .getResourceMoveModal() + .should('be.visible') + .find('button:contains("Move credential")') + .should('be.disabled'); + projects.getProjectMoveSelect().click(); + getVisibleSelect() + .find('li') + .should('have.length', 5) + .filter(`:contains("${INSTANCE_ADMIN.email}")`) + .click(); + projects.getResourceMoveModal().find('button:contains("Move credential")').click(); + credentialsPage.getters.credentialCards().should('have.length', 1); + + // Move the credential from admin user back to instance owner + projects.getHomeButton().click(); + projects.getProjectTabCredentials().click(); + credentialsPage.getters.credentialCards().should('have.length', 3); + + credentialsPage.getters.credentialCardActions('Credential in Project 1').click(); + credentialsPage.getters.credentialMoveButton().click(); + + projects + .getResourceMoveModal() + .should('be.visible') + .find('button:contains("Move credential")') + .should('be.disabled'); + projects.getProjectMoveSelect().click(); + getVisibleSelect() + .find('li') + .should('have.length', 5) + .filter(`:contains("${INSTANCE_OWNER.email}")`) + .click(); + projects.getResourceMoveModal().find('button:contains("Move credential")').click(); + + credentialsPage.getters + .credentialCards() + .should('have.length', 3) + .filter(':contains("Owned by me")') + .should('have.length', 2); + + // Move the credential from admin user back to its original project (Project 1) + credentialsPage.getters.credentialCardActions('Credential in Project 1').click(); + 
credentialsPage.getters.credentialMoveButton().click(); + + projects + .getResourceMoveModal() + .should('be.visible') + .find('button:contains("Move credential")') + .should('be.disabled'); + projects.getProjectMoveSelect().click(); + getVisibleSelect() + .find('li') + .should('have.length', 5) + .filter(':contains("Project 1")') + .click(); + projects.getResourceMoveModal().find('button:contains("Move credential")').click(); + + projects.getMenuItems().first().click(); + projects.getProjectTabCredentials().click(); + credentialsPage.getters + .credentialCards() + .filter(':contains("Credential in Project 1")') + .should('have.length', 1); + }); + + it('should allow to change inaccessible credential when the workflow was moved to a team project', () => { + cy.signinAsOwner(); + cy.visit(workflowsPage.url); + + // Create a credential in the Home project + projects.getProjectTabCredentials().should('be.visible').click(); + credentialsPage.getters.emptyListCreateCredentialButton().click(); + projects.createCredential('Credential in Home project'); + + // Create a workflow in the Home project + projects.getHomeButton().click(); + workflowsPage.getters.workflowCards().should('not.have.length'); + workflowsPage.getters.newWorkflowButtonCard().click(); + workflowsPage.getters.workflowCards().should('not.have.length'); + + workflowsPage.getters.newWorkflowButtonCard().click(); + workflowPage.actions.addNodeToCanvas(MANUAL_TRIGGER_NODE_NAME); + workflowPage.actions.addNodeToCanvas(NOTION_NODE_NAME, true, true); + ndv.getters.backToCanvas().click(); + workflowPage.actions.saveWorkflowOnButtonClick(); + + // Create a project and add a user to it + projects.createProject('Project 1'); + projects.addProjectMember(INSTANCE_MEMBERS[0].email); + projects.getProjectSettingsSaveButton().click(); + + // Move the workflow from Home to Project 1 + projects.getHomeButton().click(); + workflowsPage.getters + .workflowCards() + .should('have.length', 1) + .filter(':contains("Owned by 
me")') + .should('exist'); + workflowsPage.getters.workflowCardActions('My workflow').click(); + workflowsPage.getters.workflowMoveButton().click(); + + projects + .getResourceMoveModal() + .should('be.visible') + .find('button:contains("Move workflow")') + .should('be.disabled'); + projects.getProjectMoveSelect().click(); + getVisibleSelect() + .find('li') + .should('have.length', 4) + .filter(':contains("Project 1")') + .click(); + projects.getResourceMoveModal().find('button:contains("Move workflow")').click(); + + workflowsPage.getters + .workflowCards() + .should('have.length', 1) + .filter(':contains("Owned by me")') + .should('not.exist'); + + //Log out with instance owner and log in with the member user + mainSidebar.actions.openUserMenu(); + cy.getByTestId('user-menu-item-logout').click(); + + cy.get('input[name="email"]').type(INSTANCE_MEMBERS[0].email); + cy.get('input[name="password"]').type(INSTANCE_MEMBERS[0].password); + cy.getByTestId('form-submit-button').click(); + + // Open the moved workflow + workflowsPage.getters.workflowCards().should('have.length', 1); + workflowsPage.getters.workflowCards().first().click(); + + // Check if the credential can be changed + workflowPage.getters.canvasNodeByName(NOTION_NODE_NAME).should('be.visible').dblclick(); + ndv.getters.credentialInput().find('input').should('be.enabled'); }); it('should handle viewer role', () => { diff --git a/cypress/e2e/4-node-creator.cy.ts b/cypress/e2e/4-node-creator.cy.ts index 9dfe128322..a2cd5968d1 100644 --- a/cypress/e2e/4-node-creator.cy.ts +++ b/cypress/e2e/4-node-creator.cy.ts @@ -1,3 +1,9 @@ +import { + addNodeToCanvas, + addRetrieverNodeToParent, + addVectorStoreNodeToParent, + getNodeCreatorItems, +} from '../composables/workflow'; import { IF_NODE_NAME } from '../constants'; import { NodeCreator } from '../pages/features/node-creator'; import { NDV } from '../pages/ndv'; @@ -504,4 +510,38 @@ describe('Node Creator', () => { 
nodeCreatorFeature.getters.searchBar().find('input').clear().type('gith'); nodeCreatorFeature.getters.nodeItemName().first().should('have.text', 'GitHub'); }); + + it('should show vector stores actions', () => { + const actions = [ + 'Get ranked documents from vector store', + 'Add documents to vector store', + 'Retrieve documents for AI processing', + ]; + + nodeCreatorFeature.actions.openNodeCreator(); + + nodeCreatorFeature.getters.searchBar().find('input').clear().type('Vector Store'); + + getNodeCreatorItems().then((items) => { + const vectorStores = items.map((_i, el) => el.innerText); + + // Loop over all vector stores and check if they have the three actions + vectorStores.each((_i, vectorStore) => { + nodeCreatorFeature.getters.getCreatorItem(vectorStore).click(); + actions.forEach((action) => { + nodeCreatorFeature.getters.getCreatorItem(action).should('be.visible'); + }); + cy.realPress('ArrowLeft'); + }); + }); + }); + + it('should add node directly for sub-connection', () => { + addNodeToCanvas('Question and Answer Chain', true); + addRetrieverNodeToParent('Vector Store Retriever', 'Question and Answer Chain'); + cy.realPress('Escape'); + addVectorStoreNodeToParent('In-Memory Vector Store', 'Vector Store Retriever'); + cy.realPress('Escape'); + WorkflowPage.getters.canvasNodes().should('have.length', 4); + }); }); diff --git a/lefthook.yml b/lefthook.yml index cc39a32495..aa17417824 100644 --- a/lefthook.yml +++ b/lefthook.yml @@ -8,7 +8,7 @@ pre-commit: - merge - rebase prettier_check: - glob: 'packages/**/*.{vue,yml,md}' + glob: 'packages/**/*.{vue,yml,md,css,scss}' run: ./node_modules/.bin/prettier --write --ignore-unknown --no-error-on-unmatched-pattern {staged_files} stage_fixed: true skip: diff --git a/package.json b/package.json index 8253d86dfd..feda1c4701 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "n8n-monorepo", - "version": "1.61.0", + "version": "1.62.1", "private": true, "engines": { "node": ">=20.15", @@ -15,7 
+15,7 @@ "build:frontend": "turbo run build:frontend", "build:nodes": "turbo run build:nodes", "typecheck": "turbo typecheck", - "dev": "turbo run dev --parallel --env-mode=loose --filter=!n8n-design-system --filter=!@n8n/chat", + "dev": "turbo run dev --parallel --env-mode=loose --filter=!n8n-design-system --filter=!@n8n/chat --filter=!@n8n/task-runner", "dev:ai": "turbo run dev --parallel --env-mode=loose --filter=@n8n/nodes-langchain --filter=n8n --filter=n8n-core", "clean": "turbo run clean --parallel", "reset": "node scripts/ensure-zx.mjs && zx scripts/reset.mjs", @@ -59,7 +59,7 @@ "ts-jest": "^29.1.1", "tsc-alias": "^1.8.7", "tsc-watch": "^6.0.4", - "turbo": "2.0.6", + "turbo": "2.1.2", "typescript": "*", "zx": "^8.1.4" }, diff --git a/packages/@n8n/api-types/src/frontend-settings.ts b/packages/@n8n/api-types/src/frontend-settings.ts index d50408bc3c..c70826d4d1 100644 --- a/packages/@n8n/api-types/src/frontend-settings.ts +++ b/packages/@n8n/api-types/src/frontend-settings.ts @@ -33,6 +33,7 @@ export interface FrontendSettings { endpointFormWaiting: string; endpointWebhook: string; endpointWebhookTest: string; + endpointWebhookWaiting: string; saveDataErrorExecution: WorkflowSettings.SaveDataExecution; saveDataSuccessExecution: WorkflowSettings.SaveDataExecution; saveManualExecutions: boolean; diff --git a/packages/@n8n/benchmark/package.json b/packages/@n8n/benchmark/package.json index f0b11ddeb2..2b6979fa45 100644 --- a/packages/@n8n/benchmark/package.json +++ b/packages/@n8n/benchmark/package.json @@ -1,6 +1,6 @@ { "name": "@n8n/n8n-benchmark", - "version": "1.5.0", + "version": "1.6.1", "description": "Cli for running benchmark tests for n8n", "main": "dist/index", "scripts": { diff --git a/packages/@n8n/benchmark/scenarios/js-code-node-once-for-each/js-code-node-once-for-each.manifest.json b/packages/@n8n/benchmark/scenarios/js-code-node-once-for-each/js-code-node-once-for-each.manifest.json deleted file mode 100644 index 1d768f706e..0000000000 --- 
a/packages/@n8n/benchmark/scenarios/js-code-node-once-for-each/js-code-node-once-for-each.manifest.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "$schema": "../scenario.schema.json", - "name": "CodeNodeJsOnceForEach", - "description": "A JS Code Node that runs once for each item and adds, modifies and removes properties. The data of 5 items is generated using DebugHelper Node, and returned with RespondToWebhook Node.", - "scenarioData": { "workflowFiles": ["js-code-node-once-for-each.json"] }, - "scriptPath": "js-code-node-once-for-each.script.js" -} diff --git a/packages/@n8n/benchmark/scenarios/js-code-node-once-for-each/js-code-node-once-for-each.json b/packages/@n8n/benchmark/scenarios/js-code-node/js-code-node.json similarity index 58% rename from packages/@n8n/benchmark/scenarios/js-code-node-once-for-each/js-code-node-once-for-each.json rename to packages/@n8n/benchmark/scenarios/js-code-node/js-code-node.json index 7b89ffde96..d6f30ac5ea 100644 --- a/packages/@n8n/benchmark/scenarios/js-code-node-once-for-each/js-code-node-once-for-each.json +++ b/packages/@n8n/benchmark/scenarios/js-code-node/js-code-node.json @@ -1,9 +1,31 @@ { "createdAt": "2024-08-06T12:19:51.268Z", "updatedAt": "2024-08-06T12:20:45.000Z", - "name": "JS Code Node Once For Each", + "name": "JS Code Node", "active": true, "nodes": [ + { + "parameters": { + "respondWith": "allIncomingItems", + "options": {} + }, + "type": "n8n-nodes-base.respondToWebhook", + "typeVersion": 1.1, + "position": [1280, 460], + "id": "0067e317-09b8-478a-8c50-e19b4c9e294c", + "name": "Respond to Webhook" + }, + { + "parameters": { + "mode": "runOnceForEachItem", + "jsCode": "// Add new field\n$input.item.json.age = 10 + Math.floor(Math.random() * 30);\n// Mutate existing field\n$input.item.json.password = $input.item.json.password.split('').map(() => '*').join(\"\")\n// Remove field\ndelete $input.item.json.lastname\n// New object field\nconst emailParts = 
$input.item.json.email.split(\"@\")\n$input.item.json.emailData = {\n user: emailParts[0],\n domain: emailParts[1]\n}\n\nreturn $input.item;" + }, + "type": "n8n-nodes-base.code", + "typeVersion": 2, + "position": [1040, 460], + "id": "56d751c0-0d30-43c3-89fa-bebf3a9d436f", + "name": "OnceForEachItemJSCode" + }, { "parameters": { "httpMethod": "POST", @@ -13,68 +35,23 @@ }, "type": "n8n-nodes-base.webhook", "typeVersion": 2, - "position": [0, 0], - "id": "849350b3-4212-4416-a462-1cf331157d37", + "position": [580, 460], + "id": "417d749d-156c-4ffe-86ea-336f702dc5da", "name": "Webhook", "webhookId": "34ca1895-ccf4-4a4a-8bb8-a042f5edb567" }, { "parameters": { - "respondWith": "allIncomingItems", - "options": {} - }, - "type": "n8n-nodes-base.respondToWebhook", - "typeVersion": 1.1, - "position": [660, 0], - "id": "f0660aa1-8a65-490f-b5cd-f8d134070c13", - "name": "Respond to Webhook" - }, - { - "parameters": { - "category": "randomData", - "randomDataCount": 5 - }, - "type": "n8n-nodes-base.debugHelper", - "typeVersion": 1, - "position": [220, 0], - "id": "50f1efe8-bd2d-4061-9f51-b38c0e3daeb2", - "name": "DebugHelper" - }, - { - "parameters": { - "mode": "runOnceForEachItem", - "jsCode": "// Add new field\n$input.item.json.age = 10 + Math.floor(Math.random() * 30);\n// Mutate existing field\n$input.item.json.password = $input.item.json.password.split('').map(() => '*').join(\"\")\n// Remove field\ndelete $input.item.json.lastname\n// New object field\nconst emailParts = $input.item.json.email.split(\"@\")\n$input.item.json.emailData = {\n user: emailParts[0],\n domain: emailParts[1]\n}\n\nreturn $input.item;" + "jsCode": "const digits = '0123456789';\nconst uppercaseLetters = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ';\nconst lowercaseLetters = uppercaseLetters.toLowerCase();\nconst alphabet = [digits, uppercaseLetters, lowercaseLetters].join('').split('')\n\nconst randomInt = (min, max) => Math.floor(Math.random() * (max - min + 1)) + min;\nconst randomItem = (arr) => 
arr.at(randomInt(0, arr.length - 1))\nconst randomString = (len) => Array.from({ length: len }).map(() => randomItem(alphabet)).join('')\n\nconst randomUid = () => [8,4,4,4,8].map(len => randomString(len)).join(\"-\")\nconst randomEmail = () => `${randomString(8)}@${randomString(10)}.com`\n\nconst randomPerson = () => ({\n uid: randomUid(),\n email: randomEmail(),\n firstname: randomString(5),\n lastname: randomString(12),\n password: randomString(10)\n})\n\nreturn Array.from({ length: 100 }).map(() => ({\n json: randomPerson()\n}))" }, + "id": "c30db155-73ca-48b9-8860-c3fe7a0926fb", + "name": "Code", "type": "n8n-nodes-base.code", "typeVersion": 2, - "position": [440, 0], - "id": "f9f2f865-e228-403d-8e47-72308359e207", - "name": "OnceForEachItemJSCode" + "position": [820, 460] } ], "connections": { - "Webhook": { - "main": [ - [ - { - "node": "DebugHelper", - "type": "main", - "index": 0 - } - ] - ] - }, - "DebugHelper": { - "main": [ - [ - { - "node": "OnceForEachItemJSCode", - "type": "main", - "index": 0 - } - ] - ] - }, "OnceForEachItemJSCode": { "main": [ [ @@ -85,6 +62,28 @@ } ] ] + }, + "Webhook": { + "main": [ + [ + { + "node": "Code", + "type": "main", + "index": 0 + } + ] + ] + }, + "Code": { + "main": [ + [ + { + "node": "OnceForEachItemJSCode", + "type": "main", + "index": 0 + } + ] + ] } }, "settings": { "executionOrder": "v1" }, diff --git a/packages/@n8n/benchmark/scenarios/js-code-node/js-code-node.manifest.json b/packages/@n8n/benchmark/scenarios/js-code-node/js-code-node.manifest.json new file mode 100644 index 0000000000..8b0165baf7 --- /dev/null +++ b/packages/@n8n/benchmark/scenarios/js-code-node/js-code-node.manifest.json @@ -0,0 +1,7 @@ +{ + "$schema": "../scenario.schema.json", + "name": "CodeNodeJs", + "description": "A JS Code Node that first generates 100 items and then runs once for each item and adds, modifies and removes properties. 
The data is returned with the RespondToWebhook Node.", + "scenarioData": { "workflowFiles": ["js-code-node.json"] }, + "scriptPath": "js-code-node.script.js" +} diff --git a/packages/@n8n/benchmark/scenarios/js-code-node-once-for-each/js-code-node-once-for-each.script.js b/packages/@n8n/benchmark/scenarios/js-code-node/js-code-node.script.js similarity index 88% rename from packages/@n8n/benchmark/scenarios/js-code-node-once-for-each/js-code-node-once-for-each.script.js rename to packages/@n8n/benchmark/scenarios/js-code-node/js-code-node.script.js index 11e8e87ac3..74cef4f441 100644 --- a/packages/@n8n/benchmark/scenarios/js-code-node-once-for-each/js-code-node-once-for-each.script.js +++ b/packages/@n8n/benchmark/scenarios/js-code-node/js-code-node.script.js @@ -12,7 +12,7 @@ export default function () { try { const body = JSON.parse(r.body); - return Array.isArray(body) ? body.length === 5 : false; + return Array.isArray(body) ? body.length === 100 : false; } catch (error) { console.error('Error parsing response body: ', error); return false; diff --git a/packages/@n8n/chat/package.json b/packages/@n8n/chat/package.json index 0848422a05..6f3e74fc71 100644 --- a/packages/@n8n/chat/package.json +++ b/packages/@n8n/chat/package.json @@ -1,6 +1,6 @@ { "name": "@n8n/chat", - "version": "0.26.0", + "version": "0.27.1", "scripts": { "dev": "pnpm run storybook", "build": "pnpm build:vite && pnpm build:bundle", diff --git a/packages/@n8n/chat/src/css/markdown.scss b/packages/@n8n/chat/src/css/markdown.scss index 6d219bbe02..070e6d6a5f 100644 --- a/packages/@n8n/chat/src/css/markdown.scss +++ b/packages/@n8n/chat/src/css/markdown.scss @@ -1,4 +1,20 @@ -@import 'highlight.js/styles/github.css'; +@use 'sass:meta'; + +@include meta.load-css('highlight.js/styles/github.css'); + +@mixin hljs-dark-theme { + @include meta.load-css('highlight.js/styles/github-dark-dimmed.css'); +} + +body { + &[data-theme='dark'] { + @include hljs-dark-theme; + } + + @media (prefers-color-scheme: dark) 
{ + @include hljs-dark-theme; + } +} // https://github.com/pxlrbt/markdown-css .chat-message-markdown { @@ -561,7 +577,6 @@ kbd, /* different style for kbd? */ code { - background: #eee; padding: 0.1em 0.25em; border-radius: 0.2rem; -webkit-box-decoration-break: clone; diff --git a/packages/@n8n/config/package.json b/packages/@n8n/config/package.json index e1ac104f04..6d989f8208 100644 --- a/packages/@n8n/config/package.json +++ b/packages/@n8n/config/package.json @@ -1,6 +1,6 @@ { "name": "@n8n/config", - "version": "1.11.0", + "version": "1.12.1", "scripts": { "clean": "rimraf dist .turbo", "dev": "pnpm watch", diff --git a/packages/@n8n/config/src/configs/logging.config.ts b/packages/@n8n/config/src/configs/logging.config.ts new file mode 100644 index 0000000000..2f68416df4 --- /dev/null +++ b/packages/@n8n/config/src/configs/logging.config.ts @@ -0,0 +1,47 @@ +import { Config, Env, Nested } from '../decorators'; +import { StringArray } from '../utils'; + +@Config +class FileLoggingConfig { + /** + * Max number of log files to keep, or max number of days to keep logs for. + * Once the limit is reached, the oldest log files will be rotated out. + * If using days, append a `d` suffix. Only for `file` log output. + * + * @example `N8N_LOG_FILE_COUNT_MAX=7` will keep at most 7 files. + * @example `N8N_LOG_FILE_COUNT_MAX=7d` will keep at most 7 days worth of files. + */ + @Env('N8N_LOG_FILE_COUNT_MAX') + fileCountMax: number = 100; + + /** Max size (in MiB) for each log file. Only for `file` log output. */ + @Env('N8N_LOG_FILE_SIZE_MAX') + fileSizeMax: number = 16; + + /** Location of the log files inside `~/.n8n`. Only for `file` log output. */ + @Env('N8N_LOG_FILE_LOCATION') + location: string = 'logs/n8n.log'; +} + +@Config +export class LoggingConfig { + /** + * Minimum level of logs to output. Logs with this or higher level will be output; + * logs with lower levels will not. Exception: `silent` disables all logging. 
+ * + * @example `N8N_LOG_LEVEL=info` will output `error`, `warn` and `info` logs, but not `debug`. + */ + @Env('N8N_LOG_LEVEL') + level: 'error' | 'warn' | 'info' | 'debug' | 'silent' = 'info'; + + /** + * Where to output logs to. Options are: `console` or `file` or both in a comma separated list. + * + * @example `N8N_LOG_OUTPUT=console,file` will output to both console and file. + */ + @Env('N8N_LOG_OUTPUT') + outputs: StringArray<'console' | 'file'> = ['console']; + + @Nested + file: FileLoggingConfig; +} diff --git a/packages/@n8n/config/src/configs/runners.config.ts b/packages/@n8n/config/src/configs/runners.config.ts new file mode 100644 index 0000000000..e7335e8827 --- /dev/null +++ b/packages/@n8n/config/src/configs/runners.config.ts @@ -0,0 +1,22 @@ +import { Config, Env } from '../decorators'; + +@Config +export class TaskRunnersConfig { + // Defaults to true for now + @Env('N8N_RUNNERS_DISABLED') + disabled: boolean = true; + + @Env('N8N_RUNNERS_PATH') + path: string = '/runners'; + + @Env('N8N_RUNNERS_AUTH_TOKEN') + authToken: string = ''; + + /** Port the task runners server should listen on */ + @Env('N8N_RUNNERS_SERVER_PORT') + port: number = 5679; + + /** IP address task runners server should listen on */ + @Env('N8N_RUNNERS_SERVER_LISTEN_ADDRESS') + listen_address: string = '127.0.0.1'; +} diff --git a/packages/@n8n/config/src/configs/scaling-mode.config.ts b/packages/@n8n/config/src/configs/scaling-mode.config.ts index 750de77b07..a1f5b2a7d6 100644 --- a/packages/@n8n/config/src/configs/scaling-mode.config.ts +++ b/packages/@n8n/config/src/configs/scaling-mode.config.ts @@ -6,9 +6,13 @@ class HealthConfig { @Env('QUEUE_HEALTH_CHECK_ACTIVE') active: boolean = false; - /** Port for worker to respond to health checks requests on, if enabled. */ + /** Port for worker server to listen on. */ @Env('QUEUE_HEALTH_CHECK_PORT') port: number = 5678; + + /** IP address for worker server to listen on. 
*/ + @Env('N8N_WORKER_SERVER_ADDRESS') + address: string = '0.0.0.0'; } @Config diff --git a/packages/@n8n/config/src/index.ts b/packages/@n8n/config/src/index.ts index 5098093db4..3290cac5bb 100644 --- a/packages/@n8n/config/src/index.ts +++ b/packages/@n8n/config/src/index.ts @@ -5,8 +5,11 @@ import { EndpointsConfig } from './configs/endpoints.config'; import { EventBusConfig } from './configs/event-bus.config'; import { ExternalSecretsConfig } from './configs/external-secrets.config'; import { ExternalStorageConfig } from './configs/external-storage.config'; +import { LoggingConfig } from './configs/logging.config'; import { NodesConfig } from './configs/nodes.config'; import { PublicApiConfig } from './configs/public-api.config'; +import { TaskRunnersConfig } from './configs/runners.config'; +export { TaskRunnersConfig } from './configs/runners.config'; import { ScalingModeConfig } from './configs/scaling-mode.config'; import { SentryConfig } from './configs/sentry.config'; import { TemplatesConfig } from './configs/templates.config'; @@ -81,4 +84,10 @@ export class GlobalConfig { @Nested queue: ScalingModeConfig; + + @Nested + logging: LoggingConfig; + + @Nested + taskRunners: TaskRunnersConfig; } diff --git a/packages/@n8n/config/src/utils.ts b/packages/@n8n/config/src/utils.ts new file mode 100644 index 0000000000..c90fcb8266 --- /dev/null +++ b/packages/@n8n/config/src/utils.ts @@ -0,0 +1,7 @@ +export class StringArray extends Array { + constructor(str: string) { + super(); + const parsed = str.split(',') as StringArray; + return parsed.every((i) => typeof i === 'string') ? 
parsed : []; + } +} diff --git a/packages/@n8n/config/test/config.test.ts b/packages/@n8n/config/test/config.test.ts index 11fd97a5db..a0952d0dd0 100644 --- a/packages/@n8n/config/test/config.test.ts +++ b/packages/@n8n/config/test/config.test.ts @@ -198,6 +198,7 @@ describe('GlobalConfig', () => { health: { active: false, port: 5678, + address: '0.0.0.0', }, bull: { redis: { @@ -221,10 +222,26 @@ describe('GlobalConfig', () => { }, }, }, + taskRunners: { + disabled: true, + path: '/runners', + authToken: '', + listen_address: '127.0.0.1', + port: 5679, + }, sentry: { backendDsn: '', frontendDsn: '', }, + logging: { + level: 'info', + outputs: ['console'], + file: { + fileCountMax: 100, + fileSizeMax: 16, + location: 'logs/n8n.log', + }, + }, }; it('should use all default values when no env variables are defined', () => { diff --git a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ToolsAgent/execute.ts b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ToolsAgent/execute.ts index a6dc4a63f2..90952bac41 100644 --- a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ToolsAgent/execute.ts +++ b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ToolsAgent/execute.ts @@ -1,27 +1,28 @@ -import { BINARY_ENCODING, NodeConnectionType, NodeOperationError } from 'n8n-workflow'; -import type { IExecuteFunctions, INodeExecutionData } from 'n8n-workflow'; - -import type { AgentAction, AgentFinish } from 'langchain/agents'; -import { AgentExecutor, createToolCallingAgent } from 'langchain/agents'; import type { BaseChatMemory } from '@langchain/community/memory/chat_memory'; +import { HumanMessage } from '@langchain/core/messages'; +import type { BaseMessage } from '@langchain/core/messages'; +import type { BaseOutputParser, StructuredOutputParser } from '@langchain/core/output_parsers'; import type { BaseMessagePromptTemplateLike } from '@langchain/core/prompts'; import { ChatPromptTemplate } from '@langchain/core/prompts'; -import { omit } from 
'lodash'; +import { RunnableSequence } from '@langchain/core/runnables'; import type { Tool } from '@langchain/core/tools'; import { DynamicStructuredTool } from '@langchain/core/tools'; +import type { AgentAction, AgentFinish } from 'langchain/agents'; +import { AgentExecutor, createToolCallingAgent } from 'langchain/agents'; +import { OutputFixingParser } from 'langchain/output_parsers'; +import { omit } from 'lodash'; +import { BINARY_ENCODING, jsonParse, NodeConnectionType, NodeOperationError } from 'n8n-workflow'; +import type { IExecuteFunctions, INodeExecutionData } from 'n8n-workflow'; import type { ZodObject } from 'zod'; import { z } from 'zod'; -import type { BaseOutputParser, StructuredOutputParser } from '@langchain/core/output_parsers'; -import { OutputFixingParser } from 'langchain/output_parsers'; -import { HumanMessage } from '@langchain/core/messages'; -import { RunnableSequence } from '@langchain/core/runnables'; + +import { SYSTEM_MESSAGE } from './prompt'; import { isChatInstance, getPromptInputByType, getOptionalOutputParsers, getConnectedTools, } from '../../../../../utils/helpers'; -import { SYSTEM_MESSAGE } from './prompt'; function getOutputParserSchema(outputParser: BaseOutputParser): ZodObject { const parserType = outputParser.lc_namespace[outputParser.lc_namespace.length - 1]; @@ -74,6 +75,39 @@ async function extractBinaryMessages(ctx: IExecuteFunctions) { content: [...binaryMessages], }); } +/** + * Fixes empty content messages in agent steps. + * + * This function is necessary when using RunnableSequence.from in LangChain. + * If a tool doesn't have any arguments, LangChain returns input: '' (empty string). + * This can throw an error for some providers (like Anthropic) which expect the input to always be an object. + * This function replaces empty string inputs with empty objects to prevent such errors. 
+ * + * @param steps - The agent steps to fix + * @returns The fixed agent steps + */ +function fixEmptyContentMessage(steps: AgentFinish | AgentAction[]) { + if (!Array.isArray(steps)) return steps; + + steps.forEach((step) => { + if ('messageLog' in step && step.messageLog !== undefined) { + if (Array.isArray(step.messageLog)) { + step.messageLog.forEach((message: BaseMessage) => { + if ('content' in message && Array.isArray(message.content)) { + // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access + (message.content as Array<{ input?: string | object }>).forEach((content) => { + if (content.input === '') { + content.input = {}; + } + }); + } + }); + } + } + }); + + return steps; +} export async function toolsAgentExecute(this: IExecuteFunctions): Promise { this.logger.debug('Executing Tools Agent'); @@ -156,6 +190,14 @@ export async function toolsAgentExecute(this: IExecuteFunctions): Promise) { + return { + returnValues: memory ? { output: JSON.stringify(output) } : output, + log: 'Final response formatted', + }; + } async function agentStepsParser( steps: AgentFinish | AgentAction[], ): Promise { @@ -168,24 +210,18 @@ export async function toolsAgentExecute(this: IExecuteFunctions): Promise; - return { - returnValues, - log: 'Final response formatted', - }; + return handleParsedStepOutput(returnValues); } } - // If the steps are an AgentFinish and the outputParser is defined it must mean that the LLM didn't use `format_final_response` tool so we will parse the output manually + + // If the steps are an AgentFinish and the outputParser is defined it must mean that the LLM didn't use `format_final_response` tool so we will try to parse the output manually if (outputParser && typeof steps === 'object' && (steps as AgentFinish).returnValues) { const finalResponse = (steps as AgentFinish).returnValues; const returnValues = (await outputParser.parse(finalResponse as unknown as string)) as Record< string, unknown >; - - return { - returnValues, - 
log: 'Final response formatted', - }; + return handleParsedStepOutput(returnValues); } return handleAgentFinishOutput(steps); } @@ -233,7 +269,7 @@ export async function toolsAgentExecute(this: IExecuteFunctions): Promise }>( + response.output as string, + ); + response.output = parsedOutput?.output ?? parsedOutput; + } + returnData.push({ json: omit( response, diff --git a/packages/@n8n/nodes-langchain/nodes/chains/TextClassifier/TextClassifier.node.ts b/packages/@n8n/nodes-langchain/nodes/chains/TextClassifier/TextClassifier.node.ts index 3c0740664c..7afc317c37 100644 --- a/packages/@n8n/nodes-langchain/nodes/chains/TextClassifier/TextClassifier.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/chains/TextClassifier/TextClassifier.node.ts @@ -1,3 +1,8 @@ +import type { BaseLanguageModel } from '@langchain/core/language_models/base'; +import { HumanMessage } from '@langchain/core/messages'; +import { SystemMessagePromptTemplate, ChatPromptTemplate } from '@langchain/core/prompts'; +import { OutputFixingParser, StructuredOutputParser } from 'langchain/output_parsers'; +import { NodeOperationError, NodeConnectionType } from 'n8n-workflow'; import type { IDataObject, IExecuteFunctions, @@ -6,14 +11,8 @@ import type { INodeType, INodeTypeDescription, } from 'n8n-workflow'; - -import { NodeConnectionType } from 'n8n-workflow'; - -import type { BaseLanguageModel } from '@langchain/core/language_models/base'; -import { HumanMessage } from '@langchain/core/messages'; -import { SystemMessagePromptTemplate, ChatPromptTemplate } from '@langchain/core/prompts'; -import { OutputFixingParser, StructuredOutputParser } from 'langchain/output_parsers'; import { z } from 'zod'; + import { getTracingConfig } from '../../../utils/tracing'; const SYSTEM_PROMPT_TEMPLATE = @@ -172,11 +171,15 @@ export class TextClassifier implements INodeType { 0, )) as BaseLanguageModel; - const categories = this.getNodeParameter('categories.categories', 0) as Array<{ + const categories = 
this.getNodeParameter('categories.categories', 0, []) as Array<{ category: string; description: string; }>; + if (categories.length === 0) { + throw new NodeOperationError(this.getNode(), 'At least one category must be defined'); + } + const options = this.getNodeParameter('options', 0, {}) as { multiClass: boolean; fallback?: string; @@ -229,6 +232,7 @@ export class TextClassifier implements INodeType { const systemPromptTemplateOpt = this.getNodeParameter( 'options.systemPromptTemplate', itemIdx, + SYSTEM_PROMPT_TEMPLATE, ) as string; const systemPromptTemplate = SystemMessagePromptTemplate.fromTemplate( `${systemPromptTemplateOpt ?? SYSTEM_PROMPT_TEMPLATE} diff --git a/packages/@n8n/nodes-langchain/nodes/document_loaders/DocumentGithubLoader/DocumentGithubLoader.node.ts b/packages/@n8n/nodes-langchain/nodes/document_loaders/DocumentGithubLoader/DocumentGithubLoader.node.ts index 7123ebbc38..916f0e7159 100644 --- a/packages/@n8n/nodes-langchain/nodes/document_loaders/DocumentGithubLoader/DocumentGithubLoader.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/document_loaders/DocumentGithubLoader/DocumentGithubLoader.node.ts @@ -109,17 +109,22 @@ export class DocumentGithubLoader implements INodeType { 0, )) as CharacterTextSplitter | undefined; + const { index } = this.addInputData(NodeConnectionType.AiDocument, [ + [{ json: { repository, branch, ignorePaths, recursive } }], + ]); const docs = new GithubRepoLoader(repository, { branch, ignorePaths: (ignorePaths ?? '').split(',').map((p) => p.trim()), recursive, accessToken: (credentials.accessToken as string) || '', + apiUrl: credentials.server as string, }); const loadedDocs = textSplitter ? 
await textSplitter.splitDocuments(await docs.load()) : await docs.load(); + this.addOutputData(NodeConnectionType.AiDocument, index, [[{ json: { loadedDocs } }]]); return { response: logWrapper(loadedDocs, this), }; diff --git a/packages/@n8n/nodes-langchain/nodes/trigger/ChatTrigger/ChatTrigger.node.ts b/packages/@n8n/nodes-langchain/nodes/trigger/ChatTrigger/ChatTrigger.node.ts index 5c53a69006..489b4fe28b 100644 --- a/packages/@n8n/nodes-langchain/nodes/trigger/ChatTrigger/ChatTrigger.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/trigger/ChatTrigger/ChatTrigger.node.ts @@ -1,4 +1,6 @@ -import { Node, NodeConnectionType } from 'n8n-workflow'; +import type { BaseChatMemory } from '@langchain/community/memory/chat_memory'; +import { pick } from 'lodash'; +import { Node, NodeConnectionType, commonCORSParameters } from 'n8n-workflow'; import type { IDataObject, IWebhookFunctions, @@ -10,10 +12,8 @@ import type { INodeProperties, } from 'n8n-workflow'; -import { pick } from 'lodash'; -import type { BaseChatMemory } from '@langchain/community/memory/chat_memory'; -import { createPage } from './templates'; import { validateAuth } from './GenericFunctions'; +import { createPage } from './templates'; import type { LoadPreviousSessionChatOption } from './types'; const CHAT_TRIGGER_PATH_IDENTIFIER = 'chat'; @@ -56,7 +56,6 @@ export class ChatTrigger extends Node { ], }, }, - supportsCORS: true, maxNodes: 1, inputs: `={{ (() => { if (!['hostedChat', 'webhook'].includes($parameter.mode)) { @@ -241,6 +240,15 @@ export class ChatTrigger extends Node { placeholder: 'Add Field', default: {}, options: [ + // CORS parameters are only valid for when chat is used in hosted or webhook mode + ...commonCORSParameters.map((p) => ({ + ...p, + displayOptions: { + show: { + '/mode': ['hostedChat', 'webhook'], + }, + }, + })), { ...allowFileUploadsOption, displayOptions: { diff --git a/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStorePGVector/VectorStorePGVector.node.ts 
b/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStorePGVector/VectorStorePGVector.node.ts index 7e14eb4887..0c9a148bec 100644 --- a/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStorePGVector/VectorStorePGVector.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStorePGVector/VectorStorePGVector.node.ts @@ -1,14 +1,16 @@ -import { type INodeProperties } from 'n8n-workflow'; import { PGVectorStore, type DistanceStrategy, type PGVectorStoreArgs, } from '@langchain/community/vectorstores/pgvector'; -import { configurePostgres } from 'n8n-nodes-base/dist/nodes/Postgres/v2/transport'; +import type { EmbeddingsInterface } from '@langchain/core/embeddings'; import type { PostgresNodeCredentials } from 'n8n-nodes-base/dist/nodes/Postgres/v2/helpers/interfaces'; +import { configurePostgres } from 'n8n-nodes-base/dist/nodes/Postgres/v2/transport'; +import type { INodeProperties } from 'n8n-workflow'; import type pg from 'pg'; -import { createVectorStoreNode } from '../shared/createVectorStoreNode'; + import { metadataFilterField } from '../../../utils/sharedFields'; +import { createVectorStoreNode } from '../shared/createVectorStoreNode'; type CollectionOptions = { useCollection?: boolean; @@ -177,13 +179,46 @@ const retrieveFields: INodeProperties[] = [ }, ]; +/** + * Extended PGVectorStore class to handle custom filtering. 
+ * This wrapper is necessary because when used as a retriever, + * similaritySearchVectorWithScore should use this.filter instead of + * expecting it from the parameter + */ +class ExtendedPGVectorStore extends PGVectorStore { + static async initialize( + embeddings: EmbeddingsInterface, + args: PGVectorStoreArgs & { dimensions?: number }, + ): Promise { + const { dimensions, ...rest } = args; + const postgresqlVectorStore = new this(embeddings, rest); + + await postgresqlVectorStore._initializeClient(); + await postgresqlVectorStore.ensureTableInDatabase(dimensions); + if (postgresqlVectorStore.collectionTableName) { + await postgresqlVectorStore.ensureCollectionTableInDatabase(); + } + + return postgresqlVectorStore; + } + + async similaritySearchVectorWithScore( + query: number[], + k: number, + filter?: PGVectorStore['FilterType'], + ) { + const mergedFilter = { ...this.filter, ...filter }; + return await super.similaritySearchVectorWithScore(query, k, mergedFilter); + } +} + export const VectorStorePGVector = createVectorStoreNode({ meta: { description: 'Work with your data in Postgresql with the PGVector extension', icon: 'file:postgres.svg', displayName: 'Postgres PGVector Store', docsUrl: - 'https://docs.n8n.io/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.vectorstoresupabase/', + 'https://docs.n8n.io/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.vectorstorepgvector/', name: 'vectorStorePGVector', credentials: [ { @@ -236,7 +271,7 @@ export const VectorStorePGVector = createVectorStoreNode({ 'cosine', ) as DistanceStrategy; - return await PGVectorStore.initialize(embeddings, config); + return await ExtendedPGVectorStore.initialize(embeddings, config); }, async populateVectorStore(context, embeddings, documents, itemIndex) { // NOTE: if you are to create the HNSW index before use, you need to consider moving the distanceStrategy field to diff --git 
a/packages/@n8n/nodes-langchain/nodes/vector_store/shared/createVectorStoreNode.ts b/packages/@n8n/nodes-langchain/nodes/vector_store/shared/createVectorStoreNode.ts index 10ea879bdd..d487969073 100644 --- a/packages/@n8n/nodes-langchain/nodes/vector_store/shared/createVectorStoreNode.ts +++ b/packages/@n8n/nodes-langchain/nodes/vector_store/shared/createVectorStoreNode.ts @@ -88,25 +88,25 @@ function getOperationModeOptions(args: VectorStoreNodeConstructorArgs): INodePro name: 'Get Many', value: 'load', description: 'Get many ranked documents from vector store for query', - action: 'Get many ranked documents from vector store for query', + action: 'Get ranked documents from vector store', }, { name: 'Insert Documents', value: 'insert', description: 'Insert documents into vector store', - action: 'Insert documents into vector store', + action: 'Add documents to vector store', }, { name: 'Retrieve Documents (For Agent/Chain)', value: 'retrieve', description: 'Retrieve documents from vector store to be used with AI nodes', - action: 'Retrieve documents from vector store to be used with AI nodes', + action: 'Retrieve documents for AI processing', }, { name: 'Update Documents', value: 'update', description: 'Update documents in vector store by ID', - action: 'Update documents in vector store by ID', + action: 'Update vector store documents', }, ]; diff --git a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/assistant/message.operation.ts b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/assistant/message.operation.ts index 3ec2e46eea..cf770e4057 100644 --- a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/assistant/message.operation.ts +++ b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/assistant/message.operation.ts @@ -1,30 +1,27 @@ +import type { BaseMessage } from '@langchain/core/messages'; import { AgentExecutor } from 'langchain/agents'; - -import { OpenAIAssistantRunnable } from 
'langchain/experimental/openai_assistant'; import type { OpenAIToolType } from 'langchain/dist/experimental/openai_assistant/schema'; -import { OpenAI as OpenAIClient } from 'openai'; - -import { - ApplicationError, - NodeConnectionType, - NodeOperationError, - updateDisplayOptions, -} from 'n8n-workflow'; +import { OpenAIAssistantRunnable } from 'langchain/experimental/openai_assistant'; +import type { BufferWindowMemory } from 'langchain/memory'; +import omit from 'lodash/omit'; import type { IDataObject, IExecuteFunctions, INodeExecutionData, INodeProperties, } from 'n8n-workflow'; - -import type { BufferWindowMemory } from 'langchain/memory'; -import omit from 'lodash/omit'; -import type { BaseMessage } from '@langchain/core/messages'; -import { formatToOpenAIAssistantTool } from '../../helpers/utils'; -import { assistantRLC } from '../descriptions'; +import { + ApplicationError, + NodeConnectionType, + NodeOperationError, + updateDisplayOptions, +} from 'n8n-workflow'; +import { OpenAI as OpenAIClient } from 'openai'; import { getConnectedTools } from '../../../../../utils/helpers'; import { getTracingConfig } from '../../../../../utils/tracing'; +import { formatToOpenAIAssistantTool } from '../../helpers/utils'; +import { assistantRLC } from '../descriptions'; const properties: INodeProperties[] = [ assistantRLC, @@ -63,6 +60,46 @@ const properties: INodeProperties[] = [ }, }, }, + { + displayName: 'Memory', + name: 'memory', + type: 'options', + options: [ + { + // eslint-disable-next-line n8n-nodes-base/node-param-display-name-miscased + name: 'Use memory connector', + value: 'connector', + description: 'Connect one of the supported memory nodes', + }, + { + // eslint-disable-next-line n8n-nodes-base/node-param-display-name-miscased + name: 'Use thread ID', + value: 'threadId', + description: 'Specify the ID of the thread to continue', + }, + ], + displayOptions: { + show: { + '@version': [{ _cnd: { gte: 1.6 } }], + }, + }, + default: 'connector', + }, + { 
+ displayName: 'Thread ID', + name: 'threadId', + type: 'string', + default: '', + placeholder: '', + description: 'The ID of the thread to continue, a new thread will be created if not specified', + hint: 'If the thread ID is empty or undefined a new thread will be created and included in the response', + displayOptions: { + show: { + '@version': [{ _cnd: { gte: 1.6 } }], + memory: ['threadId'], + }, + }, + }, { displayName: 'Connect your own custom n8n tools to this node on the canvas', name: 'noticeTools', @@ -201,9 +238,19 @@ export async function execute(this: IExecuteFunctions, i: number): Promise= 1.6 && this.getNodeParameter('memory', i) === 'connector'; + const memory = + useMemoryConnector || nodeVersion < 1.6 + ? ((await this.getInputConnectionData(NodeConnectionType.AiMemory, 0)) as + | BufferWindowMemory + | undefined) + : undefined; + + const threadId = + nodeVersion >= 1.6 && !useMemoryConnector + ? (this.getNodeParameter('threadId', i) as string) + : undefined; const chainValues: IDataObject = { content: input, @@ -231,6 +278,8 @@ export async function execute(this: IExecuteFunctions, i: number): Promise { return `${capitalize(operation)} ${capitalize(resource)}`; }; -const configureNodeInputs = (resource: string, operation: string, hideTools: string) => { +const configureNodeInputs = ( + resource: string, + operation: string, + hideTools: string, + memory: string | undefined, +) => { if (resource === 'assistant' && operation === 'message') { - return [ + const inputs: INodeInputConfiguration[] = [ { type: NodeConnectionType.Main }, - { type: NodeConnectionType.AiMemory, displayName: 'Memory', maxConnections: 1 }, { type: NodeConnectionType.AiTool, displayName: 'Tools' }, ]; + if (memory !== 'threadId') { + inputs.push({ type: NodeConnectionType.AiMemory, displayName: 'Memory', maxConnections: 1 }); + } + return inputs; } if (resource === 'text' && operation === 'message') { if (hideTools === 'hide') { @@ -69,7 +77,7 @@ export const 
versionDescription: INodeTypeDescription = { name: 'openAi', icon: { light: 'file:openAi.svg', dark: 'file:openAi.dark.svg' }, group: ['transform'], - version: [1, 1.1, 1.2, 1.3, 1.4, 1.5], + version: [1, 1.1, 1.2, 1.3, 1.4, 1.5, 1.6], subtitle: `={{(${prettifyOperation})($parameter.resource, $parameter.operation)}}`, description: 'Message an assistant or GPT, analyze images, generate audio, etc.', defaults: { @@ -89,7 +97,7 @@ export const versionDescription: INodeTypeDescription = { ], }, }, - inputs: `={{(${configureNodeInputs})($parameter.resource, $parameter.operation, $parameter.hideTools)}}`, + inputs: `={{(${configureNodeInputs})($parameter.resource, $parameter.operation, $parameter.hideTools, $parameter.memory ?? undefined)}}`, outputs: [NodeConnectionType.Main], credentials: [ { diff --git a/packages/@n8n/nodes-langchain/package.json b/packages/@n8n/nodes-langchain/package.json index ec7001286d..34219d3ce6 100644 --- a/packages/@n8n/nodes-langchain/package.json +++ b/packages/@n8n/nodes-langchain/package.json @@ -1,6 +1,6 @@ { "name": "@n8n/n8n-nodes-langchain", - "version": "1.61.0", + "version": "1.62.1", "description": "", "main": "index.js", "scripts": { @@ -150,7 +150,7 @@ "@langchain/redis": "0.1.0", "@langchain/textsplitters": "0.1.0", "@mozilla/readability": "^0.5.0", - "@n8n/typeorm": "0.3.20-10", + "@n8n/typeorm": "0.3.20-12", "@n8n/vm2": "3.9.25", "@pinecone-database/pinecone": "3.0.3", "@qdrant/js-client-rest": "1.11.0", diff --git a/packages/@n8n/task-runner/.eslintrc.js b/packages/@n8n/task-runner/.eslintrc.js new file mode 100644 index 0000000000..dd79f2157e --- /dev/null +++ b/packages/@n8n/task-runner/.eslintrc.js @@ -0,0 +1,19 @@ +const sharedOptions = require('@n8n_io/eslint-config/shared'); + +/** + * @type {import('@types/eslint').ESLint.ConfigData} + */ +module.exports = { + extends: ['@n8n_io/eslint-config/node'], + + ...sharedOptions(__dirname), + + ignorePatterns: ['jest.config.js'], + + rules: { + 'unicorn/filename-case': 
['error', { case: 'kebabCase' }], + '@typescript-eslint/no-duplicate-imports': 'off', + + complexity: 'error', + }, +}; diff --git a/packages/@n8n/task-runner/jest.config.js b/packages/@n8n/task-runner/jest.config.js new file mode 100644 index 0000000000..5c3abe1ef7 --- /dev/null +++ b/packages/@n8n/task-runner/jest.config.js @@ -0,0 +1,5 @@ +/** @type {import('jest').Config} */ +module.exports = { + ...require('../../../jest.config'), + testTimeout: 10_000, +}; diff --git a/packages/@n8n/task-runner/package.json b/packages/@n8n/task-runner/package.json new file mode 100644 index 0000000000..2ce5993bb9 --- /dev/null +++ b/packages/@n8n/task-runner/package.json @@ -0,0 +1,29 @@ +{ + "name": "@n8n/task-runner", + "version": "1.0.1", + "scripts": { + "clean": "rimraf dist .turbo", + "start": "node dist/start.js", + "dev": "pnpm build && pnpm start", + "typecheck": "tsc --noEmit", + "build": "tsc -p ./tsconfig.build.json", + "format": "biome format --write src", + "format:check": "biome ci src", + "test": "echo \"Error: no tests in this package\" && exit 0", + "lint": "eslint . --quiet", + "lintfix": "eslint . 
--fix", + "watch": "tsc -p tsconfig.build.json --watch" + }, + "main": "dist/start.js", + "module": "src/start.ts", + "types": "dist/start.d.ts", + "files": [ + "dist/**/*" + ], + "dependencies": { + "n8n-workflow": "workspace:*", + "n8n-core": "workspace:*", + "nanoid": "^3.3.6", + "ws": "^8.18.0" + } +} diff --git a/packages/@n8n/task-runner/src/authenticator.ts b/packages/@n8n/task-runner/src/authenticator.ts new file mode 100644 index 0000000000..717af58dd2 --- /dev/null +++ b/packages/@n8n/task-runner/src/authenticator.ts @@ -0,0 +1,47 @@ +import { ApplicationError } from 'n8n-workflow'; +import * as a from 'node:assert/strict'; + +export type AuthOpts = { + n8nUri: string; + authToken: string; +}; + +/** + * Requests a one-time token that can be used to establish a task runner connection + */ +export async function authenticate(opts: AuthOpts) { + try { + const authEndpoint = `http://${opts.n8nUri}/rest/runners/auth`; + const response = await fetch(authEndpoint, { + method: 'POST', + headers: { + // eslint-disable-next-line @typescript-eslint/naming-convention + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + token: opts.authToken, + }), + }); + + if (!response.ok) { + throw new ApplicationError( + `Invalid response status ${response.status}: ${await response.text()}`, + ); + } + + const { data } = (await response.json()) as { data: { token: string } }; + const grantToken = data.token; + a.ok(grantToken); + + return grantToken; + } catch (e) { + console.error(e); + const error = e as Error; + throw new ApplicationError( + `Could not connect to n8n message broker ${opts.n8nUri}: ${error.message}`, + { + cause: error, + }, + ); + } +} diff --git a/packages/@n8n/task-runner/src/code.ts b/packages/@n8n/task-runner/src/code.ts new file mode 100644 index 0000000000..6fcb6cf878 --- /dev/null +++ b/packages/@n8n/task-runner/src/code.ts @@ -0,0 +1,147 @@ +import { getAdditionalKeys } from 'n8n-core'; +import { + type INode, + type INodeType, + type 
ITaskDataConnections, + type IWorkflowExecuteAdditionalData, + WorkflowDataProxy, + type WorkflowParameters, + type IDataObject, + type IExecuteData, + type INodeExecutionData, + type INodeParameters, + type IRunExecutionData, + // type IWorkflowDataProxyAdditionalKeys, + Workflow, + type WorkflowExecuteMode, +} from 'n8n-workflow'; +import * as a from 'node:assert'; +import { runInNewContext, type Context } from 'node:vm'; + +import type { TaskResultData } from './runner-types'; +import { type Task, TaskRunner } from './task-runner'; + +interface JSExecSettings { + code: string; + + // For workflow data proxy + mode: WorkflowExecuteMode; +} + +export interface PartialAdditionalData { + executionId?: string; + restartExecutionId?: string; + restApiUrl: string; + instanceBaseUrl: string; + formWaitingBaseUrl: string; + webhookBaseUrl: string; + webhookWaitingBaseUrl: string; + webhookTestBaseUrl: string; + currentNodeParameters?: INodeParameters; + executionTimeoutTimestamp?: number; + userId?: string; + variables: IDataObject; +} + +export interface AllCodeTaskData { + workflow: Omit; + inputData: ITaskDataConnections; + node: INode; + + runExecutionData: IRunExecutionData; + runIndex: number; + itemIndex: number; + activeNodeName: string; + connectionInputData: INodeExecutionData[]; + siblingParameters: INodeParameters; + mode: WorkflowExecuteMode; + executeData?: IExecuteData; + defaultReturnRunIndex: number; + selfData: IDataObject; + contextNodeName: string; + additionalData: PartialAdditionalData; +} + +export class JsTaskRunner extends TaskRunner { + constructor( + taskType: string, + wsUrl: string, + grantToken: string, + maxConcurrency: number, + name?: string, + ) { + super(taskType, wsUrl, grantToken, maxConcurrency, name ?? 
'JS Task Runner'); + } + + async executeTask(task: Task): Promise { + const allData = await this.requestData(task.taskId, 'all'); + + const settings = task.settings; + a.ok(settings, 'JS Code not sent to runner'); + + const workflowParams = allData.workflow; + const workflow = new Workflow({ + ...workflowParams, + nodeTypes: { + getByNameAndVersion() { + return undefined as unknown as INodeType; + }, + getByName() { + return undefined as unknown as INodeType; + }, + getKnownTypes() { + return {}; + }, + }, + }); + + const dataProxy = new WorkflowDataProxy( + workflow, + allData.runExecutionData, + allData.runIndex, + allData.itemIndex, + allData.activeNodeName, + allData.connectionInputData, + allData.siblingParameters, + settings.mode, + getAdditionalKeys( + allData.additionalData as IWorkflowExecuteAdditionalData, + allData.mode, + allData.runExecutionData, + ), + allData.executeData, + allData.defaultReturnRunIndex, + allData.selfData, + allData.contextNodeName, + ); + + const customConsole = { + log: (...args: unknown[]) => { + const logOutput = args + .map((arg) => (typeof arg === 'object' && arg !== null ? 
JSON.stringify(arg) : arg)) + .join(' '); + console.log('[JS Code]', logOutput); + void this.makeRpcCall(task.taskId, 'logNodeOutput', [logOutput]); + }, + }; + + const context: Context = { + require, + module: {}, + console: customConsole, + + ...dataProxy.getDataProxy(), + ...this.buildRpcCallObject(task.taskId), + }; + + const result = (await runInNewContext( + `module.exports = async function() {${settings.code}\n}()`, + context, + )) as TaskResultData['result']; + + return { + result, + customData: allData.runExecutionData.resultData.metadata, + }; + } +} diff --git a/packages/@n8n/task-runner/src/index.ts b/packages/@n8n/task-runner/src/index.ts new file mode 100644 index 0000000000..59e6f6d288 --- /dev/null +++ b/packages/@n8n/task-runner/src/index.ts @@ -0,0 +1,2 @@ +export * from './task-runner'; +export * from './runner-types'; diff --git a/packages/@n8n/task-runner/src/runner-types.ts b/packages/@n8n/task-runner/src/runner-types.ts new file mode 100644 index 0000000000..27b4e9a76c --- /dev/null +++ b/packages/@n8n/task-runner/src/runner-types.ts @@ -0,0 +1,231 @@ +import type { INodeExecutionData } from 'n8n-workflow'; + +export type DataRequestType = 'input' | 'node' | 'all'; + +export interface TaskResultData { + result: INodeExecutionData[]; + customData?: Record; +} + +export namespace N8nMessage { + export namespace ToRunner { + export interface InfoRequest { + type: 'broker:inforequest'; + } + + export interface RunnerRegistered { + type: 'broker:runnerregistered'; + } + + export interface TaskOfferAccept { + type: 'broker:taskofferaccept'; + taskId: string; + offerId: string; + } + + export interface TaskCancel { + type: 'broker:taskcancel'; + taskId: string; + reason: string; + } + + export interface TaskSettings { + type: 'broker:tasksettings'; + taskId: string; + settings: unknown; + } + + export interface RPCResponse { + type: 'broker:rpcresponse'; + callId: string; + taskId: string; + status: 'success' | 'error'; + data: unknown; + } + + 
export interface TaskDataResponse { + type: 'broker:taskdataresponse'; + taskId: string; + requestId: string; + data: unknown; + } + + export type All = + | InfoRequest + | TaskOfferAccept + | TaskCancel + | TaskSettings + | RunnerRegistered + | RPCResponse + | TaskDataResponse; + } + + export namespace ToRequester { + export interface TaskReady { + type: 'broker:taskready'; + requestId: string; + taskId: string; + } + + export interface TaskDone { + type: 'broker:taskdone'; + taskId: string; + data: TaskResultData; + } + + export interface TaskError { + type: 'broker:taskerror'; + taskId: string; + error: unknown; + } + + export interface TaskDataRequest { + type: 'broker:taskdatarequest'; + taskId: string; + requestId: string; + requestType: DataRequestType; + param?: string; + } + + export interface RPC { + type: 'broker:rpc'; + callId: string; + taskId: string; + name: (typeof RPC_ALLOW_LIST)[number]; + params: unknown[]; + } + + export type All = TaskReady | TaskDone | TaskError | TaskDataRequest | RPC; + } +} + +export namespace RequesterMessage { + export namespace ToN8n { + export interface TaskSettings { + type: 'requester:tasksettings'; + taskId: string; + settings: unknown; + } + + export interface TaskCancel { + type: 'requester:taskcancel'; + taskId: string; + reason: string; + } + + export interface TaskDataResponse { + type: 'requester:taskdataresponse'; + taskId: string; + requestId: string; + data: unknown; + } + + export interface RPCResponse { + type: 'requester:rpcresponse'; + taskId: string; + callId: string; + status: 'success' | 'error'; + data: unknown; + } + + export interface TaskRequest { + type: 'requester:taskrequest'; + requestId: string; + taskType: string; + } + + export type All = TaskSettings | TaskCancel | RPCResponse | TaskDataResponse | TaskRequest; + } +} + +export namespace RunnerMessage { + export namespace ToN8n { + export interface Info { + type: 'runner:info'; + name: string; + types: string[]; + } + + export interface 
TaskAccepted { + type: 'runner:taskaccepted'; + taskId: string; + } + + export interface TaskRejected { + type: 'runner:taskrejected'; + taskId: string; + reason: string; + } + + export interface TaskDone { + type: 'runner:taskdone'; + taskId: string; + data: TaskResultData; + } + + export interface TaskError { + type: 'runner:taskerror'; + taskId: string; + error: unknown; + } + + export interface TaskOffer { + type: 'runner:taskoffer'; + offerId: string; + taskType: string; + validFor: number; + } + + export interface TaskDataRequest { + type: 'runner:taskdatarequest'; + taskId: string; + requestId: string; + requestType: DataRequestType; + param?: string; + } + + export interface RPC { + type: 'runner:rpc'; + callId: string; + taskId: string; + name: (typeof RPC_ALLOW_LIST)[number]; + params: unknown[]; + } + + export type All = + | Info + | TaskDone + | TaskError + | TaskAccepted + | TaskRejected + | TaskOffer + | RPC + | TaskDataRequest; + } +} + +export const RPC_ALLOW_LIST = [ + 'helpers.httpRequestWithAuthentication', + 'helpers.requestWithAuthenticationPaginated', + // "helpers.normalizeItems" + // "helpers.constructExecutionMetaData" + // "helpers.assertBinaryData" + 'helpers.getBinaryDataBuffer', + // "helpers.copyInputItems" + // "helpers.returnJsonArray" + 'helpers.getSSHClient', + 'helpers.createReadStream', + // "helpers.getStoragePath" + 'helpers.writeContentToFile', + 'helpers.prepareBinaryData', + 'helpers.setBinaryDataBuffer', + 'helpers.copyBinaryFile', + 'helpers.binaryToBuffer', + // "helpers.binaryToString" + // "helpers.getBinaryPath" + 'helpers.getBinaryStream', + 'helpers.getBinaryMetadata', + 'helpers.createDeferredPromise', + 'helpers.httpRequest', + 'logNodeOutput', +] as const; diff --git a/packages/@n8n/task-runner/src/start.ts b/packages/@n8n/task-runner/src/start.ts new file mode 100644 index 0000000000..8838ea5b95 --- /dev/null +++ b/packages/@n8n/task-runner/src/start.ts @@ -0,0 +1,48 @@ +import { ApplicationError, ensureError } 
from 'n8n-workflow'; +import * as a from 'node:assert/strict'; + +import { authenticate } from './authenticator'; +import { JsTaskRunner } from './code'; + +type Config = { + n8nUri: string; + authToken?: string; + grantToken?: string; +}; + +function readAndParseConfig(): Config { + const authToken = process.env.N8N_RUNNERS_AUTH_TOKEN; + const grantToken = process.env.N8N_RUNNERS_GRANT_TOKEN; + if (!authToken && !grantToken) { + throw new ApplicationError( + 'Missing task runner authentication. Use either N8N_RUNNERS_AUTH_TOKEN or N8N_RUNNERS_GRANT_TOKEN to configure it', + ); + } + + return { + n8nUri: process.env.N8N_RUNNERS_N8N_URI ?? 'localhost:5678', + authToken, + grantToken, + }; +} + +void (async function start() { + const config = readAndParseConfig(); + + let grantToken = config.grantToken; + if (!grantToken) { + a.ok(config.authToken); + + grantToken = await authenticate({ + authToken: config.authToken, + n8nUri: config.n8nUri, + }); + } + + const wsUrl = `ws://${config.n8nUri}/runners/_ws`; + new JsTaskRunner('javascript', wsUrl, grantToken, 5); +})().catch((e) => { + const error = ensureError(e); + console.error('Task runner failed to start', { error }); + process.exit(1); +}); diff --git a/packages/@n8n/task-runner/src/task-runner.ts b/packages/@n8n/task-runner/src/task-runner.ts new file mode 100644 index 0000000000..6971df6bb4 --- /dev/null +++ b/packages/@n8n/task-runner/src/task-runner.ts @@ -0,0 +1,362 @@ +import { ApplicationError, ensureError } from 'n8n-workflow'; +import { nanoid } from 'nanoid'; +import { URL } from 'node:url'; +import { type MessageEvent, WebSocket } from 'ws'; + +import { + RPC_ALLOW_LIST, + type RunnerMessage, + type N8nMessage, + type TaskResultData, +} from './runner-types'; + +export interface Task { + taskId: string; + settings?: T; + active: boolean; + cancelled: boolean; +} + +export interface TaskOffer { + offerId: string; + validUntil: bigint; +} + +interface DataRequest { + requestId: string; + resolve: (data: 
unknown) => void; + reject: (error: unknown) => void; +} + +interface RPCCall { + callId: string; + resolve: (data: unknown) => void; + reject: (error: unknown) => void; +} + +export interface RPCCallObject { + [name: string]: ((...args: unknown[]) => Promise) | RPCCallObject; +} + +const VALID_TIME_MS = 1000; +const VALID_EXTRA_MS = 100; + +export abstract class TaskRunner { + id: string = nanoid(); + + ws: WebSocket; + + canSendOffers = false; + + runningTasks: Map = new Map(); + + offerInterval: NodeJS.Timeout | undefined; + + openOffers: Map = new Map(); + + dataRequests: Map = new Map(); + + rpcCalls: Map = new Map(); + + constructor( + public taskType: string, + wsUrl: string, + grantToken: string, + private maxConcurrency: number, + public name?: string, + ) { + const url = new URL(wsUrl); + url.searchParams.append('id', this.id); + this.ws = new WebSocket(url.toString(), { + headers: { + authorization: `Bearer ${grantToken}`, + }, + }); + this.ws.addEventListener('message', this.receiveMessage); + this.ws.addEventListener('close', this.stopTaskOffers); + } + + private receiveMessage = (message: MessageEvent) => { + // eslint-disable-next-line n8n-local-rules/no-uncaught-json-parse + const data = JSON.parse(message.data as string) as N8nMessage.ToRunner.All; + void this.onMessage(data); + }; + + private stopTaskOffers = () => { + this.canSendOffers = false; + if (this.offerInterval) { + clearInterval(this.offerInterval); + this.offerInterval = undefined; + } + }; + + private startTaskOffers() { + this.canSendOffers = true; + if (this.offerInterval) { + clearInterval(this.offerInterval); + } + this.offerInterval = setInterval(() => this.sendOffers(), 250); + } + + deleteStaleOffers() { + this.openOffers.forEach((offer, key) => { + if (offer.validUntil < process.hrtime.bigint()) { + this.openOffers.delete(key); + } + }); + } + + sendOffers() { + this.deleteStaleOffers(); + + const offersToSend = + this.maxConcurrency - + (Object.values(this.openOffers).length 
+ Object.values(this.runningTasks).length); + + for (let i = 0; i < offersToSend; i++) { + const offer: TaskOffer = { + offerId: nanoid(), + validUntil: process.hrtime.bigint() + BigInt((VALID_TIME_MS + VALID_EXTRA_MS) * 1_000_000), // Adding a little extra time to account for latency + }; + this.openOffers.set(offer.offerId, offer); + this.send({ + type: 'runner:taskoffer', + taskType: this.taskType, + offerId: offer.offerId, + validFor: VALID_TIME_MS, + }); + } + } + + send(message: RunnerMessage.ToN8n.All) { + this.ws.send(JSON.stringify(message)); + } + + onMessage(message: N8nMessage.ToRunner.All) { + switch (message.type) { + case 'broker:inforequest': + this.send({ + type: 'runner:info', + name: this.name ?? 'Node.js Task Runner SDK', + types: [this.taskType], + }); + break; + case 'broker:runnerregistered': + this.startTaskOffers(); + break; + case 'broker:taskofferaccept': + this.offerAccepted(message.offerId, message.taskId); + break; + case 'broker:taskcancel': + this.taskCancelled(message.taskId); + break; + case 'broker:tasksettings': + void this.receivedSettings(message.taskId, message.settings); + break; + case 'broker:taskdataresponse': + this.processDataResponse(message.requestId, message.data); + break; + case 'broker:rpcresponse': + this.handleRpcResponse(message.callId, message.status, message.data); + } + } + + processDataResponse(requestId: string, data: unknown) { + const request = this.dataRequests.get(requestId); + if (!request) { + return; + } + // Deleting of the request is handled in `requestData`, using a + // `finally` wrapped around the return + request.resolve(data); + } + + hasOpenTasks() { + return Object.values(this.runningTasks).length < this.maxConcurrency; + } + + offerAccepted(offerId: string, taskId: string) { + if (!this.hasOpenTasks()) { + this.send({ + type: 'runner:taskrejected', + taskId, + reason: 'No open task slots', + }); + return; + } + const offer = this.openOffers.get(offerId); + if (!offer) { + this.send({ + 
type: 'runner:taskrejected', + taskId, + reason: 'Offer expired and no open task slots', + }); + return; + } else { + this.openOffers.delete(offerId); + } + + this.runningTasks.set(taskId, { + taskId, + active: false, + cancelled: false, + }); + + this.send({ + type: 'runner:taskaccepted', + taskId, + }); + } + + taskCancelled(taskId: string) { + const task = this.runningTasks.get(taskId); + if (!task) { + return; + } + task.cancelled = true; + if (task.active) { + // TODO + } else { + this.runningTasks.delete(taskId); + } + this.sendOffers(); + } + + taskErrored(taskId: string, error: unknown) { + this.send({ + type: 'runner:taskerror', + taskId, + error, + }); + this.runningTasks.delete(taskId); + this.sendOffers(); + } + + taskDone(taskId: string, data: RunnerMessage.ToN8n.TaskDone['data']) { + this.send({ + type: 'runner:taskdone', + taskId, + data, + }); + this.runningTasks.delete(taskId); + this.sendOffers(); + } + + async receivedSettings(taskId: string, settings: unknown) { + const task = this.runningTasks.get(taskId); + if (!task) { + return; + } + if (task.cancelled) { + this.runningTasks.delete(taskId); + return; + } + task.settings = settings; + task.active = true; + try { + const data = await this.executeTask(task); + this.taskDone(taskId, data); + } catch (e) { + if (ensureError(e)) { + this.taskErrored(taskId, (e as Error).message); + } else { + this.taskErrored(taskId, e); + } + } + } + + // eslint-disable-next-line @typescript-eslint/naming-convention + async executeTask(_task: Task): Promise { + throw new ApplicationError('Unimplemented'); + } + + async requestData( + taskId: Task['taskId'], + type: RunnerMessage.ToN8n.TaskDataRequest['requestType'], + param?: string, + ): Promise { + const requestId = nanoid(); + + const p = new Promise((resolve, reject) => { + this.dataRequests.set(requestId, { + requestId, + resolve: resolve as (data: unknown) => void, + reject, + }); + }); + + this.send({ + type: 'runner:taskdatarequest', + taskId, + 
requestId, + requestType: type, + param, + }); + + try { + return await p; + } finally { + this.dataRequests.delete(requestId); + } + } + + async makeRpcCall(taskId: string, name: RunnerMessage.ToN8n.RPC['name'], params: unknown[]) { + const callId = nanoid(); + + const dataPromise = new Promise((resolve, reject) => { + this.rpcCalls.set(callId, { + callId, + resolve, + reject, + }); + }); + + this.send({ + type: 'runner:rpc', + callId, + taskId, + name, + params, + }); + + try { + return await dataPromise; + } finally { + this.rpcCalls.delete(callId); + } + } + + handleRpcResponse( + callId: string, + status: N8nMessage.ToRunner.RPCResponse['status'], + data: unknown, + ) { + const call = this.rpcCalls.get(callId); + if (!call) { + return; + } + if (status === 'success') { + call.resolve(data); + } else { + call.reject(typeof data === 'string' ? new Error(data) : data); + } + } + + buildRpcCallObject(taskId: string) { + const rpcObject: RPCCallObject = {}; + for (const r of RPC_ALLOW_LIST) { + const splitPath = r.split('.'); + let obj = rpcObject; + + splitPath.forEach((s, index) => { + if (index !== splitPath.length - 1) { + obj[s] = {}; + obj = obj[s]; + return; + } + obj[s] = async (...args: unknown[]) => await this.makeRpcCall(taskId, r, args); + }); + } + return rpcObject; + } +} diff --git a/packages/@n8n/task-runner/tsconfig.build.json b/packages/@n8n/task-runner/tsconfig.build.json new file mode 100644 index 0000000000..59065a1e2b --- /dev/null +++ b/packages/@n8n/task-runner/tsconfig.build.json @@ -0,0 +1,11 @@ +{ + "extends": ["./tsconfig.json", "../../../tsconfig.build.json"], + "compilerOptions": { + "composite": true, + "rootDir": "src", + "outDir": "dist", + "tsBuildInfoFile": "dist/build.tsbuildinfo" + }, + "include": ["src/**/*.ts"], + "exclude": ["src/**/__tests__/**"] +} diff --git a/packages/@n8n/task-runner/tsconfig.json b/packages/@n8n/task-runner/tsconfig.json new file mode 100644 index 0000000000..db6ad545e3 --- /dev/null +++ 
b/packages/@n8n/task-runner/tsconfig.json @@ -0,0 +1,12 @@ +{ + "extends": ["../../../tsconfig.json", "../../../tsconfig.backend.json"], + "compilerOptions": { + "rootDir": ".", + "baseUrl": "src", + "paths": { + "@/*": ["./*"] + }, + "tsBuildInfoFile": "dist/typecheck.tsbuildinfo" + }, + "include": ["src/**/*.ts"] +} diff --git a/packages/cli/BREAKING-CHANGES.md b/packages/cli/BREAKING-CHANGES.md index 869ace642e..012892c6ae 100644 --- a/packages/cli/BREAKING-CHANGES.md +++ b/packages/cli/BREAKING-CHANGES.md @@ -2,6 +2,26 @@ This list shows all the versions which include breaking changes and how to upgrade. +## 1.63.0 + +### What changed? + +The worker server used to bind to IPv6 by default. It now binds to IPv4 by default. + +### When is action necessary? + +If you experience a port conflict error when starting a worker server using its default port, set a different port for the worker server with `QUEUE_HEALTH_CHECK_PORT`. + +## 1.57.0 + +### What changed? + +The `verbose` log level was merged into the `debug` log level. + +### When is action necessary? + +If you are setting the env var `N8N_LOG_LEVEL=verbose`, please update your log level to `N8N_LOG_LEVEL=debug`. + ## 1.55.0 ### What changed? 
diff --git a/packages/cli/package.json b/packages/cli/package.json index 6d11b9cf55..9c7664d8a8 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -1,6 +1,6 @@ { "name": "n8n", - "version": "1.61.0", + "version": "1.62.1", "description": "n8n Workflow Automation Tool", "main": "dist/index", "types": "dist/index.d.ts", @@ -92,7 +92,8 @@ "@n8n/localtunnel": "3.0.0", "@n8n/n8n-nodes-langchain": "workspace:*", "@n8n/permissions": "workspace:*", - "@n8n/typeorm": "0.3.20-10", + "@n8n/task-runner": "workspace:*", + "@n8n/typeorm": "0.3.20-12", "@n8n_io/ai-assistant-sdk": "1.9.4", "@n8n_io/license-sdk": "2.13.1", "@oclif/core": "4.0.7", @@ -170,7 +171,7 @@ "typedi": "catalog:", "uuid": "catalog:", "validator": "13.7.0", - "winston": "3.8.2", + "winston": "3.14.2", "ws": "8.17.1", "xml2js": "catalog:", "xmllint-wasm": "3.0.1", diff --git a/packages/cli/src/__tests__/license.test.ts b/packages/cli/src/__tests__/license.test.ts index 6f2e06752f..35da918abb 100644 --- a/packages/cli/src/__tests__/license.test.ts +++ b/packages/cli/src/__tests__/license.test.ts @@ -5,7 +5,7 @@ import type { InstanceSettings } from 'n8n-core'; import config from '@/config'; import { N8N_VERSION } from '@/constants'; import { License } from '@/license'; -import type { Logger } from '@/logger'; +import type { Logger } from '@/logging/logger.service'; jest.mock('@n8n_io/license-sdk'); diff --git a/packages/cli/src/__tests__/workflow-execute-additional-data.test.ts b/packages/cli/src/__tests__/workflow-execute-additional-data.test.ts index 6b715f175d..88aee51540 100644 --- a/packages/cli/src/__tests__/workflow-execute-additional-data.test.ts +++ b/packages/cli/src/__tests__/workflow-execute-additional-data.test.ts @@ -1,21 +1,80 @@ +import { mock } from 'jest-mock-extended'; +import type { + IExecuteWorkflowInfo, + IWorkflowExecuteAdditionalData, + ExecuteWorkflowOptions, + IRun, +} from 'n8n-workflow'; +import type PCancelable from 'p-cancelable'; import Container from 
'typedi'; +import { ActiveExecutions } from '@/active-executions'; import { CredentialsHelper } from '@/credentials-helper'; +import type { WorkflowEntity } from '@/databases/entities/workflow-entity'; +import { ExecutionRepository } from '@/databases/repositories/execution.repository'; +import { WorkflowRepository } from '@/databases/repositories/workflow.repository'; import { VariablesService } from '@/environments/variables/variables.service.ee'; import { EventService } from '@/events/event.service'; +import { ExternalHooks } from '@/external-hooks'; import { SecretsHelper } from '@/secrets-helpers'; -import { getBase } from '@/workflow-execute-additional-data'; +import { WorkflowStatisticsService } from '@/services/workflow-statistics.service'; +import { SubworkflowPolicyChecker } from '@/subworkflows/subworkflow-policy-checker.service'; +import { Telemetry } from '@/telemetry'; +import { PermissionChecker } from '@/user-management/permission-checker'; +import { executeWorkflow, getBase } from '@/workflow-execute-additional-data'; import { mockInstance } from '@test/mocking'; +const run = mock({ + data: { resultData: {} }, + finished: true, + mode: 'manual', + startedAt: new Date(), + status: 'new', +}); + +const cancelablePromise = mock>({ + then: jest + .fn() + .mockImplementation(async (onfulfilled) => await Promise.resolve(run).then(onfulfilled)), + catch: jest + .fn() + .mockImplementation(async (onrejected) => await Promise.resolve(run).catch(onrejected)), + finally: jest + .fn() + .mockImplementation(async (onfinally) => await Promise.resolve(run).finally(onfinally)), + [Symbol.toStringTag]: 'PCancelable', +}); + +jest.mock('n8n-core', () => ({ + __esModule: true, + ...jest.requireActual('n8n-core'), + WorkflowExecute: jest.fn().mockImplementation(() => ({ + processRunExecutionData: jest.fn().mockReturnValue(cancelablePromise), + })), +})); + +jest.mock('../workflow-helpers', () => ({ + ...jest.requireActual('../workflow-helpers'), + 
getDataLastExecutedNodeData: jest.fn().mockReturnValue({ data: { main: [] } }), +})); + describe('WorkflowExecuteAdditionalData', () => { const variablesService = mockInstance(VariablesService); variablesService.getAllCached.mockResolvedValue([]); const credentialsHelper = mockInstance(CredentialsHelper); const secretsHelper = mockInstance(SecretsHelper); const eventService = mockInstance(EventService); + mockInstance(ExternalHooks); Container.set(VariablesService, variablesService); Container.set(CredentialsHelper, credentialsHelper); Container.set(SecretsHelper, secretsHelper); + const executionRepository = mockInstance(ExecutionRepository); + mockInstance(Telemetry); + const workflowRepository = mockInstance(WorkflowRepository); + const activeExecutions = mockInstance(ActiveExecutions); + mockInstance(PermissionChecker); + mockInstance(SubworkflowPolicyChecker); + mockInstance(WorkflowStatisticsService); test('logAiEvent should call MessageEventBus', async () => { const additionalData = await getBase('user-id'); @@ -35,4 +94,18 @@ describe('WorkflowExecuteAdditionalData', () => { expect(eventService.emit).toHaveBeenCalledTimes(1); expect(eventService.emit).toHaveBeenCalledWith(eventName, payload); }); + + it('`executeWorkflow` should set subworkflow execution as running', async () => { + const executionId = '123'; + workflowRepository.get.mockResolvedValue(mock({ id: executionId, nodes: [] })); + activeExecutions.add.mockResolvedValue(executionId); + + await executeWorkflow( + mock(), + mock(), + mock({ loadedWorkflowData: undefined }), + ); + + expect(executionRepository.setRunning).toHaveBeenCalledWith(executionId); + }); }); diff --git a/packages/cli/src/abstract-server.ts b/packages/cli/src/abstract-server.ts index 3c60a3e48d..95ecaccdc5 100644 --- a/packages/cli/src/abstract-server.ts +++ b/packages/cli/src/abstract-server.ts @@ -13,7 +13,7 @@ import { N8N_VERSION, TEMPLATES_DIR, inDevelopment, inTest } from '@/constants'; import * as Db from '@/db'; import 
{ OnShutdown } from '@/decorators/on-shutdown'; import { ExternalHooks } from '@/external-hooks'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { rawBodyReader, bodyParser, corsMiddleware } from '@/middlewares'; import { send, sendErrorResponse } from '@/response-helper'; import { WaitingForms } from '@/waiting-forms'; diff --git a/packages/cli/src/active-executions.ts b/packages/cli/src/active-executions.ts index 8f7661925b..f5835ca164 100644 --- a/packages/cli/src/active-executions.ts +++ b/packages/cli/src/active-executions.ts @@ -13,12 +13,12 @@ import { Service } from 'typedi'; import { ExecutionRepository } from '@/databases/repositories/execution.repository'; import { ExecutionNotFoundError } from '@/errors/execution-not-found-error'; import type { - ExecutionPayload, + CreateExecutionPayload, IExecutingWorkflowData, IExecutionDb, IExecutionsCurrentSummary, } from '@/interfaces'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { isWorkflowIdValid } from '@/utils'; import { ConcurrencyControlService } from './concurrency/concurrency-control.service'; @@ -52,11 +52,10 @@ export class ActiveExecutions { if (executionId === undefined) { // Is a new execution so save in DB - const fullExecutionData: ExecutionPayload = { + const fullExecutionData: CreateExecutionPayload = { data: executionData.executionData!, mode, finished: false, - startedAt: new Date(), workflowData: executionData.workflowData, status: executionStatus, workflowId: executionData.workflowData.id, @@ -74,7 +73,10 @@ export class ActiveExecutions { executionId = await this.executionRepository.createNewExecution(fullExecutionData); assert(executionId); - await this.concurrencyControl.throttle({ mode, executionId }); + if (config.getEnv('executions.mode') === 'regular') { + await this.concurrencyControl.throttle({ mode, executionId }); + await this.executionRepository.setRunning(executionId); + } 
executionStatus = 'running'; } else { // Is an existing execution we want to finish so update in DB @@ -86,6 +88,7 @@ export class ActiveExecutions { data: executionData.executionData!, waitTill: null, status: executionStatus, + // this is resuming, so keep `startedAt` as it was }; await this.executionRepository.updateExistingExecution(executionId, execution); diff --git a/packages/cli/src/active-workflow-manager.ts b/packages/cli/src/active-workflow-manager.ts index 44172ef564..988a238887 100644 --- a/packages/cli/src/active-workflow-manager.ts +++ b/packages/cli/src/active-workflow-manager.ts @@ -37,6 +37,7 @@ import { WorkflowRepository } from '@/databases/repositories/workflow.repository import { OnShutdown } from '@/decorators/on-shutdown'; import { ExternalHooks } from '@/external-hooks'; import type { IWorkflowDb } from '@/interfaces'; +import { Logger } from '@/logging/logger.service'; import { NodeTypes } from '@/node-types'; import { ActiveWorkflowsService } from '@/services/active-workflows.service'; import { OrchestrationService } from '@/services/orchestration.service'; @@ -47,7 +48,6 @@ import { WorkflowExecutionService } from '@/workflows/workflow-execution.service import { WorkflowStaticDataService } from '@/workflows/workflow-static-data.service'; import { ExecutionService } from './executions/execution.service'; -import { Logger } from './logger'; interface QueuedActivation { activationMode: WorkflowActivateMode; diff --git a/packages/cli/src/auth/auth.service.ts b/packages/cli/src/auth/auth.service.ts index 481f25c897..989396df84 100644 --- a/packages/cli/src/auth/auth.service.ts +++ b/packages/cli/src/auth/auth.service.ts @@ -12,7 +12,7 @@ import { UserRepository } from '@/databases/repositories/user.repository'; import { AuthError } from '@/errors/response-errors/auth.error'; import { ForbiddenError } from '@/errors/response-errors/forbidden.error'; import { License } from '@/license'; -import { Logger } from '@/logger'; +import { Logger } from 
'@/logging/logger.service'; import type { AuthenticatedRequest } from '@/requests'; import { JwtService } from '@/services/jwt.service'; import { UrlService } from '@/services/url.service'; diff --git a/packages/cli/src/commands/base-command.ts b/packages/cli/src/commands/base-command.ts index 857ca231d4..7403dd337a 100644 --- a/packages/cli/src/commands/base-command.ts +++ b/packages/cli/src/commands/base-command.ts @@ -13,13 +13,13 @@ import { generateHostInstanceId } from '@/databases/utils/generators'; import * as Db from '@/db'; import { initErrorHandling } from '@/error-reporting'; import { MessageEventBus } from '@/eventbus/message-event-bus/message-event-bus'; -import { TelemetryEventRelay } from '@/events/telemetry-event-relay'; +import { TelemetryEventRelay } from '@/events/relays/telemetry.event-relay'; import { initExpressionEvaluator } from '@/expression-evaluator'; import { ExternalHooks } from '@/external-hooks'; import { ExternalSecretsManager } from '@/external-secrets/external-secrets-manager.ee'; import { License } from '@/license'; import { LoadNodesAndCredentials } from '@/load-nodes-and-credentials'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { NodeTypes } from '@/node-types'; import { PostHogClient } from '@/posthog'; import { ShutdownService } from '@/shutdown/shutdown.service'; diff --git a/packages/cli/src/commands/db/__tests__/revert.test.ts b/packages/cli/src/commands/db/__tests__/revert.test.ts index 9c7a37b533..463c9d617c 100644 --- a/packages/cli/src/commands/db/__tests__/revert.test.ts +++ b/packages/cli/src/commands/db/__tests__/revert.test.ts @@ -4,7 +4,7 @@ import { mock } from 'jest-mock-extended'; import { main } from '@/commands/db/revert'; import type { IrreversibleMigration, ReversibleMigration } from '@/databases/types'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { mockInstance } from '@test/mocking'; const logger = 
mockInstance(Logger); diff --git a/packages/cli/src/commands/db/revert.ts b/packages/cli/src/commands/db/revert.ts index dc3776a6af..7823506ee0 100644 --- a/packages/cli/src/commands/db/revert.ts +++ b/packages/cli/src/commands/db/revert.ts @@ -8,7 +8,7 @@ import { Container } from 'typedi'; import { getConnectionOptions } from '@/databases/config'; import type { Migration } from '@/databases/types'; import { wrapMigration } from '@/databases/utils/migration-helpers'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; // This function is extracted to make it easier to unit test it. // Mocking turned into a mess due to this command using typeorm and the db diff --git a/packages/cli/src/commands/start.ts b/packages/cli/src/commands/start.ts index 61212049d7..36e690c37e 100644 --- a/packages/cli/src/commands/start.ts +++ b/packages/cli/src/commands/start.ts @@ -21,6 +21,8 @@ import { MessageEventBus } from '@/eventbus/message-event-bus/message-event-bus' import { EventService } from '@/events/event.service'; import { ExecutionService } from '@/executions/execution.service'; import { License } from '@/license'; +import { SingleMainTaskManager } from '@/runners/task-managers/single-main-task-manager'; +import { TaskManager } from '@/runners/task-managers/task-manager'; import { Publisher } from '@/scaling/pubsub/publisher.service'; import { Server } from '@/server'; import { OrchestrationHandlerMainService } from '@/services/orchestration/main/orchestration.handler.main.service'; @@ -220,6 +222,17 @@ export class Start extends BaseCommand { if (!this.globalConfig.endpoints.disableUi) { await this.generateStaticAssets(); } + + if (!this.globalConfig.taskRunners.disabled) { + Container.set(TaskManager, new SingleMainTaskManager()); + const { TaskRunnerServer } = await import('@/runners/task-runner-server'); + const taskRunnerServer = Container.get(TaskRunnerServer); + await taskRunnerServer.start(); + + const { TaskRunnerProcess } = 
await import('@/runners/task-runner-process'); + const runnerProcess = Container.get(TaskRunnerProcess); + await runnerProcess.start(); + } } async initOrchestration() { @@ -365,10 +378,9 @@ export class Start extends BaseCommand { if (executions.length === 0) return; - this.logger.debug( - '[Startup] Found enqueued executions to run', - executions.map((e) => e.id), - ); + this.logger.debug('[Startup] Found enqueued executions to run', { + executionIds: executions.map((e) => e.id), + }); const ownershipService = Container.get(OwnershipService); const workflowRunner = Container.get(WorkflowRunner); diff --git a/packages/cli/src/commands/webhook.ts b/packages/cli/src/commands/webhook.ts index e8a47e10e0..a0f9e10f80 100644 --- a/packages/cli/src/commands/webhook.ts +++ b/packages/cli/src/commands/webhook.ts @@ -4,7 +4,8 @@ import { Container } from 'typedi'; import { ActiveExecutions } from '@/active-executions'; import config from '@/config'; -import { OrchestrationHandlerWebhookService } from '@/services/orchestration/webhook/orchestration.handler.webhook.service'; +import { PubSubHandler } from '@/scaling/pubsub/pubsub-handler'; +import { Subscriber } from '@/scaling/pubsub/subscriber.service'; import { OrchestrationWebhookService } from '@/services/orchestration/webhook/orchestration.webhook.service'; import { WebhookServer } from '@/webhooks/webhook-server'; @@ -110,6 +111,11 @@ export class Webhook extends BaseCommand { async initOrchestration() { await Container.get(OrchestrationWebhookService).init(); - await Container.get(OrchestrationHandlerWebhookService).init(); + + const subscriber = Container.get(Subscriber); + await subscriber.subscribe('n8n.commands'); + subscriber.setCommandMessageHandler(); + + Container.get(PubSubHandler).init(); } } diff --git a/packages/cli/src/commands/worker.ts b/packages/cli/src/commands/worker.ts index f5f6b2b79b..8c1aabf74a 100644 --- a/packages/cli/src/commands/worker.ts +++ b/packages/cli/src/commands/worker.ts @@ -6,7 +6,7 
@@ import config from '@/config'; import { N8N_VERSION, inTest } from '@/constants'; import { EventMessageGeneric } from '@/eventbus/event-message-classes/event-message-generic'; import { MessageEventBus } from '@/eventbus/message-event-bus/message-event-bus'; -import { LogStreamingEventRelay } from '@/events/log-streaming-event-relay'; +import { LogStreamingEventRelay } from '@/events/relays/log-streaming.event-relay'; import { JobProcessor } from '@/scaling/job-processor'; import { Publisher } from '@/scaling/pubsub/publisher.service'; import type { ScalingService } from '@/scaling/scaling.service'; diff --git a/packages/cli/src/concurrency/__tests__/concurrency-control.service.test.ts b/packages/cli/src/concurrency/__tests__/concurrency-control.service.test.ts index 75239f5e59..6774708099 100644 --- a/packages/cli/src/concurrency/__tests__/concurrency-control.service.test.ts +++ b/packages/cli/src/concurrency/__tests__/concurrency-control.service.test.ts @@ -11,7 +11,7 @@ import type { ExecutionRepository } from '@/databases/repositories/execution.rep import { InvalidConcurrencyLimitError } from '@/errors/invalid-concurrency-limit.error'; import type { EventService } from '@/events/event.service'; import type { IExecutingWorkflowData } from '@/interfaces'; -import type { Logger } from '@/logger'; +import type { Logger } from '@/logging/logger.service'; import type { Telemetry } from '@/telemetry'; import { ConcurrencyQueue } from '../concurrency-queue'; diff --git a/packages/cli/src/concurrency/concurrency-control.service.ts b/packages/cli/src/concurrency/concurrency-control.service.ts index 45ef2e1206..1665279352 100644 --- a/packages/cli/src/concurrency/concurrency-control.service.ts +++ b/packages/cli/src/concurrency/concurrency-control.service.ts @@ -7,7 +7,8 @@ import { InvalidConcurrencyLimitError } from '@/errors/invalid-concurrency-limit import { UnknownExecutionModeError } from '@/errors/unknown-execution-mode.error'; import { EventService } from 
'@/events/event.service'; import type { IExecutingWorkflowData } from '@/interfaces'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; +import type { LogMetadata } from '@/logging/types'; import { Telemetry } from '@/telemetry'; import { ConcurrencyQueue } from './concurrency-queue'; @@ -70,7 +71,6 @@ export class ConcurrencyControlService { this.productionQueue.on('execution-released', async (executionId) => { this.log('Execution released', { executionId }); - await this.executionRepository.resetStartedAt(executionId); }); } @@ -171,8 +171,8 @@ export class ConcurrencyControlService { throw new UnknownExecutionModeError(mode); } - private log(message: string, meta?: object) { - this.logger.debug(['[Concurrency Control]', message].join(' '), meta); + private log(message: string, metadata?: LogMetadata) { + this.logger.debug(['[Concurrency Control]', message].join(' '), metadata); } private shouldReport(capacity: number) { diff --git a/packages/cli/src/config/schema.ts b/packages/cli/src/config/schema.ts index e811fe8e10..047df9341e 100644 --- a/packages/cli/src/config/schema.ts +++ b/packages/cli/src/config/schema.ts @@ -1,7 +1,6 @@ import { GlobalConfig } from '@n8n/config'; import convict from 'convict'; import { InstanceSettings } from 'n8n-core'; -import { LOG_LEVELS } from 'n8n-workflow'; import path from 'path'; import { Container } from 'typedi'; @@ -296,41 +295,6 @@ export const schema = { env: 'EXTERNAL_HOOK_FILES', }, - logs: { - level: { - doc: 'Log output level', - format: LOG_LEVELS, - default: 'info', - env: 'N8N_LOG_LEVEL', - }, - output: { - doc: 'Where to output logs. Options are: console, file. 
Multiple can be separated by comma (",")', - format: String, - default: 'console', - env: 'N8N_LOG_OUTPUT', - }, - file: { - fileCountMax: { - doc: 'Maximum number of files to keep.', - format: Number, - default: 100, - env: 'N8N_LOG_FILE_COUNT_MAX', - }, - fileSizeMax: { - doc: 'Maximum size for each log file in MB.', - format: Number, - default: 16, - env: 'N8N_LOG_FILE_SIZE_MAX', - }, - location: { - doc: 'Log file location; only used if log output is set to file.', - format: String, - default: path.join(Container.get(InstanceSettings).n8nFolder, 'logs/n8n.log'), - env: 'N8N_LOG_FILE_LOCATION', - }, - }, - }, - push: { backend: { format: ['sse', 'websocket'] as const, diff --git a/packages/cli/src/constants.ts b/packages/cli/src/constants.ts index 447b32e42f..5d458ca376 100644 --- a/packages/cli/src/constants.ts +++ b/packages/cli/src/constants.ts @@ -168,6 +168,8 @@ export const ARTIFICIAL_TASK_DATA = { ], }; +/** Lowest priority, meaning shut down happens after other groups */ export const LOWEST_SHUTDOWN_PRIORITY = 0; export const DEFAULT_SHUTDOWN_PRIORITY = 100; +/** Highest priority, meaning shut down happens before all other groups */ export const HIGHEST_SHUTDOWN_PRIORITY = 200; diff --git a/packages/cli/src/controllers/__tests__/api-keys.controller.test.ts b/packages/cli/src/controllers/__tests__/api-keys.controller.test.ts new file mode 100644 index 0000000000..81025fb2ca --- /dev/null +++ b/packages/cli/src/controllers/__tests__/api-keys.controller.test.ts @@ -0,0 +1,79 @@ +import { mock } from 'jest-mock-extended'; +import { randomString } from 'n8n-workflow'; +import { Container } from 'typedi'; + +import type { ApiKey } from '@/databases/entities/api-key'; +import type { User } from '@/databases/entities/user'; +import { ApiKeyRepository } from '@/databases/repositories/api-key.repository'; +import type { ApiKeysRequest, AuthenticatedRequest } from '@/requests'; +import { API_KEY_PREFIX } from '@/services/public-api-key.service'; +import { 
mockInstance } from '@test/mocking'; + +import { ApiKeysController } from '../api-keys.controller'; + +describe('ApiKeysController', () => { + const apiKeysRepository = mockInstance(ApiKeyRepository); + const controller = Container.get(ApiKeysController); + + let req: AuthenticatedRequest; + beforeAll(() => { + req = mock({ user: mock({ id: '123' }) }); + }); + + describe('createAPIKey', () => { + it('should create and save an API key', async () => { + const apiKeyData = { + id: '123', + userId: '123', + label: 'My API Key', + apiKey: `${API_KEY_PREFIX}${randomString(42)}`, + createdAt: new Date(), + } as ApiKey; + + apiKeysRepository.upsert.mockImplementation(); + + apiKeysRepository.findOneByOrFail.mockResolvedValue(apiKeyData); + + const newApiKey = await controller.createAPIKey(req); + + expect(apiKeysRepository.upsert).toHaveBeenCalled(); + expect(apiKeyData).toEqual(newApiKey); + }); + }); + + describe('getAPIKeys', () => { + it('should return the users api keys redacted', async () => { + const apiKeyData = { + id: '123', + userId: '123', + label: 'My API Key', + apiKey: `${API_KEY_PREFIX}${randomString(42)}`, + createdAt: new Date(), + } as ApiKey; + + apiKeysRepository.findBy.mockResolvedValue([apiKeyData]); + + const apiKeys = await controller.getAPIKeys(req); + expect(apiKeys[0].apiKey).not.toEqual(apiKeyData.apiKey); + expect(apiKeysRepository.findBy).toHaveBeenCalledWith({ userId: req.user.id }); + }); + }); + + describe('deleteAPIKey', () => { + it('should delete the API key', async () => { + const user = mock({ + id: '123', + password: 'password', + authIdentities: [], + role: 'global:member', + mfaEnabled: false, + }); + const req = mock({ user, params: { id: user.id } }); + await controller.deleteAPIKey(req); + expect(apiKeysRepository.delete).toHaveBeenCalledWith({ + userId: req.user.id, + id: req.params.id, + }); + }); + }); +}); diff --git a/packages/cli/src/controllers/__tests__/me.controller.test.ts 
b/packages/cli/src/controllers/__tests__/me.controller.test.ts index 7f5f861b0e..37c391a2dc 100644 --- a/packages/cli/src/controllers/__tests__/me.controller.test.ts +++ b/packages/cli/src/controllers/__tests__/me.controller.test.ts @@ -2,14 +2,11 @@ import { UserUpdateRequestDto } from '@n8n/api-types'; import type { Response } from 'express'; import { mock, anyObject } from 'jest-mock-extended'; import jwt from 'jsonwebtoken'; -import { randomString } from 'n8n-workflow'; import { Container } from 'typedi'; import { AUTH_COOKIE_NAME } from '@/constants'; import { MeController } from '@/controllers/me.controller'; -import type { ApiKey } from '@/databases/entities/api-key'; import type { User } from '@/databases/entities/user'; -import { ApiKeyRepository } from '@/databases/repositories/api-key.repository'; import { AuthUserRepository } from '@/databases/repositories/auth-user.repository'; import { InvalidAuthTokenRepository } from '@/databases/repositories/invalid-auth-token.repository'; import { UserRepository } from '@/databases/repositories/user.repository'; @@ -21,7 +18,6 @@ import type { PublicUser } from '@/interfaces'; import { License } from '@/license'; import { MfaService } from '@/mfa/mfa.service'; import type { AuthenticatedRequest, MeRequest } from '@/requests'; -import { API_KEY_PREFIX } from '@/services/public-api-key.service'; import { UserService } from '@/services/user.service'; import { mockInstance } from '@test/mocking'; import { badPasswords } from '@test/test-data'; @@ -34,7 +30,6 @@ describe('MeController', () => { const userService = mockInstance(UserService); const userRepository = mockInstance(UserRepository); const mockMfaService = mockInstance(MfaService); - const apiKeysRepository = mockInstance(ApiKeyRepository); mockInstance(AuthUserRepository); mockInstance(InvalidAuthTokenRepository); mockInstance(License).isWithinUsersLimit.mockReturnValue(true); @@ -413,68 +408,4 @@ describe('MeController', () => { await 
expect(controller.storeSurveyAnswers(req)).rejects.toThrowError(BadRequestError); }); }); - - describe('API Key methods', () => { - let req: AuthenticatedRequest; - beforeAll(() => { - req = mock({ user: mock({ id: '123' }) }); - }); - - describe('createAPIKey', () => { - it('should create and save an API key', async () => { - const apiKeyData = { - id: '123', - userId: '123', - label: 'My API Key', - apiKey: `${API_KEY_PREFIX}${randomString(42)}`, - createdAt: new Date(), - } as ApiKey; - - apiKeysRepository.upsert.mockImplementation(); - - apiKeysRepository.findOneByOrFail.mockResolvedValue(apiKeyData); - - const newApiKey = await controller.createAPIKey(req); - - expect(apiKeysRepository.upsert).toHaveBeenCalled(); - expect(apiKeyData).toEqual(newApiKey); - }); - }); - - describe('getAPIKeys', () => { - it('should return the users api keys redacted', async () => { - const apiKeyData = { - id: '123', - userId: '123', - label: 'My API Key', - apiKey: `${API_KEY_PREFIX}${randomString(42)}`, - createdAt: new Date(), - } as ApiKey; - - apiKeysRepository.findBy.mockResolvedValue([apiKeyData]); - - const apiKeys = await controller.getAPIKeys(req); - expect(apiKeys[0].apiKey).not.toEqual(apiKeyData.apiKey); - expect(apiKeysRepository.findBy).toHaveBeenCalledWith({ userId: req.user.id }); - }); - }); - - describe('deleteAPIKey', () => { - it('should delete the API key', async () => { - const user = mock({ - id: '123', - password: 'password', - authIdentities: [], - role: 'global:member', - mfaEnabled: false, - }); - const req = mock({ user, params: { id: user.id } }); - await controller.deleteAPIKey(req); - expect(apiKeysRepository.delete).toHaveBeenCalledWith({ - userId: req.user.id, - id: req.params.id, - }); - }); - }); - }); }); diff --git a/packages/cli/src/controllers/api-keys.controller.ts b/packages/cli/src/controllers/api-keys.controller.ts new file mode 100644 index 0000000000..db53a00449 --- /dev/null +++ b/packages/cli/src/controllers/api-keys.controller.ts 
@@ -0,0 +1,56 @@ +import { type RequestHandler } from 'express'; + +import { Delete, Get, Post, RestController } from '@/decorators'; +import { EventService } from '@/events/event.service'; +import { isApiEnabled } from '@/public-api'; +import { ApiKeysRequest, AuthenticatedRequest } from '@/requests'; +import { PublicApiKeyService } from '@/services/public-api-key.service'; + +export const isApiEnabledMiddleware: RequestHandler = (_, res, next) => { + if (isApiEnabled()) { + next(); + } else { + res.status(404).end(); + } +}; + +@RestController('/api-keys') +export class ApiKeysController { + constructor( + private readonly eventService: EventService, + private readonly publicApiKeyService: PublicApiKeyService, + ) {} + + /** + * Create an API Key + */ + @Post('/', { middlewares: [isApiEnabledMiddleware] }) + async createAPIKey(req: AuthenticatedRequest) { + const newApiKey = await this.publicApiKeyService.createPublicApiKeyForUser(req.user); + + this.eventService.emit('public-api-key-created', { user: req.user, publicApi: false }); + + return newApiKey; + } + + /** + * Get API keys + */ + @Get('/', { middlewares: [isApiEnabledMiddleware] }) + async getAPIKeys(req: AuthenticatedRequest) { + const apiKeys = await this.publicApiKeyService.getRedactedApiKeysForUser(req.user); + return apiKeys; + } + + /** + * Delete an API Key + */ + @Delete('/:id', { middlewares: [isApiEnabledMiddleware] }) + async deleteAPIKey(req: ApiKeysRequest.DeleteAPIKey) { + await this.publicApiKeyService.deleteApiKeyForUser(req.user, req.params.id); + + this.eventService.emit('public-api-key-deleted', { user: req.user, publicApi: false }); + + return { success: true }; + } +} diff --git a/packages/cli/src/controllers/auth.controller.ts b/packages/cli/src/controllers/auth.controller.ts index 25f069ad20..c2ee1c92fb 100644 --- a/packages/cli/src/controllers/auth.controller.ts +++ b/packages/cli/src/controllers/auth.controller.ts @@ -14,7 +14,7 @@ import { ForbiddenError } from 
'@/errors/response-errors/forbidden.error'; import { EventService } from '@/events/event.service'; import type { PublicUser } from '@/interfaces'; import { License } from '@/license'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { MfaService } from '@/mfa/mfa.service'; import { PostHogClient } from '@/posthog'; import { AuthenticatedRequest, LoginRequest, UserRequest } from '@/requests'; diff --git a/packages/cli/src/controllers/e2e.controller.ts b/packages/cli/src/controllers/e2e.controller.ts index 5137d5b4af..06c4f68c7e 100644 --- a/packages/cli/src/controllers/e2e.controller.ts +++ b/packages/cli/src/controllers/e2e.controller.ts @@ -14,7 +14,7 @@ import { MessageEventBus } from '@/eventbus/message-event-bus/message-event-bus' import type { BooleanLicenseFeature, NumericLicenseFeature } from '@/interfaces'; import type { FeatureReturnType } from '@/license'; import { License } from '@/license'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { MfaService } from '@/mfa/mfa.service'; import { Push } from '@/push'; import type { UserSetupPayload } from '@/requests'; diff --git a/packages/cli/src/controllers/invitation.controller.ts b/packages/cli/src/controllers/invitation.controller.ts index 411ab7a03f..cbd2afb9f4 100644 --- a/packages/cli/src/controllers/invitation.controller.ts +++ b/packages/cli/src/controllers/invitation.controller.ts @@ -12,7 +12,7 @@ import { ForbiddenError } from '@/errors/response-errors/forbidden.error'; import { EventService } from '@/events/event.service'; import { ExternalHooks } from '@/external-hooks'; import { License } from '@/license'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { PostHogClient } from '@/posthog'; import { UserRequest } from '@/requests'; import { PasswordUtility } from '@/services/password.utility'; diff --git a/packages/cli/src/controllers/me.controller.ts 
b/packages/cli/src/controllers/me.controller.ts index aac1b48833..6cbbda3622 100644 --- a/packages/cli/src/controllers/me.controller.ts +++ b/packages/cli/src/controllers/me.controller.ts @@ -4,37 +4,26 @@ import { UserUpdateRequestDto, } from '@n8n/api-types'; import { plainToInstance } from 'class-transformer'; -import { type RequestHandler, Response } from 'express'; +import { Response } from 'express'; import { AuthService } from '@/auth/auth.service'; import type { User } from '@/databases/entities/user'; import { UserRepository } from '@/databases/repositories/user.repository'; -import { Body, Delete, Get, Patch, Post, RestController } from '@/decorators'; +import { Body, Patch, Post, RestController } from '@/decorators'; import { BadRequestError } from '@/errors/response-errors/bad-request.error'; import { InvalidMfaCodeError } from '@/errors/response-errors/invalid-mfa-code.error'; import { EventService } from '@/events/event.service'; import { ExternalHooks } from '@/external-hooks'; import { validateEntity } from '@/generic-helpers'; import type { PublicUser } from '@/interfaces'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { MfaService } from '@/mfa/mfa.service'; -import { isApiEnabled } from '@/public-api'; import { AuthenticatedRequest, MeRequest } from '@/requests'; import { PasswordUtility } from '@/services/password.utility'; -import { PublicApiKeyService } from '@/services/public-api-key.service'; import { UserService } from '@/services/user.service'; import { isSamlLicensedAndEnabled } from '@/sso/saml/saml-helpers'; import { PersonalizationSurveyAnswersV4 } from './survey-answers.dto'; - -export const isApiEnabledMiddleware: RequestHandler = (_, res, next) => { - if (isApiEnabled()) { - next(); - } else { - res.status(404).end(); - } -}; - @RestController('/me') export class MeController { constructor( @@ -46,7 +35,6 @@ export class MeController { private readonly userRepository: UserRepository, 
private readonly eventService: EventService, private readonly mfaService: MfaService, - private readonly publicApiKeyService: PublicApiKeyService, ) {} /** @@ -217,39 +205,6 @@ export class MeController { return { success: true }; } - /** - * Create an API Key - */ - @Post('/api-keys', { middlewares: [isApiEnabledMiddleware] }) - async createAPIKey(req: AuthenticatedRequest) { - const newApiKey = await this.publicApiKeyService.createPublicApiKeyForUser(req.user); - - this.eventService.emit('public-api-key-created', { user: req.user, publicApi: false }); - - return newApiKey; - } - - /** - * Get API keys - */ - @Get('/api-keys', { middlewares: [isApiEnabledMiddleware] }) - async getAPIKeys(req: AuthenticatedRequest) { - const apiKeys = await this.publicApiKeyService.getRedactedApiKeysForUser(req.user); - return apiKeys; - } - - /** - * Delete an API Key - */ - @Delete('/api-keys/:id', { middlewares: [isApiEnabledMiddleware] }) - async deleteAPIKey(req: MeRequest.DeleteAPIKey) { - await this.publicApiKeyService.deleteApiKeyForUser(req.user, req.params.id); - - this.eventService.emit('public-api-key-deleted', { user: req.user, publicApi: false }); - - return { success: true }; - } - /** * Update the logged-in user's settings. 
*/ diff --git a/packages/cli/src/controllers/oauth/__tests__/oauth1-credential.controller.test.ts b/packages/cli/src/controllers/oauth/__tests__/oauth1-credential.controller.test.ts index b2c0d13f21..68a86269d3 100644 --- a/packages/cli/src/controllers/oauth/__tests__/oauth1-credential.controller.test.ts +++ b/packages/cli/src/controllers/oauth/__tests__/oauth1-credential.controller.test.ts @@ -15,7 +15,7 @@ import { VariablesService } from '@/environments/variables/variables.service.ee' import { BadRequestError } from '@/errors/response-errors/bad-request.error'; import { NotFoundError } from '@/errors/response-errors/not-found.error'; import { ExternalHooks } from '@/external-hooks'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import type { OAuthRequest } from '@/requests'; import { SecretsHelper } from '@/secrets-helpers'; import { mockInstance } from '@test/mocking'; diff --git a/packages/cli/src/controllers/oauth/__tests__/oauth2-credential.controller.test.ts b/packages/cli/src/controllers/oauth/__tests__/oauth2-credential.controller.test.ts index 139ab9a983..9fc98d5557 100644 --- a/packages/cli/src/controllers/oauth/__tests__/oauth2-credential.controller.test.ts +++ b/packages/cli/src/controllers/oauth/__tests__/oauth2-credential.controller.test.ts @@ -15,7 +15,7 @@ import { VariablesService } from '@/environments/variables/variables.service.ee' import { BadRequestError } from '@/errors/response-errors/bad-request.error'; import { NotFoundError } from '@/errors/response-errors/not-found.error'; import { ExternalHooks } from '@/external-hooks'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import type { OAuthRequest } from '@/requests'; import { SecretsHelper } from '@/secrets-helpers'; import { mockInstance } from '@test/mocking'; diff --git a/packages/cli/src/controllers/oauth/abstract-oauth.controller.ts b/packages/cli/src/controllers/oauth/abstract-oauth.controller.ts 
index 6750c7c2a3..6e162af988 100644 --- a/packages/cli/src/controllers/oauth/abstract-oauth.controller.ts +++ b/packages/cli/src/controllers/oauth/abstract-oauth.controller.ts @@ -15,7 +15,7 @@ import { BadRequestError } from '@/errors/response-errors/bad-request.error'; import { NotFoundError } from '@/errors/response-errors/not-found.error'; import { ExternalHooks } from '@/external-hooks'; import type { ICredentialsDb } from '@/interfaces'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import type { OAuthRequest } from '@/requests'; import { UrlService } from '@/services/url.service'; import * as WorkflowExecuteAdditionalData from '@/workflow-execute-additional-data'; diff --git a/packages/cli/src/controllers/owner.controller.ts b/packages/cli/src/controllers/owner.controller.ts index 76a0191359..47d50ad3f0 100644 --- a/packages/cli/src/controllers/owner.controller.ts +++ b/packages/cli/src/controllers/owner.controller.ts @@ -9,7 +9,7 @@ import { GlobalScope, Post, RestController } from '@/decorators'; import { BadRequestError } from '@/errors/response-errors/bad-request.error'; import { EventService } from '@/events/event.service'; import { validateEntity } from '@/generic-helpers'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { PostHogClient } from '@/posthog'; import { OwnerRequest } from '@/requests'; import { PasswordUtility } from '@/services/password.utility'; diff --git a/packages/cli/src/controllers/password-reset.controller.ts b/packages/cli/src/controllers/password-reset.controller.ts index ed566cf129..88155e420a 100644 --- a/packages/cli/src/controllers/password-reset.controller.ts +++ b/packages/cli/src/controllers/password-reset.controller.ts @@ -13,7 +13,7 @@ import { UnprocessableRequestError } from '@/errors/response-errors/unprocessabl import { EventService } from '@/events/event.service'; import { ExternalHooks } from '@/external-hooks'; import { 
License } from '@/license'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { MfaService } from '@/mfa/mfa.service'; import { PasswordResetRequest } from '@/requests'; import { PasswordUtility } from '@/services/password.utility'; diff --git a/packages/cli/src/controllers/users.controller.ts b/packages/cli/src/controllers/users.controller.ts index c00fe48ad8..8e19be894d 100644 --- a/packages/cli/src/controllers/users.controller.ts +++ b/packages/cli/src/controllers/users.controller.ts @@ -18,7 +18,7 @@ import { NotFoundError } from '@/errors/response-errors/not-found.error'; import { EventService } from '@/events/event.service'; import { ExternalHooks } from '@/external-hooks'; import type { PublicUser } from '@/interfaces'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { listQueryMiddleware } from '@/middlewares'; import { AuthenticatedRequest, ListQuery, UserRequest } from '@/requests'; import { ProjectService } from '@/services/project.service'; diff --git a/packages/cli/src/controllers/workflow-statistics.controller.ts b/packages/cli/src/controllers/workflow-statistics.controller.ts index 1268643b14..58c99727db 100644 --- a/packages/cli/src/controllers/workflow-statistics.controller.ts +++ b/packages/cli/src/controllers/workflow-statistics.controller.ts @@ -7,7 +7,7 @@ import { WorkflowStatisticsRepository } from '@/databases/repositories/workflow- import { Get, Middleware, RestController } from '@/decorators'; import { NotFoundError } from '@/errors/response-errors/not-found.error'; import type { IWorkflowStatisticsDataLoaded } from '@/interfaces'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { StatisticsRequest } from './workflow-statistics.types'; diff --git a/packages/cli/src/crash-journal.ts b/packages/cli/src/crash-journal.ts index 184702c446..577a2f34fe 100644 --- a/packages/cli/src/crash-journal.ts +++ 
b/packages/cli/src/crash-journal.ts @@ -6,7 +6,7 @@ import { join, dirname } from 'path'; import { Container } from 'typedi'; import { inProduction } from '@/constants'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; export const touchFile = async (filePath: string): Promise => { await mkdir(dirname(filePath), { recursive: true }); diff --git a/packages/cli/src/credentials-overwrites.ts b/packages/cli/src/credentials-overwrites.ts index ec14bf7ecc..ed1b492dc6 100644 --- a/packages/cli/src/credentials-overwrites.ts +++ b/packages/cli/src/credentials-overwrites.ts @@ -5,7 +5,7 @@ import { Service } from 'typedi'; import { CredentialTypes } from '@/credential-types'; import type { ICredentialsOverwrite } from '@/interfaces'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; @Service() export class CredentialsOverwrites { diff --git a/packages/cli/src/credentials/credentials.controller.ts b/packages/cli/src/credentials/credentials.controller.ts index 41d9d163ba..76db501cf7 100644 --- a/packages/cli/src/credentials/credentials.controller.ts +++ b/packages/cli/src/credentials/credentials.controller.ts @@ -23,7 +23,7 @@ import { ForbiddenError } from '@/errors/response-errors/forbidden.error'; import { NotFoundError } from '@/errors/response-errors/not-found.error'; import { EventService } from '@/events/event.service'; import { License } from '@/license'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { listQueryMiddleware } from '@/middlewares'; import { CredentialRequest } from '@/requests'; import { NamingService } from '@/services/naming.service'; diff --git a/packages/cli/src/credentials/credentials.service.ee.ts b/packages/cli/src/credentials/credentials.service.ee.ts index 116137374a..aad78fe7b7 100644 --- a/packages/cli/src/credentials/credentials.service.ee.ts +++ b/packages/cli/src/credentials/credentials.service.ee.ts @@ -157,14 +157,6 
@@ export class EnterpriseCredentialsService { "You can't transfer a credential into the project that's already owning it.", ); } - if (sourceProject.type !== 'team' && sourceProject.type !== 'personal') { - throw new TransferCredentialError( - 'You can only transfer credentials out of personal or team projects.', - ); - } - if (destinationProject.type !== 'team') { - throw new TransferCredentialError('You can only transfer credentials into team projects.'); - } await this.sharedCredentialsRepository.manager.transaction(async (trx) => { // 6. transfer the credential diff --git a/packages/cli/src/credentials/credentials.service.ts b/packages/cli/src/credentials/credentials.service.ts index dc5ab4e6c7..f9bbf89e57 100644 --- a/packages/cli/src/credentials/credentials.service.ts +++ b/packages/cli/src/credentials/credentials.service.ts @@ -33,7 +33,7 @@ import { NotFoundError } from '@/errors/response-errors/not-found.error'; import { ExternalHooks } from '@/external-hooks'; import { validateEntity } from '@/generic-helpers'; import type { ICredentialsDb } from '@/interfaces'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { userHasScopes } from '@/permissions/check-access'; import type { CredentialRequest, ListQuery } from '@/requests'; import { CredentialsTester } from '@/services/credentials-tester.service'; diff --git a/packages/cli/src/databases/entities/execution-entity.ts b/packages/cli/src/databases/entities/execution-entity.ts index f481bb97f4..7b63b63eaf 100644 --- a/packages/cli/src/databases/entities/execution-entity.ts +++ b/packages/cli/src/databases/entities/execution-entity.ts @@ -47,7 +47,14 @@ export class ExecutionEntity { status: ExecutionStatus; @Column(datetimeColumnType) - startedAt: Date; + createdAt: Date; + + /** + * Time when the processing of the execution actually started. This column + * is `null` when an execution is enqueued but has not started yet. 
+ */ + @Column({ type: datetimeColumnType, nullable: true }) + startedAt: Date | null; @Index() @Column({ type: datetimeColumnType, nullable: true }) diff --git a/packages/cli/src/databases/migrations/common/1727427440136-SeparateExecutionCreationFromStart.ts b/packages/cli/src/databases/migrations/common/1727427440136-SeparateExecutionCreationFromStart.ts new file mode 100644 index 0000000000..a44450fa2f --- /dev/null +++ b/packages/cli/src/databases/migrations/common/1727427440136-SeparateExecutionCreationFromStart.ts @@ -0,0 +1,27 @@ +import type { MigrationContext, ReversibleMigration } from '@/databases/types'; + +export class SeparateExecutionCreationFromStart1727427440136 implements ReversibleMigration { + async up({ + schemaBuilder: { addColumns, column, dropNotNull }, + runQuery, + escape, + }: MigrationContext) { + await addColumns('execution_entity', [ + column('createdAt').notNull.timestamp().default('NOW()'), + ]); + + await dropNotNull('execution_entity', 'startedAt'); + + const executionEntity = escape.tableName('execution_entity'); + const createdAt = escape.columnName('createdAt'); + const startedAt = escape.columnName('startedAt'); + + // inaccurate for pre-migration rows but prevents `createdAt` from being nullable + await runQuery(`UPDATE ${executionEntity} SET ${createdAt} = ${startedAt};`); + } + + async down({ schemaBuilder: { dropColumns, addNotNull } }: MigrationContext) { + await dropColumns('execution_entity', ['createdAt']); + await addNotNull('execution_entity', 'startedAt'); + } +} diff --git a/packages/cli/src/databases/migrations/mysqldb/index.ts b/packages/cli/src/databases/migrations/mysqldb/index.ts index 288f18edbe..07b910b949 100644 --- a/packages/cli/src/databases/migrations/mysqldb/index.ts +++ b/packages/cli/src/databases/migrations/mysqldb/index.ts @@ -64,6 +64,7 @@ import { CreateInvalidAuthTokenTable1723627610222 } from '../common/172362761022 import { RefactorExecutionIndices1723796243146 } from 
'../common/1723796243146-RefactorExecutionIndices'; import { CreateAnnotationTables1724753530828 } from '../common/1724753530828-CreateExecutionAnnotationTables'; import { AddApiKeysTable1724951148974 } from '../common/1724951148974-AddApiKeysTable'; +import { SeparateExecutionCreationFromStart1727427440136 } from '../common/1727427440136-SeparateExecutionCreationFromStart'; export const mysqlMigrations: Migration[] = [ InitialMigration1588157391238, @@ -130,4 +131,5 @@ export const mysqlMigrations: Migration[] = [ RefactorExecutionIndices1723796243146, CreateAnnotationTables1724753530828, AddApiKeysTable1724951148974, + SeparateExecutionCreationFromStart1727427440136, ]; diff --git a/packages/cli/src/databases/migrations/postgresdb/index.ts b/packages/cli/src/databases/migrations/postgresdb/index.ts index 077d686b7e..21b90e201d 100644 --- a/packages/cli/src/databases/migrations/postgresdb/index.ts +++ b/packages/cli/src/databases/migrations/postgresdb/index.ts @@ -64,6 +64,7 @@ import { CreateInvalidAuthTokenTable1723627610222 } from '../common/172362761022 import { RefactorExecutionIndices1723796243146 } from '../common/1723796243146-RefactorExecutionIndices'; import { CreateAnnotationTables1724753530828 } from '../common/1724753530828-CreateExecutionAnnotationTables'; import { AddApiKeysTable1724951148974 } from '../common/1724951148974-AddApiKeysTable'; +import { SeparateExecutionCreationFromStart1727427440136 } from '../common/1727427440136-SeparateExecutionCreationFromStart'; export const postgresMigrations: Migration[] = [ InitialMigration1587669153312, @@ -130,4 +131,5 @@ export const postgresMigrations: Migration[] = [ RefactorExecutionIndices1723796243146, CreateAnnotationTables1724753530828, AddApiKeysTable1724951148974, + SeparateExecutionCreationFromStart1727427440136, ]; diff --git a/packages/cli/src/databases/migrations/sqlite/index.ts b/packages/cli/src/databases/migrations/sqlite/index.ts index 62fda4b7d0..2828bb3f59 100644 --- 
a/packages/cli/src/databases/migrations/sqlite/index.ts +++ b/packages/cli/src/databases/migrations/sqlite/index.ts @@ -61,6 +61,7 @@ import { AddConstraintToExecutionMetadata1720101653148 } from '../common/1720101 import { CreateInvalidAuthTokenTable1723627610222 } from '../common/1723627610222-CreateInvalidAuthTokenTable'; import { RefactorExecutionIndices1723796243146 } from '../common/1723796243146-RefactorExecutionIndices'; import { CreateAnnotationTables1724753530828 } from '../common/1724753530828-CreateExecutionAnnotationTables'; +import { SeparateExecutionCreationFromStart1727427440136 } from '../common/1727427440136-SeparateExecutionCreationFromStart'; const sqliteMigrations: Migration[] = [ InitialMigration1588102412422, @@ -124,6 +125,7 @@ const sqliteMigrations: Migration[] = [ RefactorExecutionIndices1723796243146, CreateAnnotationTables1724753530828, AddApiKeysTable1724951148974, + SeparateExecutionCreationFromStart1727427440136, ]; export { sqliteMigrations }; diff --git a/packages/cli/src/databases/repositories/__tests__/execution.repository.test.ts b/packages/cli/src/databases/repositories/__tests__/execution.repository.test.ts index ac45b71bc4..48f3119780 100644 --- a/packages/cli/src/databases/repositories/__tests__/execution.repository.test.ts +++ b/packages/cli/src/databases/repositories/__tests__/execution.repository.test.ts @@ -12,7 +12,7 @@ import { mockInstance, mockEntityManager } from '@test/mocking'; describe('ExecutionRepository', () => { const entityManager = mockEntityManager(ExecutionEntity); - const globalConfig = mockInstance(GlobalConfig); + const globalConfig = mockInstance(GlobalConfig, { logging: { outputs: ['console'] } }); const binaryDataService = mockInstance(BinaryDataService); const executionRepository = Container.get(ExecutionRepository); const mockDate = new Date('2023-12-28 12:34:56.789Z'); diff --git a/packages/cli/src/databases/repositories/execution.repository.ts 
b/packages/cli/src/databases/repositories/execution.repository.ts index d76d78c99a..7b26463969 100644 --- a/packages/cli/src/databases/repositories/execution.repository.ts +++ b/packages/cli/src/databases/repositories/execution.repository.ts @@ -42,12 +42,12 @@ import { ExecutionAnnotation } from '@/databases/entities/execution-annotation.e import { PostgresLiveRowsRetrievalError } from '@/errors/postgres-live-rows-retrieval.error'; import type { ExecutionSummaries } from '@/executions/execution.types'; import type { - ExecutionPayload, + CreateExecutionPayload, IExecutionBase, IExecutionFlattedDb, IExecutionResponse, } from '@/interfaces'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { separate } from '@/utils'; import { ExecutionDataRepository } from './execution-data.repository'; @@ -198,7 +198,7 @@ export class ExecutionRepository extends Repository { return executions.map((execution) => { const { executionData, ...rest } = execution; return rest; - }); + }) as IExecutionFlattedDb[] | IExecutionResponse[] | IExecutionBase[]; } reportInvalidExecutions(executions: ExecutionEntity[]) { @@ -297,15 +297,15 @@ export class ExecutionRepository extends Repository { }), ...(options?.includeAnnotation && serializedAnnotation && { annotation: serializedAnnotation }), - }; + } as IExecutionFlattedDb | IExecutionResponse | IExecutionBase; } /** * Insert a new execution and its execution data using a transaction. */ - async createNewExecution(execution: ExecutionPayload): Promise { + async createNewExecution(execution: CreateExecutionPayload): Promise { const { data, workflowData, ...rest } = execution; - const { identifiers: inserted } = await this.insert(rest); + const { identifiers: inserted } = await this.insert({ ...rest, createdAt: new Date() }); const { id: executionId } = inserted[0] as { id: string }; const { connections, nodes, name, settings } = workflowData ?? 
{}; await this.executionDataRepository.insert({ @@ -340,20 +340,25 @@ export class ExecutionRepository extends Repository { ]); } - async updateStatus(executionId: string, status: ExecutionStatus) { - await this.update({ id: executionId }, { status }); - } + async setRunning(executionId: string) { + const startedAt = new Date(); - async resetStartedAt(executionId: string) { - await this.update({ id: executionId }, { startedAt: new Date() }); + await this.update({ id: executionId }, { status: 'running', startedAt }); + + return startedAt; } async updateExistingExecution(executionId: string, execution: Partial) { - // Se isolate startedAt because it must be set when the execution starts and should never change. - // So we prevent updating it, if it's sent (it usually is and causes problems to executions that - // are resumed after waiting for some time, as a new startedAt is set) - const { id, data, workflowId, workflowData, startedAt, customData, ...executionInformation } = - execution; + const { + id, + data, + workflowId, + workflowData, + createdAt, // must never change + startedAt, // must never change + customData, + ...executionInformation + } = execution; if (Object.keys(executionInformation).length > 0) { await this.update({ id: executionId }, executionInformation); } @@ -721,6 +726,7 @@ export class ExecutionRepository extends Repository { mode: true, retryOf: true, status: true, + createdAt: true, startedAt: true, stoppedAt: true, }; @@ -806,6 +812,7 @@ export class ExecutionRepository extends Repository { // @tech_debt: These transformations should not be needed private toSummary(execution: { id: number | string; + createdAt?: Date | string; startedAt?: Date | string; stoppedAt?: Date | string; waitTill?: Date | string | null; @@ -817,6 +824,13 @@ export class ExecutionRepository extends Repository { return date; }; + if (execution.createdAt) { + execution.createdAt = + execution.createdAt instanceof Date + ? 
execution.createdAt.toISOString() + : normalizeDateString(execution.createdAt); + } + if (execution.startedAt) { execution.startedAt = execution.startedAt instanceof Date diff --git a/packages/cli/src/databases/subscribers/user-subscriber.ts b/packages/cli/src/databases/subscribers/user-subscriber.ts index 4bb2cc98d3..2f9e698890 100644 --- a/packages/cli/src/databases/subscribers/user-subscriber.ts +++ b/packages/cli/src/databases/subscribers/user-subscriber.ts @@ -3,7 +3,7 @@ import { EventSubscriber } from '@n8n/typeorm'; import { ApplicationError, ErrorReporterProxy } from 'n8n-workflow'; import { Container } from 'typedi'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { Project } from '../entities/project'; import { User } from '../entities/user'; diff --git a/packages/cli/src/databases/types.ts b/packages/cli/src/databases/types.ts index 21bfdc025a..2bb1802bf2 100644 --- a/packages/cli/src/databases/types.ts +++ b/packages/cli/src/databases/types.ts @@ -1,7 +1,7 @@ import type { QueryRunner, ObjectLiteral } from '@n8n/typeorm'; import type { INodeTypes } from 'n8n-workflow'; -import type { Logger } from '@/logger'; +import type { Logger } from '@/logging/logger.service'; import type { createSchemaBuilder } from './dsl'; diff --git a/packages/cli/src/databases/utils/migration-helpers.ts b/packages/cli/src/databases/utils/migration-helpers.ts index 9d29e3fbe5..1093096f43 100644 --- a/packages/cli/src/databases/utils/migration-helpers.ts +++ b/packages/cli/src/databases/utils/migration-helpers.ts @@ -9,7 +9,7 @@ import { Container } from 'typedi'; import { inTest } from '@/constants'; import { createSchemaBuilder } from '@/databases/dsl'; import type { BaseMigration, Migration, MigrationContext, MigrationFn } from '@/databases/types'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { NodeTypes } from '@/node-types'; const PERSONALIZATION_SURVEY_FILENAME = 
'personalizationSurvey.json'; diff --git a/packages/cli/src/decorators/debounce.ts b/packages/cli/src/decorators/debounce.ts new file mode 100644 index 0000000000..6096ce522a --- /dev/null +++ b/packages/cli/src/decorators/debounce.ts @@ -0,0 +1,37 @@ +import debounce from 'lodash/debounce'; + +/** + * Debounce a class method using `lodash/debounce`. + * + * @param waitMs - Number of milliseconds to debounce method by. + * + * @example + * ``` + * class MyClass { + * @Debounce(1000) + * async myMethod() { + * // debounced + * } + * } + * ``` + */ +export const Debounce = + (waitMs: number): MethodDecorator => + ( + _: object, + methodName: string, + originalDescriptor: PropertyDescriptor, + ): TypedPropertyDescriptor => ({ + configurable: true, + + get() { + const debouncedFn = debounce(originalDescriptor.value, waitMs); + + Object.defineProperty(this, methodName, { + configurable: false, + value: debouncedFn, + }); + + return debouncedFn as T; + }, + }); diff --git a/packages/cli/src/decorators/redactable.ts b/packages/cli/src/decorators/redactable.ts index 51d02c5c3d..e2df19daa6 100644 --- a/packages/cli/src/decorators/redactable.ts +++ b/packages/cli/src/decorators/redactable.ts @@ -1,5 +1,5 @@ import { RedactableError } from '@/errors/redactable.error'; -import type { UserLike } from '@/events/relay-event-map'; +import type { UserLike } from '@/events/maps/relay.event-map'; function toRedactable(userLike: UserLike) { return { diff --git a/packages/cli/src/environments/source-control/source-control-export.service.ee.ts b/packages/cli/src/environments/source-control/source-control-export.service.ee.ts index 321534ff6c..9c495bbb8d 100644 --- a/packages/cli/src/environments/source-control/source-control-export.service.ee.ts +++ b/packages/cli/src/environments/source-control/source-control-export.service.ee.ts @@ -11,7 +11,7 @@ import { SharedWorkflowRepository } from '@/databases/repositories/shared-workfl import { TagRepository } from 
'@/databases/repositories/tag.repository'; import { WorkflowTagMappingRepository } from '@/databases/repositories/workflow-tag-mapping.repository'; import { WorkflowRepository } from '@/databases/repositories/workflow.repository'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { SOURCE_CONTROL_CREDENTIAL_EXPORT_FOLDER, diff --git a/packages/cli/src/environments/source-control/source-control-git.service.ee.ts b/packages/cli/src/environments/source-control/source-control-git.service.ee.ts index 14cdb9e81a..99571cdd52 100644 --- a/packages/cli/src/environments/source-control/source-control-git.service.ee.ts +++ b/packages/cli/src/environments/source-control/source-control-git.service.ee.ts @@ -14,7 +14,7 @@ import type { import { Service } from 'typedi'; import type { User } from '@/databases/entities/user'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { OwnershipService } from '@/services/ownership.service'; import { diff --git a/packages/cli/src/environments/source-control/source-control-helper.ee.ts b/packages/cli/src/environments/source-control/source-control-helper.ee.ts index 29393c5efe..35298d560a 100644 --- a/packages/cli/src/environments/source-control/source-control-helper.ee.ts +++ b/packages/cli/src/environments/source-control/source-control-helper.ee.ts @@ -4,7 +4,7 @@ import path from 'path'; import { Container } from 'typedi'; import { License } from '@/license'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { SOURCE_CONTROL_GIT_KEY_COMMENT, diff --git a/packages/cli/src/environments/source-control/source-control-import.service.ee.ts b/packages/cli/src/environments/source-control/source-control-import.service.ee.ts index fb491f39d2..b08ae27dd8 100644 --- a/packages/cli/src/environments/source-control/source-control-import.service.ee.ts +++ 
b/packages/cli/src/environments/source-control/source-control-import.service.ee.ts @@ -23,7 +23,7 @@ import { VariablesRepository } from '@/databases/repositories/variables.reposito import { WorkflowTagMappingRepository } from '@/databases/repositories/workflow-tag-mapping.repository'; import { WorkflowRepository } from '@/databases/repositories/workflow.repository'; import type { IWorkflowToImport } from '@/interfaces'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { isUniqueConstraintError } from '@/response-helper'; import { assertNever } from '@/utils'; diff --git a/packages/cli/src/environments/source-control/source-control-preferences.service.ee.ts b/packages/cli/src/environments/source-control/source-control-preferences.service.ee.ts index a99c5fb3e8..d3a34d784f 100644 --- a/packages/cli/src/environments/source-control/source-control-preferences.service.ee.ts +++ b/packages/cli/src/environments/source-control/source-control-preferences.service.ee.ts @@ -9,7 +9,7 @@ import Container, { Service } from 'typedi'; import config from '@/config'; import { SettingsRepository } from '@/databases/repositories/settings.repository'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { SOURCE_CONTROL_SSH_FOLDER, diff --git a/packages/cli/src/environments/source-control/source-control.service.ee.ts b/packages/cli/src/environments/source-control/source-control.service.ee.ts index 11340a0e15..32f0b39ef7 100644 --- a/packages/cli/src/environments/source-control/source-control.service.ee.ts +++ b/packages/cli/src/environments/source-control/source-control.service.ee.ts @@ -10,7 +10,7 @@ import type { Variables } from '@/databases/entities/variables'; import { TagRepository } from '@/databases/repositories/tag.repository'; import { BadRequestError } from '@/errors/response-errors/bad-request.error'; import { EventService } from '@/events/event.service'; -import { Logger } from 
'@/logger'; +import { Logger } from '@/logging/logger.service'; import { SOURCE_CONTROL_DEFAULT_EMAIL, diff --git a/packages/cli/src/errors/port-taken.error.ts b/packages/cli/src/errors/port-taken.error.ts deleted file mode 100644 index 30c63a679f..0000000000 --- a/packages/cli/src/errors/port-taken.error.ts +++ /dev/null @@ -1,9 +0,0 @@ -import { ApplicationError } from 'n8n-workflow'; - -export class PortTakenError extends ApplicationError { - constructor(port: number) { - super( - `Port ${port} is already in use. Do you already have the n8n main process running on that port?`, - ); - } -} diff --git a/packages/cli/src/errors/response-errors/webhook-not-found.error.ts b/packages/cli/src/errors/response-errors/webhook-not-found.error.ts index 648a7a0106..8b2d8bd26c 100644 --- a/packages/cli/src/errors/response-errors/webhook-not-found.error.ts +++ b/packages/cli/src/errors/response-errors/webhook-not-found.error.ts @@ -47,10 +47,13 @@ export class WebhookNotFoundError extends NotFoundError { ) { const errorMsg = webhookNotFoundErrorMessage({ path, httpMethod, webhookMethods }); - const hintMsg = - hint === 'default' - ? "Click the 'Test workflow' button on the canvas, then try again. (In test mode, the webhook only works for one call after you click this button)" - : "The workflow must be active for a production URL to run successfully. You can activate the workflow using the toggle in the top-right of the editor. Note that unlike test URL calls, production URL calls aren't shown on the canvas (only in the executions list)"; + let hintMsg = ''; + if (!webhookMethods?.length) { + hintMsg = + hint === 'default' + ? "Click the 'Test workflow' button on the canvas, then try again. (In test mode, the webhook only works for one call after you click this button)" + : "The workflow must be active for a production URL to run successfully. You can activate the workflow using the toggle in the top-right of the editor. 
Note that unlike test URL calls, production URL calls aren't shown on the canvas (only in the executions list)"; + } super(errorMsg, hintMsg); } diff --git a/packages/cli/src/eventbus/message-event-bus-destination/message-event-bus-destination-from-db.ts b/packages/cli/src/eventbus/message-event-bus-destination/message-event-bus-destination-from-db.ts index d901346d7e..4046855f30 100644 --- a/packages/cli/src/eventbus/message-event-bus-destination/message-event-bus-destination-from-db.ts +++ b/packages/cli/src/eventbus/message-event-bus-destination/message-event-bus-destination-from-db.ts @@ -2,7 +2,7 @@ import { MessageEventBusDestinationTypeNames } from 'n8n-workflow'; import { Container } from 'typedi'; import type { EventDestinations } from '@/databases/entities/event-destinations'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { MessageEventBusDestinationSentry } from './message-event-bus-destination-sentry.ee'; import { MessageEventBusDestinationSyslog } from './message-event-bus-destination-syslog.ee'; diff --git a/packages/cli/src/eventbus/message-event-bus-destination/message-event-bus-destination-syslog.ee.ts b/packages/cli/src/eventbus/message-event-bus-destination/message-event-bus-destination-syslog.ee.ts index dc1727cb0c..83db469d79 100644 --- a/packages/cli/src/eventbus/message-event-bus-destination/message-event-bus-destination-syslog.ee.ts +++ b/packages/cli/src/eventbus/message-event-bus-destination/message-event-bus-destination-syslog.ee.ts @@ -7,7 +7,7 @@ import { MessageEventBusDestinationTypeNames } from 'n8n-workflow'; import syslog from 'syslog-client'; import Container from 'typedi'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { MessageEventBusDestination } from './message-event-bus-destination.ee'; import { eventMessageGenericDestinationTestEvent } from '../event-message-classes/event-message-generic'; diff --git 
a/packages/cli/src/eventbus/message-event-bus-destination/message-event-bus-destination.ee.ts b/packages/cli/src/eventbus/message-event-bus-destination/message-event-bus-destination.ee.ts index 4d6725ff2c..7b65767b04 100644 --- a/packages/cli/src/eventbus/message-event-bus-destination/message-event-bus-destination.ee.ts +++ b/packages/cli/src/eventbus/message-event-bus-destination/message-event-bus-destination.ee.ts @@ -5,7 +5,7 @@ import { v4 as uuid } from 'uuid'; import { EventDestinationsRepository } from '@/databases/repositories/event-destinations.repository'; import { License } from '@/license'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import type { EventMessageTypes } from '../event-message-classes'; import type { AbstractEventMessage } from '../event-message-classes/abstract-event-message'; diff --git a/packages/cli/src/eventbus/message-event-bus-writer/message-event-bus-log-writer.ts b/packages/cli/src/eventbus/message-event-bus-writer/message-event-bus-log-writer.ts index 6b009f38e9..b9177faa07 100644 --- a/packages/cli/src/eventbus/message-event-bus-writer/message-event-bus-log-writer.ts +++ b/packages/cli/src/eventbus/message-event-bus-writer/message-event-bus-log-writer.ts @@ -12,7 +12,7 @@ import Container from 'typedi'; import { Worker } from 'worker_threads'; import { inTest } from '@/constants'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import type { EventMessageTypes } from '../event-message-classes'; import { isEventMessageOptions } from '../event-message-classes/abstract-event-message'; diff --git a/packages/cli/src/eventbus/message-event-bus/message-event-bus.ts b/packages/cli/src/eventbus/message-event-bus/message-event-bus.ts index fec5b4845b..0f622c2317 100644 --- a/packages/cli/src/eventbus/message-event-bus/message-event-bus.ts +++ b/packages/cli/src/eventbus/message-event-bus/message-event-bus.ts @@ -13,7 +13,7 @@ import { 
EventDestinationsRepository } from '@/databases/repositories/event-dest import { ExecutionRepository } from '@/databases/repositories/execution.repository'; import { WorkflowRepository } from '@/databases/repositories/workflow.repository'; import { License } from '@/license'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { OrchestrationService } from '@/services/orchestration.service'; import { ExecutionRecoveryService } from '../../executions/execution-recovery.service'; diff --git a/packages/cli/src/events/__tests__/log-streaming-event-relay.test.ts b/packages/cli/src/events/__tests__/log-streaming-event-relay.test.ts index d768218950..4727c8ef72 100644 --- a/packages/cli/src/events/__tests__/log-streaming-event-relay.test.ts +++ b/packages/cli/src/events/__tests__/log-streaming-event-relay.test.ts @@ -3,8 +3,8 @@ import type { INode, IRun, IWorkflowBase } from 'n8n-workflow'; import type { MessageEventBus } from '@/eventbus/message-event-bus/message-event-bus'; import { EventService } from '@/events/event.service'; -import { LogStreamingEventRelay } from '@/events/log-streaming-event-relay'; -import type { RelayEventMap } from '@/events/relay-event-map'; +import type { RelayEventMap } from '@/events/maps/relay.event-map'; +import { LogStreamingEventRelay } from '@/events/relays/log-streaming.event-relay'; import type { IWorkflowDb } from '@/interfaces'; describe('LogStreamingEventRelay', () => { diff --git a/packages/cli/src/events/__tests__/telemetry-event-relay.test.ts b/packages/cli/src/events/__tests__/telemetry-event-relay.test.ts index 9a05835205..6eecb8e812 100644 --- a/packages/cli/src/events/__tests__/telemetry-event-relay.test.ts +++ b/packages/cli/src/events/__tests__/telemetry-event-relay.test.ts @@ -9,8 +9,8 @@ import type { ProjectRelationRepository } from '@/databases/repositories/project import type { SharedWorkflowRepository } from '@/databases/repositories/shared-workflow.repository'; import type 
{ WorkflowRepository } from '@/databases/repositories/workflow.repository'; import { EventService } from '@/events/event.service'; -import type { RelayEventMap } from '@/events/relay-event-map'; -import { TelemetryEventRelay } from '@/events/telemetry-event-relay'; +import type { RelayEventMap } from '@/events/maps/relay.event-map'; +import { TelemetryEventRelay } from '@/events/relays/telemetry.event-relay'; import type { IWorkflowDb } from '@/interfaces'; import type { License } from '@/license'; import type { NodeTypes } from '@/node-types'; @@ -37,6 +37,10 @@ describe('TelemetryEventRelay', () => { includeQueueMetrics: false, }, }, + logging: { + level: 'info', + outputs: ['console'], + }, }); const workflowRepository = mock(); const nodeTypes = mock(); diff --git a/packages/cli/src/events/event.service.ts b/packages/cli/src/events/event.service.ts index 10ba7666ef..b8e00ecea7 100644 --- a/packages/cli/src/events/event.service.ts +++ b/packages/cli/src/events/event.service.ts @@ -2,11 +2,12 @@ import { Service } from 'typedi'; import { TypedEmitter } from '@/typed-emitter'; -import type { AiEventMap } from './ai-event-map'; -import type { QueueMetricsEventMap } from './queue-metrics-event-map'; -import type { RelayEventMap } from './relay-event-map'; +import type { AiEventMap } from './maps/ai.event-map'; +import type { PubSubEventMap } from './maps/pub-sub.event-map'; +import type { QueueMetricsEventMap } from './maps/queue-metrics.event-map'; +import type { RelayEventMap } from './maps/relay.event-map'; -type EventMap = RelayEventMap & QueueMetricsEventMap & AiEventMap; +type EventMap = RelayEventMap & QueueMetricsEventMap & AiEventMap & PubSubEventMap; @Service() export class EventService extends TypedEmitter {} diff --git a/packages/cli/src/events/ai-event-map.ts b/packages/cli/src/events/maps/ai.event-map.ts similarity index 100% rename from packages/cli/src/events/ai-event-map.ts rename to packages/cli/src/events/maps/ai.event-map.ts diff --git 
a/packages/cli/src/events/maps/pub-sub.event-map.ts b/packages/cli/src/events/maps/pub-sub.event-map.ts new file mode 100644 index 0000000000..9237e79d13 --- /dev/null +++ b/packages/cli/src/events/maps/pub-sub.event-map.ts @@ -0,0 +1,104 @@ +import type { WorkerStatus, PushType } from '@n8n/api-types'; + +import type { IWorkflowDb } from '@/interfaces'; + +export type PubSubEventMap = PubSubCommandMap & PubSubWorkerResponseMap; + +export type PubSubCommandMap = { + // #region Lifecycle + + 'reload-license': never; + + 'restart-event-bus': never; + + 'reload-external-secrets-providers': never; + + // #endregion + + // #region Community packages + + 'community-package-install': { + packageName: string; + packageVersion: string; + }; + + 'community-package-update': { + packageName: string; + packageVersion: string; + }; + + 'community-package-uninstall': { + packageName: string; + }; + + // #endregion + + // #region Worker view + + 'get-worker-id': never; + + 'get-worker-status': never; + + // #endregion + + // #region Multi-main setup + + 'add-webhooks-triggers-and-pollers': { + workflowId: string; + }; + + 'remove-triggers-and-pollers': { + workflowId: string; + }; + + 'display-workflow-activation': { + workflowId: string; + }; + + 'display-workflow-deactivation': { + workflowId: string; + }; + + 'display-workflow-activation-error': { + workflowId: string; + errorMessage: string; + }; + + 'relay-execution-lifecycle-event': { + type: PushType; + args: Record; + pushRef: string; + }; + + 'clear-test-webhooks': { + webhookKey: string; + workflowEntity: IWorkflowDb; + pushRef: string; + }; + + // #endregion +}; + +export type PubSubWorkerResponseMap = { + // #region Lifecycle + + 'restart-event-bus': { + result: 'success' | 'error'; + error?: string; + }; + + 'reload-external-secrets-providers': { + result: 'success' | 'error'; + error?: string; + }; + + // #endregion + + // #region Worker view + + 'get-worker-id': never; + + 'get-worker-status': WorkerStatus; + + // 
#endregion +}; diff --git a/packages/cli/src/events/queue-metrics-event-map.ts b/packages/cli/src/events/maps/queue-metrics.event-map.ts similarity index 100% rename from packages/cli/src/events/queue-metrics-event-map.ts rename to packages/cli/src/events/maps/queue-metrics.event-map.ts diff --git a/packages/cli/src/events/relay-event-map.ts b/packages/cli/src/events/maps/relay.event-map.ts similarity index 99% rename from packages/cli/src/events/relay-event-map.ts rename to packages/cli/src/events/maps/relay.event-map.ts index a53a36842e..a495820283 100644 --- a/packages/cli/src/events/relay-event-map.ts +++ b/packages/cli/src/events/maps/relay.event-map.ts @@ -11,7 +11,7 @@ import type { ProjectRole } from '@/databases/entities/project-relation'; import type { GlobalRole } from '@/databases/entities/user'; import type { IWorkflowDb } from '@/interfaces'; -import type { AiEventMap } from './ai-event-map'; +import type { AiEventMap } from './ai.event-map'; export type UserLike = { id: string; diff --git a/packages/cli/src/events/event-relay.ts b/packages/cli/src/events/relays/event-relay.ts similarity index 81% rename from packages/cli/src/events/event-relay.ts rename to packages/cli/src/events/relays/event-relay.ts index 3202b69c15..13e7dc01be 100644 --- a/packages/cli/src/events/event-relay.ts +++ b/packages/cli/src/events/relays/event-relay.ts @@ -1,8 +1,7 @@ import { Service } from 'typedi'; -import type { RelayEventMap } from '@/events/relay-event-map'; - -import { EventService } from './event.service'; +import { EventService } from '@/events/event.service'; +import type { RelayEventMap } from '@/events/maps/relay.event-map'; @Service() export class EventRelay { diff --git a/packages/cli/src/events/log-streaming-event-relay.ts b/packages/cli/src/events/relays/log-streaming.event-relay.ts similarity index 98% rename from packages/cli/src/events/log-streaming-event-relay.ts rename to packages/cli/src/events/relays/log-streaming.event-relay.ts index 
788e5e50c4..c65af2874c 100644 --- a/packages/cli/src/events/log-streaming-event-relay.ts +++ b/packages/cli/src/events/relays/log-streaming.event-relay.ts @@ -3,10 +3,9 @@ import { Service } from 'typedi'; import { Redactable } from '@/decorators/redactable'; import { MessageEventBus } from '@/eventbus/message-event-bus/message-event-bus'; -import { EventRelay } from '@/events/event-relay'; -import type { RelayEventMap } from '@/events/relay-event-map'; - -import { EventService } from './event.service'; +import { EventService } from '@/events/event.service'; +import type { RelayEventMap } from '@/events/maps/relay.event-map'; +import { EventRelay } from '@/events/relays/event-relay'; @Service() export class LogStreamingEventRelay extends EventRelay { diff --git a/packages/cli/src/events/telemetry-event-relay.ts b/packages/cli/src/events/relays/telemetry.event-relay.ts similarity index 99% rename from packages/cli/src/events/telemetry-event-relay.ts rename to packages/cli/src/events/relays/telemetry.event-relay.ts index 82beb17198..c813926bf1 100644 --- a/packages/cli/src/events/telemetry-event-relay.ts +++ b/packages/cli/src/events/relays/telemetry.event-relay.ts @@ -12,14 +12,14 @@ import { ProjectRelationRepository } from '@/databases/repositories/project-rela import { SharedWorkflowRepository } from '@/databases/repositories/shared-workflow.repository'; import { WorkflowRepository } from '@/databases/repositories/workflow.repository'; import { EventService } from '@/events/event.service'; -import type { RelayEventMap } from '@/events/relay-event-map'; +import type { RelayEventMap } from '@/events/maps/relay.event-map'; import { determineFinalExecutionStatus } from '@/execution-lifecycle-hooks/shared/shared-hook-functions'; import type { IExecutionTrackProperties } from '@/interfaces'; import { License } from '@/license'; import { NodeTypes } from '@/node-types'; import { EventRelay } from './event-relay'; -import { Telemetry } from '../telemetry'; +import { 
Telemetry } from '../../telemetry'; @Service() export class TelemetryEventRelay extends EventRelay { diff --git a/packages/cli/src/execution-lifecycle-hooks/__tests__/save-execution-progress.test.ts b/packages/cli/src/execution-lifecycle-hooks/__tests__/save-execution-progress.test.ts index 80e620fb17..c8e6b3e88f 100644 --- a/packages/cli/src/execution-lifecycle-hooks/__tests__/save-execution-progress.test.ts +++ b/packages/cli/src/execution-lifecycle-hooks/__tests__/save-execution-progress.test.ts @@ -9,7 +9,7 @@ import { ExecutionRepository } from '@/databases/repositories/execution.reposito import { saveExecutionProgress } from '@/execution-lifecycle-hooks/save-execution-progress'; import * as fnModule from '@/execution-lifecycle-hooks/to-save-settings'; import type { IExecutionResponse } from '@/interfaces'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { mockInstance } from '@test/mocking'; mockInstance(Logger); diff --git a/packages/cli/src/execution-lifecycle-hooks/restore-binary-data-id.ts b/packages/cli/src/execution-lifecycle-hooks/restore-binary-data-id.ts index 31b045ade8..d9a1a9a0e9 100644 --- a/packages/cli/src/execution-lifecycle-hooks/restore-binary-data-id.ts +++ b/packages/cli/src/execution-lifecycle-hooks/restore-binary-data-id.ts @@ -4,7 +4,7 @@ import type { IRun, WorkflowExecuteMode } from 'n8n-workflow'; import Container from 'typedi'; import config from '@/config'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; /** * Whenever the execution ID is not available to the binary data service at the diff --git a/packages/cli/src/execution-lifecycle-hooks/save-execution-progress.ts b/packages/cli/src/execution-lifecycle-hooks/save-execution-progress.ts index 212990211f..ca9899e1ec 100644 --- a/packages/cli/src/execution-lifecycle-hooks/save-execution-progress.ts +++ b/packages/cli/src/execution-lifecycle-hooks/save-execution-progress.ts @@ -4,7 +4,7 @@ import { 
Container } from 'typedi'; import { ExecutionRepository } from '@/databases/repositories/execution.repository'; import { toSaveSettings } from '@/execution-lifecycle-hooks/to-save-settings'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; export async function saveExecutionProgress( workflowData: IWorkflowBase, diff --git a/packages/cli/src/execution-lifecycle-hooks/shared/shared-hook-functions.ts b/packages/cli/src/execution-lifecycle-hooks/shared/shared-hook-functions.ts index d6d55e63e5..9596dd35df 100644 --- a/packages/cli/src/execution-lifecycle-hooks/shared/shared-hook-functions.ts +++ b/packages/cli/src/execution-lifecycle-hooks/shared/shared-hook-functions.ts @@ -3,8 +3,8 @@ import type { ExecutionStatus, IRun, IWorkflowBase } from 'n8n-workflow'; import { Container } from 'typedi'; import { ExecutionRepository } from '@/databases/repositories/execution.repository'; -import type { ExecutionPayload, IExecutionDb } from '@/interfaces'; -import { Logger } from '@/logger'; +import type { IExecutionDb, UpdateExecutionPayload } from '@/interfaces'; +import { Logger } from '@/logging/logger.service'; import { ExecutionMetadataService } from '@/services/execution-metadata.service'; import { isWorkflowIdValid } from '@/utils'; @@ -46,7 +46,7 @@ export function prepareExecutionDataForDbUpdate(parameters: { 'pinData', ]); - const fullExecutionData: ExecutionPayload = { + const fullExecutionData: UpdateExecutionPayload = { data: runData.data, mode: runData.mode, finished: runData.finished ? 
runData.finished : false, diff --git a/packages/cli/src/executions/execution-recovery.service.ts b/packages/cli/src/executions/execution-recovery.service.ts index e1e6a1f180..33576d1368 100644 --- a/packages/cli/src/executions/execution-recovery.service.ts +++ b/packages/cli/src/executions/execution-recovery.service.ts @@ -10,7 +10,7 @@ import { NodeCrashedError } from '@/errors/node-crashed.error'; import { WorkflowCrashedError } from '@/errors/workflow-crashed.error'; import { EventService } from '@/events/event.service'; import type { IExecutionResponse } from '@/interfaces'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { Push } from '@/push'; import { getWorkflowHooksMain } from '@/workflow-execute-additional-data'; // @TODO: Dependency cycle diff --git a/packages/cli/src/executions/execution.service.ts b/packages/cli/src/executions/execution.service.ts index 53023fce9a..5f4ec0c535 100644 --- a/packages/cli/src/executions/execution.service.ts +++ b/packages/cli/src/executions/execution.service.ts @@ -32,13 +32,13 @@ import { QueuedExecutionRetryError } from '@/errors/queued-execution-retry.error import { InternalServerError } from '@/errors/response-errors/internal-server.error'; import { NotFoundError } from '@/errors/response-errors/not-found.error'; import type { - ExecutionPayload, + CreateExecutionPayload, IExecutionFlattedResponse, IExecutionResponse, IWorkflowDb, } from '@/interfaces'; import { License } from '@/license'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { NodeTypes } from '@/node-types'; import { WaitTracker } from '@/wait-tracker'; import { WorkflowRunner } from '@/workflow-runner'; @@ -321,11 +321,10 @@ export class ExecutionService { }, }; - const fullExecutionData: ExecutionPayload = { + const fullExecutionData: CreateExecutionPayload = { data: executionData, mode, finished: false, - startedAt: new Date(), workflowData, workflowId: 
workflow.id, stoppedAt: new Date(), diff --git a/packages/cli/src/external-secrets/external-secrets-manager.ee.ts b/packages/cli/src/external-secrets/external-secrets-manager.ee.ts index 76db778d0a..e175f2969c 100644 --- a/packages/cli/src/external-secrets/external-secrets-manager.ee.ts +++ b/packages/cli/src/external-secrets/external-secrets-manager.ee.ts @@ -10,7 +10,7 @@ import type { SecretsProviderSettings, } from '@/interfaces'; import { License } from '@/license'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { OrchestrationService } from '@/services/orchestration.service'; import { EXTERNAL_SECRETS_INITIAL_BACKOFF, EXTERNAL_SECRETS_MAX_BACKOFF } from './constants'; diff --git a/packages/cli/src/external-secrets/providers/vault.ts b/packages/cli/src/external-secrets/providers/vault.ts index e325e69935..398c40745d 100644 --- a/packages/cli/src/external-secrets/providers/vault.ts +++ b/packages/cli/src/external-secrets/providers/vault.ts @@ -5,7 +5,7 @@ import { Container } from 'typedi'; import type { SecretsProviderSettings, SecretsProviderState } from '@/interfaces'; import { SecretsProvider } from '@/interfaces'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { DOCS_HELP_NOTICE, EXTERNAL_SECRETS_NAME_REGEX } from '../constants'; import { preferGet } from '../external-secrets-helper.ee'; diff --git a/packages/cli/src/interfaces.ts b/packages/cli/src/interfaces.ts index 4d767862bb..5c29eea093 100644 --- a/packages/cli/src/interfaces.ts +++ b/packages/cli/src/interfaces.ts @@ -115,6 +115,7 @@ export type SaveExecutionDataType = 'all' | 'none'; export interface IExecutionBase { id: string; mode: WorkflowExecuteMode; + createdAt: Date; // set by DB startedAt: Date; stoppedAt?: Date; // empty value means execution is still running workflowId: string; @@ -131,10 +132,11 @@ export interface IExecutionDb extends IExecutionBase { workflowData: IWorkflowBase; } -/** - * 
Payload for creating or updating an execution. - */ -export type ExecutionPayload = Omit; +/** Payload for creating an execution. */ +export type CreateExecutionPayload = Omit; + +/** Payload for updating an execution. */ +export type UpdateExecutionPayload = Omit; export interface IExecutionResponse extends IExecutionBase { id: string; diff --git a/packages/cli/src/ldap/ldap.service.ee.ts b/packages/cli/src/ldap/ldap.service.ee.ts index 84c79c7651..b552db6974 100644 --- a/packages/cli/src/ldap/ldap.service.ee.ts +++ b/packages/cli/src/ldap/ldap.service.ee.ts @@ -14,7 +14,7 @@ import { SettingsRepository } from '@/databases/repositories/settings.repository import { BadRequestError } from '@/errors/response-errors/bad-request.error'; import { InternalServerError } from '@/errors/response-errors/internal-server.error'; import { EventService } from '@/events/event.service'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { getCurrentAuthenticationMethod, isEmailCurrentAuthenticationMethod, diff --git a/packages/cli/src/license.ts b/packages/cli/src/license.ts index 81fb9eb608..0cde2bd922 100644 --- a/packages/cli/src/license.ts +++ b/packages/cli/src/license.ts @@ -6,7 +6,7 @@ import Container, { Service } from 'typedi'; import config from '@/config'; import { SettingsRepository } from '@/databases/repositories/settings.repository'; import { OnShutdown } from '@/decorators/on-shutdown'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { LicenseMetricsService } from '@/metrics/license-metrics.service'; import { OrchestrationService } from '@/services/orchestration.service'; diff --git a/packages/cli/src/license/license.service.ts b/packages/cli/src/license/license.service.ts index d92c181f8d..ee0e27bccb 100644 --- a/packages/cli/src/license/license.service.ts +++ b/packages/cli/src/license/license.service.ts @@ -6,7 +6,7 @@ import { WorkflowRepository } from 
'@/databases/repositories/workflow.repository import { BadRequestError } from '@/errors/response-errors/bad-request.error'; import { EventService } from '@/events/event.service'; import { License } from '@/license'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { UrlService } from '@/services/url.service'; type LicenseError = Error & { errorId?: keyof typeof LicenseErrors }; diff --git a/packages/cli/src/load-nodes-and-credentials.ts b/packages/cli/src/load-nodes-and-credentials.ts index 871913afba..662558e3d6 100644 --- a/packages/cli/src/load-nodes-and-credentials.ts +++ b/packages/cli/src/load-nodes-and-credentials.ts @@ -28,7 +28,7 @@ import { CLI_DIR, inE2ETests, } from '@/constants'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; interface LoadedNodesAndCredentials { nodes: INodeTypeData; diff --git a/packages/cli/src/logger.ts b/packages/cli/src/logger.ts deleted file mode 100644 index 7a24bdbf28..0000000000 --- a/packages/cli/src/logger.ts +++ /dev/null @@ -1,117 +0,0 @@ -import callsites from 'callsites'; -import { LoggerProxy, type IDataObject, LOG_LEVELS } from 'n8n-workflow'; -import { basename } from 'path'; -import { Service } from 'typedi'; -import { inspect } from 'util'; -import winston from 'winston'; - -import config from '@/config'; - -const noOp = () => {}; - -@Service() -export class Logger { - private logger: winston.Logger; - - constructor() { - const level = config.getEnv('logs.level'); - - this.logger = winston.createLogger({ - level, - silent: level === 'silent', - }); - - // Change all methods with higher log-level to no-op - for (const levelName of LOG_LEVELS) { - if (this.logger.levels[levelName] > this.logger.levels[level]) { - Object.defineProperty(this, levelName, { value: noOp }); - } - } - - const output = config - .getEnv('logs.output') - .split(',') - .map((line) => line.trim()); - - if (output.includes('console')) { - let format: 
winston.Logform.Format; - if (level === 'debug') { - format = winston.format.combine( - winston.format.metadata(), - winston.format.timestamp(), - winston.format.colorize({ all: true }), - - winston.format.printf(({ level: logLevel, message, timestamp, metadata }) => { - return `${timestamp} | ${logLevel.padEnd(18)} | ${message}${ - // eslint-disable-next-line @typescript-eslint/no-unsafe-argument - Object.keys(metadata).length ? ` ${JSON.stringify(inspect(metadata))}` : '' - }`; - }), - ); - } else { - format = winston.format.printf(({ message }: { message: string }) => message); - } - - this.logger.add( - new winston.transports.Console({ - format, - }), - ); - } - - if (output.includes('file')) { - const fileLogFormat = winston.format.combine( - winston.format.timestamp(), - winston.format.metadata(), - winston.format.json(), - ); - this.logger.add( - new winston.transports.File({ - filename: config.getEnv('logs.file.location'), - format: fileLogFormat, - maxsize: config.getEnv('logs.file.fileSizeMax') * 1048576, // config * 1mb - maxFiles: config.getEnv('logs.file.fileCountMax'), - }), - ); - } - - LoggerProxy.init(this); - } - - private log(level: (typeof LOG_LEVELS)[number], message: string, meta: object = {}): void { - const callsite = callsites(); - // We are using the third array element as the structure is as follows: - // [0]: this file - // [1]: Should be Logger - // [2]: Should point to the caller. 
- // Note: getting line number is useless because at this point - // We are in runtime, so it means we are looking at compiled js files - const logDetails = {} as IDataObject; - if (callsite[2] !== undefined) { - logDetails.file = basename(callsite[2].getFileName() || ''); - const functionName = callsite[2].getFunctionName(); - if (functionName) { - logDetails.function = functionName; - } - } - this.logger.log(level, message, { ...meta, ...logDetails }); - } - - // Convenience methods below - - error(message: string, meta: object = {}): void { - this.log('error', message, meta); - } - - warn(message: string, meta: object = {}): void { - this.log('warn', message, meta); - } - - info(message: string, meta: object = {}): void { - this.log('info', message, meta); - } - - debug(message: string, meta: object = {}): void { - this.log('debug', message, meta); - } -} diff --git a/packages/cli/src/logging/__tests__/logger.service.test.ts b/packages/cli/src/logging/__tests__/logger.service.test.ts new file mode 100644 index 0000000000..f699443909 --- /dev/null +++ b/packages/cli/src/logging/__tests__/logger.service.test.ts @@ -0,0 +1,145 @@ +import type { GlobalConfig } from '@n8n/config'; +import { mock } from 'jest-mock-extended'; +import type { InstanceSettings } from 'n8n-core'; + +import { Logger } from '@/logging/logger.service'; + +describe('Logger', () => { + describe('transports', () => { + test('if `console` selected, should set console transport', () => { + const globalConfig = mock({ + logging: { + level: 'info', + outputs: ['console'], + }, + }); + + const logger = new Logger(globalConfig, mock()); + + const { transports } = logger.getInternalLogger(); + + expect(transports).toHaveLength(1); + + const [transport] = transports; + + expect(transport.constructor.name).toBe('Console'); + }); + + test('if `file` selected, should set file transport', () => { + const globalConfig = mock({ + logging: { + level: 'info', + outputs: ['file'], + file: { + fileSizeMax: 100, + 
fileCountMax: 16, + location: 'logs/n8n.log', + }, + }, + }); + + const logger = new Logger(globalConfig, mock({ n8nFolder: '/tmp' })); + + const { transports } = logger.getInternalLogger(); + + expect(transports).toHaveLength(1); + + const [transport] = transports; + + expect(transport.constructor.name).toBe('File'); + }); + }); + + describe('levels', () => { + test('if `error` selected, should enable `error` level', () => { + const globalConfig = mock({ + logging: { + level: 'error', + outputs: ['console'], + }, + }); + + const logger = new Logger(globalConfig, mock()); + + const internalLogger = logger.getInternalLogger(); + + expect(internalLogger.isErrorEnabled()).toBe(true); + expect(internalLogger.isWarnEnabled()).toBe(false); + expect(internalLogger.isInfoEnabled()).toBe(false); + expect(internalLogger.isDebugEnabled()).toBe(false); + }); + + test('if `warn` selected, should enable `error` and `warn` levels', () => { + const globalConfig = mock({ + logging: { + level: 'warn', + outputs: ['console'], + }, + }); + + const logger = new Logger(globalConfig, mock()); + + const internalLogger = logger.getInternalLogger(); + + expect(internalLogger.isErrorEnabled()).toBe(true); + expect(internalLogger.isWarnEnabled()).toBe(true); + expect(internalLogger.isInfoEnabled()).toBe(false); + expect(internalLogger.isDebugEnabled()).toBe(false); + }); + + test('if `info` selected, should enable `error`, `warn`, and `info` levels', () => { + const globalConfig = mock({ + logging: { + level: 'info', + outputs: ['console'], + }, + }); + + const logger = new Logger(globalConfig, mock()); + + const internalLogger = logger.getInternalLogger(); + + expect(internalLogger.isErrorEnabled()).toBe(true); + expect(internalLogger.isWarnEnabled()).toBe(true); + expect(internalLogger.isInfoEnabled()).toBe(true); + expect(internalLogger.isDebugEnabled()).toBe(false); + }); + + test('if `debug` selected, should enable all levels', () => { + const globalConfig = mock({ + logging: { + level: 
'debug', + outputs: ['console'], + }, + }); + + const logger = new Logger(globalConfig, mock()); + + const internalLogger = logger.getInternalLogger(); + + expect(internalLogger.isErrorEnabled()).toBe(true); + expect(internalLogger.isWarnEnabled()).toBe(true); + expect(internalLogger.isInfoEnabled()).toBe(true); + expect(internalLogger.isDebugEnabled()).toBe(true); + }); + + test('if `silent` selected, should disable all levels', () => { + const globalConfig = mock({ + logging: { + level: 'silent', + outputs: ['console'], + }, + }); + + const logger = new Logger(globalConfig, mock()); + + const internalLogger = logger.getInternalLogger(); + + expect(internalLogger.isErrorEnabled()).toBe(false); + expect(internalLogger.isWarnEnabled()).toBe(false); + expect(internalLogger.isInfoEnabled()).toBe(false); + expect(internalLogger.isDebugEnabled()).toBe(false); + expect(internalLogger.silent).toBe(true); + }); + }); +}); diff --git a/packages/cli/src/logging/constants.ts b/packages/cli/src/logging/constants.ts new file mode 100644 index 0000000000..107327694b --- /dev/null +++ b/packages/cli/src/logging/constants.ts @@ -0,0 +1,3 @@ +export const noOp = () => {}; + +export const LOG_LEVELS = ['error', 'warn', 'info', 'debug', 'silent'] as const; diff --git a/packages/cli/src/logging/logger.service.ts b/packages/cli/src/logging/logger.service.ts new file mode 100644 index 0000000000..9a1b804859 --- /dev/null +++ b/packages/cli/src/logging/logger.service.ts @@ -0,0 +1,148 @@ +import { GlobalConfig } from '@n8n/config'; +import callsites from 'callsites'; +import { InstanceSettings } from 'n8n-core'; +import { LoggerProxy, LOG_LEVELS } from 'n8n-workflow'; +import path, { basename } from 'node:path'; +import { Service } from 'typedi'; +import winston from 'winston'; + +import { isObjectLiteral } from '@/utils'; + +import { noOp } from './constants'; +import type { LogLocationMetadata, LogLevel, LogMetadata } from './types'; + +@Service() +export class Logger { + private 
readonly internalLogger: winston.Logger; + + private readonly level: LogLevel; + + constructor( + private readonly globalConfig: GlobalConfig, + private readonly instanceSettings: InstanceSettings, + ) { + this.level = this.globalConfig.logging.level; + + const isSilent = this.level === 'silent'; + + this.internalLogger = winston.createLogger({ + level: this.level, + silent: isSilent, + }); + + if (!isSilent) { + this.setLevel(); + + const { outputs } = this.globalConfig.logging; + + if (outputs.includes('console')) this.setConsoleTransport(); + if (outputs.includes('file')) this.setFileTransport(); + } + + LoggerProxy.init(this); + } + + private log(level: LogLevel, message: string, metadata: LogMetadata) { + const location: LogLocationMetadata = {}; + + const caller = callsites().at(2); // zeroth and first are this file, second is caller + + if (caller !== undefined) { + location.file = basename(caller.getFileName() ?? ''); + const fnName = caller.getFunctionName(); + if (fnName) location.function = fnName; + } + + this.internalLogger.log(level, message, { ...metadata, ...location }); + } + + private setLevel() { + const { levels } = this.internalLogger; + + for (const logLevel of LOG_LEVELS) { + if (levels[logLevel] > levels[this.level]) { + // winston defines `{ error: 0, warn: 1, info: 2, debug: 5 }` + // so numerically higher (less severe) log levels become no-op + // to prevent overhead from `callsites` calls + Object.defineProperty(this, logLevel, { value: noOp }); + } + } + } + + private setConsoleTransport() { + const format = + this.level === 'debug' + ? 
winston.format.combine( + winston.format.metadata(), + winston.format.timestamp(), + winston.format.colorize({ all: true }), + winston.format.printf(({ level, message, timestamp, metadata }) => { + const _metadata = this.toPrintable(metadata); + return `${timestamp} | ${level.padEnd(18)} | ${message}${_metadata}`; + }), + ) + : winston.format.printf(({ message }: { message: string }) => message); + + this.internalLogger.add(new winston.transports.Console({ format })); + } + + private toPrintable(metadata: unknown) { + if (isObjectLiteral(metadata) && Object.keys(metadata).length > 0) { + return ' ' + JSON.stringify(metadata); + } + + return ''; + } + + private setFileTransport() { + const format = winston.format.combine( + winston.format.timestamp(), + winston.format.metadata(), + winston.format.json(), + ); + + const filename = path.join( + this.instanceSettings.n8nFolder, + this.globalConfig.logging.file.location, + ); + + const { fileSizeMax, fileCountMax } = this.globalConfig.logging.file; + + this.internalLogger.add( + new winston.transports.File({ + filename, + format, + maxsize: fileSizeMax * 1_048_576, // config * 1 MiB in bytes + maxFiles: fileCountMax, + }), + ); + } + + // #region Convenience methods + + error(message: string, metadata: LogMetadata = {}) { + this.log('error', message, metadata); + } + + warn(message: string, metadata: LogMetadata = {}) { + this.log('warn', message, metadata); + } + + info(message: string, metadata: LogMetadata = {}) { + this.log('info', message, metadata); + } + + debug(message: string, metadata: LogMetadata = {}) { + this.log('debug', message, metadata); + } + + // #endregion + + // #region For testing only + + getInternalLogger() { + return this.internalLogger; + } + + // #endregion +} diff --git a/packages/cli/src/logging/types.ts b/packages/cli/src/logging/types.ts new file mode 100644 index 0000000000..94b02d8ad7 --- /dev/null +++ b/packages/cli/src/logging/types.ts @@ -0,0 +1,7 @@ +import type { LOG_LEVELS } from 
'./constants'; + +export type LogLevel = (typeof LOG_LEVELS)[number]; + +export type LogLocationMetadata = Partial<{ file: string; function: string }>; + +export type LogMetadata = Record | Error; diff --git a/packages/cli/src/posthog/__tests__/posthog.test.ts b/packages/cli/src/posthog/__tests__/posthog.test.ts index f2869afcbf..5c8fe282bf 100644 --- a/packages/cli/src/posthog/__tests__/posthog.test.ts +++ b/packages/cli/src/posthog/__tests__/posthog.test.ts @@ -1,3 +1,5 @@ +import type { GlobalConfig } from '@n8n/config'; +import { mock } from 'jest-mock-extended'; import { InstanceSettings } from 'n8n-core'; import { PostHog } from 'posthog-node'; @@ -15,6 +17,8 @@ describe('PostHog', () => { const instanceSettings = mockInstance(InstanceSettings, { instanceId }); + const globalConfig = mock({ logging: { level: 'debug' } }); + beforeAll(() => { config.set('diagnostics.config.posthog.apiKey', apiKey); config.set('diagnostics.config.posthog.apiHost', apiHost); @@ -26,7 +30,7 @@ describe('PostHog', () => { }); it('inits PostHog correctly', async () => { - const ph = new PostHogClient(instanceSettings); + const ph = new PostHogClient(instanceSettings, globalConfig); await ph.init(); expect(PostHog.prototype.constructor).toHaveBeenCalledWith(apiKey, { host: apiHost }); @@ -35,7 +39,7 @@ describe('PostHog', () => { it('does not initialize or track if diagnostics are not enabled', async () => { config.set('diagnostics.enabled', false); - const ph = new PostHogClient(instanceSettings); + const ph = new PostHogClient(instanceSettings, globalConfig); await ph.init(); ph.track({ @@ -55,7 +59,7 @@ describe('PostHog', () => { test: true, }; - const ph = new PostHogClient(instanceSettings); + const ph = new PostHogClient(instanceSettings, globalConfig); await ph.init(); ph.track({ @@ -75,7 +79,7 @@ describe('PostHog', () => { it('gets feature flags', async () => { const createdAt = new Date(); - const ph = new PostHogClient(instanceSettings); + const ph = new 
PostHogClient(instanceSettings, globalConfig); await ph.init(); await ph.getFeatureFlags({ diff --git a/packages/cli/src/posthog/index.ts b/packages/cli/src/posthog/index.ts index 08a501f0fc..8dec9755b3 100644 --- a/packages/cli/src/posthog/index.ts +++ b/packages/cli/src/posthog/index.ts @@ -1,3 +1,4 @@ +import { GlobalConfig } from '@n8n/config'; import { InstanceSettings } from 'n8n-core'; import type { FeatureFlags, ITelemetryTrackProperties } from 'n8n-workflow'; import type { PostHog } from 'posthog-node'; @@ -10,7 +11,10 @@ import type { PublicUser } from '@/interfaces'; export class PostHogClient { private postHog?: PostHog; - constructor(private readonly instanceSettings: InstanceSettings) {} + constructor( + private readonly instanceSettings: InstanceSettings, + private readonly globalConfig: GlobalConfig, + ) {} async init() { const enabled = config.getEnv('diagnostics.enabled'); @@ -23,7 +27,7 @@ export class PostHogClient { host: config.getEnv('diagnostics.config.posthog.apiHost'), }); - const logLevel = config.getEnv('logs.level'); + const logLevel = this.globalConfig.logging.level; if (logLevel === 'debug') { this.postHog.debug(true); } diff --git a/packages/cli/src/push/__tests__/websocket.push.test.ts b/packages/cli/src/push/__tests__/websocket.push.test.ts index f62038c6b3..209f91b17e 100644 --- a/packages/cli/src/push/__tests__/websocket.push.test.ts +++ b/packages/cli/src/push/__tests__/websocket.push.test.ts @@ -4,7 +4,7 @@ import { Container } from 'typedi'; import type WebSocket from 'ws'; import type { User } from '@/databases/entities/user'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { WebSocketPush } from '@/push/websocket.push'; import { mockInstance } from '@test/mocking'; diff --git a/packages/cli/src/push/abstract.push.ts b/packages/cli/src/push/abstract.push.ts index f3ae6606ed..c56fa4c042 100644 --- a/packages/cli/src/push/abstract.push.ts +++ 
b/packages/cli/src/push/abstract.push.ts @@ -2,7 +2,7 @@ import type { PushPayload, PushType } from '@n8n/api-types'; import { assert, jsonStringify } from 'n8n-workflow'; import type { User } from '@/databases/entities/user'; -import type { Logger } from '@/logger'; +import type { Logger } from '@/logging/logger.service'; import type { OnPushMessage } from '@/push/types'; import { TypedEmitter } from '@/typed-emitter'; diff --git a/packages/cli/src/push/sse.push.ts b/packages/cli/src/push/sse.push.ts index 96af003b4b..85d16a3b42 100644 --- a/packages/cli/src/push/sse.push.ts +++ b/packages/cli/src/push/sse.push.ts @@ -1,7 +1,7 @@ import { Service } from 'typedi'; import type { User } from '@/databases/entities/user'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import SSEChannel from 'sse-channel'; import { AbstractPush } from './abstract.push'; diff --git a/packages/cli/src/push/websocket.push.ts b/packages/cli/src/push/websocket.push.ts index 013663cab6..dc60d70901 100644 --- a/packages/cli/src/push/websocket.push.ts +++ b/packages/cli/src/push/websocket.push.ts @@ -3,7 +3,7 @@ import { Service } from 'typedi'; import type WebSocket from 'ws'; import type { User } from '@/databases/entities/user'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { AbstractPush } from './abstract.push'; diff --git a/packages/cli/src/requests.ts b/packages/cli/src/requests.ts index ab4c32ad19..b8fa4b99ca 100644 --- a/packages/cli/src/requests.ts +++ b/packages/cli/src/requests.ts @@ -180,13 +180,20 @@ export declare namespace CredentialRequest { >; } +// ---------------------------------- +// /api-keys +// ---------------------------------- + +export declare namespace ApiKeysRequest { + export type DeleteAPIKey = AuthenticatedRequest<{ id: string }>; +} + // ---------------------------------- // /me // ---------------------------------- export declare namespace MeRequest { export type 
SurveyAnswers = AuthenticatedRequest<{}, {}, IPersonalizationSurveyAnswersV4>; - export type DeleteAPIKey = AuthenticatedRequest<{ id: string }>; } export interface UserSetupPayload { diff --git a/packages/cli/src/response-helper.ts b/packages/cli/src/response-helper.ts index c9f7270d5f..0dff1625cb 100644 --- a/packages/cli/src/response-helper.ts +++ b/packages/cli/src/response-helper.ts @@ -10,9 +10,9 @@ import picocolors from 'picocolors'; import Container from 'typedi'; import { inDevelopment } from '@/constants'; +import { Logger } from '@/logging/logger.service'; import { ResponseError } from './errors/response-errors/abstract/response.error'; -import { Logger } from './logger'; export function sendSuccessResponse( res: Response, diff --git a/packages/cli/src/runners/__tests__/task-broker.test.ts b/packages/cli/src/runners/__tests__/task-broker.test.ts new file mode 100644 index 0000000000..f5b91a3f2c --- /dev/null +++ b/packages/cli/src/runners/__tests__/task-broker.test.ts @@ -0,0 +1,504 @@ +import { mock } from 'jest-mock-extended'; + +import { TaskRejectError } from '../errors'; +import type { RunnerMessage, TaskResultData } from '../runner-types'; +import { TaskBroker } from '../task-broker.service'; +import type { TaskOffer, TaskRequest, TaskRunner } from '../task-broker.service'; + +describe('TaskBroker', () => { + let taskBroker: TaskBroker; + + beforeEach(() => { + taskBroker = new TaskBroker(mock()); + jest.restoreAllMocks(); + }); + + describe('expireTasks', () => { + it('should remove expired task offers and keep valid task offers', () => { + const now = process.hrtime.bigint(); + + const validOffer: TaskOffer = { + offerId: 'valid', + runnerId: 'runner1', + taskType: 'taskType1', + validFor: 1000, + validUntil: now + BigInt(1000 * 1_000_000), // 1 second in the future + }; + + const expiredOffer1: TaskOffer = { + offerId: 'expired1', + runnerId: 'runner2', + taskType: 'taskType1', + validFor: 1000, + validUntil: now - BigInt(1000 * 1_000_000), // 
1 second in the past + }; + + const expiredOffer2: TaskOffer = { + offerId: 'expired2', + runnerId: 'runner3', + taskType: 'taskType1', + validFor: 2000, + validUntil: now - BigInt(2000 * 1_000_000), // 2 seconds in the past + }; + + taskBroker.setPendingTaskOffers([validOffer, expiredOffer1, expiredOffer2]); + + taskBroker.expireTasks(); + + const offers = taskBroker.getPendingTaskOffers(); + + expect(offers).toHaveLength(1); + expect(offers[0]).toEqual(validOffer); + }); + }); + + describe('registerRunner', () => { + it('should add a runner to known runners', () => { + const runnerId = 'runner1'; + const runner = mock({ id: runnerId }); + const messageCallback = jest.fn(); + + taskBroker.registerRunner(runner, messageCallback); + + const knownRunners = taskBroker.getKnownRunners(); + const runnerIds = [...knownRunners.keys()]; + + expect(runnerIds).toHaveLength(1); + expect(runnerIds[0]).toEqual(runnerId); + + expect(knownRunners.get(runnerId)?.runner).toEqual(runner); + expect(knownRunners.get(runnerId)?.messageCallback).toEqual(messageCallback); + }); + }); + + describe('registerRequester', () => { + it('should add a requester to known requesters', () => { + const requesterId = 'requester1'; + const messageCallback = jest.fn(); + + taskBroker.registerRequester(requesterId, messageCallback); + + const knownRequesters = taskBroker.getKnownRequesters(); + const requesterIds = [...knownRequesters.keys()]; + + expect(requesterIds).toHaveLength(1); + expect(requesterIds[0]).toEqual(requesterId); + + expect(knownRequesters.get(requesterId)).toEqual(messageCallback); + }); + }); + + describe('deregisterRunner', () => { + it('should remove a runner from known runners', () => { + const runnerId = 'runner1'; + const runner = mock({ id: runnerId }); + const messageCallback = jest.fn(); + + taskBroker.registerRunner(runner, messageCallback); + taskBroker.deregisterRunner(runnerId); + + const knownRunners = taskBroker.getKnownRunners(); + const runnerIds = 
Object.keys(knownRunners); + + expect(runnerIds).toHaveLength(0); + }); + }); + + describe('deregisterRequester', () => { + it('should remove a requester from known requesters', () => { + const requesterId = 'requester1'; + const messageCallback = jest.fn(); + + taskBroker.registerRequester(requesterId, messageCallback); + taskBroker.deregisterRequester(requesterId); + + const knownRequesters = taskBroker.getKnownRequesters(); + const requesterIds = Object.keys(knownRequesters); + + expect(requesterIds).toHaveLength(0); + }); + }); + + describe('taskRequested', () => { + it('should match a pending offer to an incoming request', async () => { + const now = process.hrtime.bigint(); + + const offer: TaskOffer = { + offerId: 'offer1', + runnerId: 'runner1', + taskType: 'taskType1', + validFor: 1000, + validUntil: now + BigInt(1000 * 1_000_000), + }; + + taskBroker.setPendingTaskOffers([offer]); + + const request: TaskRequest = { + requestId: 'request1', + requesterId: 'requester1', + taskType: 'taskType1', + }; + + jest.spyOn(taskBroker, 'acceptOffer').mockResolvedValue(); // allow Jest to exit cleanly + + taskBroker.taskRequested(request); + + expect(taskBroker.acceptOffer).toHaveBeenCalled(); + expect(taskBroker.getPendingTaskOffers()).toHaveLength(0); + }); + }); + + describe('taskOffered', () => { + it('should match a pending request to an incoming offer', () => { + const now = process.hrtime.bigint(); + + const request: TaskRequest = { + requestId: 'request1', + requesterId: 'requester1', + taskType: 'taskType1', + acceptInProgress: false, + }; + + taskBroker.setPendingTaskRequests([request]); + + const offer: TaskOffer = { + offerId: 'offer1', + runnerId: 'runner1', + taskType: 'taskType1', + validFor: 1000, + validUntil: now + BigInt(1000 * 1_000_000), + }; + + jest.spyOn(taskBroker, 'acceptOffer').mockResolvedValue(); // allow Jest to exit cleanly + + taskBroker.taskOffered(offer); + + expect(taskBroker.acceptOffer).toHaveBeenCalled(); + 
expect(taskBroker.getPendingTaskOffers()).toHaveLength(0); + }); + }); + + describe('settleTasks', () => { + it('should match task offers with task requests by task type', () => { + const now = process.hrtime.bigint(); + + const offer1: TaskOffer = { + offerId: 'offer1', + runnerId: 'runner1', + taskType: 'taskType1', + validFor: 1000, + validUntil: now + BigInt(1000 * 1_000_000), + }; + + const offer2: TaskOffer = { + offerId: 'offer2', + runnerId: 'runner2', + taskType: 'taskType2', + validFor: 1000, + validUntil: now + BigInt(1000 * 1_000_000), + }; + + const request1: TaskRequest = { + requestId: 'request1', + requesterId: 'requester1', + taskType: 'taskType1', + acceptInProgress: false, + }; + + const request2: TaskRequest = { + requestId: 'request2', + requesterId: 'requester2', + taskType: 'taskType2', + acceptInProgress: false, + }; + + const request3: TaskRequest = { + requestId: 'request3', + requesterId: 'requester3', + taskType: 'taskType3', // will have no match + acceptInProgress: false, + }; + + taskBroker.setPendingTaskOffers([offer1, offer2]); + taskBroker.setPendingTaskRequests([request1, request2, request3]); + + const acceptOfferSpy = jest.spyOn(taskBroker, 'acceptOffer').mockResolvedValue(); + + taskBroker.settleTasks(); + + expect(acceptOfferSpy).toHaveBeenCalledTimes(2); + expect(acceptOfferSpy).toHaveBeenCalledWith(offer1, request1); + expect(acceptOfferSpy).toHaveBeenCalledWith(offer2, request2); + + const remainingOffers = taskBroker.getPendingTaskOffers(); + expect(remainingOffers).toHaveLength(0); + }); + + it('should not match a request whose acceptance is in progress', () => { + const now = process.hrtime.bigint(); + + const offer: TaskOffer = { + offerId: 'offer1', + runnerId: 'runner1', + taskType: 'taskType1', + validFor: 1000, + validUntil: now + BigInt(1000 * 1_000_000), + }; + + const request: TaskRequest = { + requestId: 'request1', + requesterId: 'requester1', + taskType: 'taskType1', + acceptInProgress: true, + }; + + 
taskBroker.setPendingTaskOffers([offer]); + taskBroker.setPendingTaskRequests([request]); + + const acceptOfferSpy = jest.spyOn(taskBroker, 'acceptOffer').mockResolvedValue(); + + taskBroker.settleTasks(); + + expect(acceptOfferSpy).not.toHaveBeenCalled(); + + const remainingOffers = taskBroker.getPendingTaskOffers(); + expect(remainingOffers).toHaveLength(1); + expect(remainingOffers[0]).toEqual(offer); + + const remainingRequests = taskBroker.getPendingTaskRequests(); + expect(remainingRequests).toHaveLength(1); + expect(remainingRequests[0]).toEqual(request); + }); + + it('should expire tasks before settling', () => { + const now = process.hrtime.bigint(); + + const validOffer: TaskOffer = { + offerId: 'valid', + runnerId: 'runner1', + taskType: 'taskType1', + validFor: 1000, + validUntil: now + BigInt(1000 * 1_000_000), // 1 second in the future + }; + + const expiredOffer: TaskOffer = { + offerId: 'expired', + runnerId: 'runner2', + taskType: 'taskType2', // will be removed before matching + validFor: 1000, + validUntil: now - BigInt(1000 * 1_000_000), // 1 second in the past + }; + + const request1: TaskRequest = { + requestId: 'request1', + requesterId: 'requester1', + taskType: 'taskType1', + acceptInProgress: false, + }; + + const request2: TaskRequest = { + requestId: 'request2', + requesterId: 'requester2', + taskType: 'taskType2', + acceptInProgress: false, + }; + + taskBroker.setPendingTaskOffers([validOffer, expiredOffer]); + taskBroker.setPendingTaskRequests([request1, request2]); + + const acceptOfferSpy = jest.spyOn(taskBroker, 'acceptOffer').mockResolvedValue(); + + taskBroker.settleTasks(); + + expect(acceptOfferSpy).toHaveBeenCalledTimes(1); + expect(acceptOfferSpy).toHaveBeenCalledWith(validOffer, request1); + + const remainingOffers = taskBroker.getPendingTaskOffers(); + expect(remainingOffers).toHaveLength(0); + }); + }); + + describe('onRunnerMessage', () => { + it('should handle `runner:taskaccepted` message', async () => { + const runnerId 
= 'runner1'; + const taskId = 'task1'; + + const message: RunnerMessage.ToN8n.TaskAccepted = { + type: 'runner:taskaccepted', + taskId, + }; + + const accept = jest.fn(); + const reject = jest.fn(); + + taskBroker.setRunnerAcceptRejects({ [taskId]: { accept, reject } }); + taskBroker.registerRunner(mock({ id: runnerId }), jest.fn()); + + await taskBroker.onRunnerMessage(runnerId, message); + + const runnerAcceptRejects = taskBroker.getRunnerAcceptRejects(); + + expect(accept).toHaveBeenCalled(); + expect(reject).not.toHaveBeenCalled(); + expect(runnerAcceptRejects.get(taskId)).toBeUndefined(); + }); + + it('should handle `runner:taskrejected` message', async () => { + const runnerId = 'runner1'; + const taskId = 'task1'; + const rejectionReason = 'Task execution failed'; + + const message: RunnerMessage.ToN8n.TaskRejected = { + type: 'runner:taskrejected', + taskId, + reason: rejectionReason, + }; + + const accept = jest.fn(); + const reject = jest.fn(); + + taskBroker.setRunnerAcceptRejects({ [taskId]: { accept, reject } }); + taskBroker.registerRunner(mock({ id: runnerId }), jest.fn()); + + await taskBroker.onRunnerMessage(runnerId, message); + + const runnerAcceptRejects = taskBroker.getRunnerAcceptRejects(); + + expect(accept).not.toHaveBeenCalled(); + expect(reject).toHaveBeenCalledWith(new TaskRejectError(rejectionReason)); + expect(runnerAcceptRejects.get(taskId)).toBeUndefined(); + }); + + it('should handle `runner:taskdone` message', async () => { + const runnerId = 'runner1'; + const taskId = 'task1'; + const requesterId = 'requester1'; + const data = mock(); + + const message: RunnerMessage.ToN8n.TaskDone = { + type: 'runner:taskdone', + taskId, + data, + }; + + const requesterMessageCallback = jest.fn(); + + taskBroker.registerRunner(mock({ id: runnerId }), jest.fn()); + taskBroker.setTasks({ + [taskId]: { id: taskId, runnerId, requesterId, taskType: 'test' }, + }); + taskBroker.registerRequester(requesterId, requesterMessageCallback); + + await 
taskBroker.onRunnerMessage(runnerId, message); + + expect(requesterMessageCallback).toHaveBeenCalledWith({ + type: 'broker:taskdone', + taskId, + data, + }); + + expect(taskBroker.getTasks().get(taskId)).toBeUndefined(); + }); + + it('should handle `runner:taskerror` message', async () => { + const runnerId = 'runner1'; + const taskId = 'task1'; + const requesterId = 'requester1'; + const errorMessage = 'Task execution failed'; + + const message: RunnerMessage.ToN8n.TaskError = { + type: 'runner:taskerror', + taskId, + error: errorMessage, + }; + + const requesterMessageCallback = jest.fn(); + + taskBroker.registerRunner(mock({ id: runnerId }), jest.fn()); + taskBroker.setTasks({ + [taskId]: { id: taskId, runnerId, requesterId, taskType: 'test' }, + }); + taskBroker.registerRequester(requesterId, requesterMessageCallback); + + await taskBroker.onRunnerMessage(runnerId, message); + + expect(requesterMessageCallback).toHaveBeenCalledWith({ + type: 'broker:taskerror', + taskId, + error: errorMessage, + }); + + expect(taskBroker.getTasks().get(taskId)).toBeUndefined(); + }); + + it('should handle `runner:taskdatarequest` message', async () => { + const runnerId = 'runner1'; + const taskId = 'task1'; + const requesterId = 'requester1'; + const requestId = 'request1'; + const requestType = 'input'; + const param = 'test_param'; + + const message: RunnerMessage.ToN8n.TaskDataRequest = { + type: 'runner:taskdatarequest', + taskId, + requestId, + requestType, + param, + }; + + const requesterMessageCallback = jest.fn(); + + taskBroker.registerRunner(mock({ id: runnerId }), jest.fn()); + taskBroker.setTasks({ + [taskId]: { id: taskId, runnerId, requesterId, taskType: 'test' }, + }); + taskBroker.registerRequester(requesterId, requesterMessageCallback); + + await taskBroker.onRunnerMessage(runnerId, message); + + expect(requesterMessageCallback).toHaveBeenCalledWith({ + type: 'broker:taskdatarequest', + taskId, + requestId, + requestType, + param, + }); + }); + + it('should 
handle `runner:rpc` message', async () => { + const runnerId = 'runner1'; + const taskId = 'task1'; + const requesterId = 'requester1'; + const callId = 'call1'; + const rpcName = 'helpers.httpRequestWithAuthentication'; + const rpcParams = ['param1', 'param2']; + + const message: RunnerMessage.ToN8n.RPC = { + type: 'runner:rpc', + taskId, + callId, + name: rpcName, + params: rpcParams, + }; + + const requesterMessageCallback = jest.fn(); + + taskBroker.registerRunner(mock({ id: runnerId }), jest.fn()); + taskBroker.setTasks({ + [taskId]: { id: taskId, runnerId, requesterId, taskType: 'test' }, + }); + taskBroker.registerRequester(requesterId, requesterMessageCallback); + + await taskBroker.onRunnerMessage(runnerId, message); + + expect(requesterMessageCallback).toHaveBeenCalledWith({ + type: 'broker:rpc', + taskId, + callId, + name: rpcName, + params: rpcParams, + }); + }); + }); +}); diff --git a/packages/cli/src/runners/auth/__tests__/task-runner-auth.controller.test.ts b/packages/cli/src/runners/auth/__tests__/task-runner-auth.controller.test.ts new file mode 100644 index 0000000000..7d43f91458 --- /dev/null +++ b/packages/cli/src/runners/auth/__tests__/task-runner-auth.controller.test.ts @@ -0,0 +1,115 @@ +import { GlobalConfig } from '@n8n/config'; +import type { NextFunction, Response } from 'express'; +import { mock } from 'jest-mock-extended'; + +import { CacheService } from '@/services/cache/cache.service'; +import { mockInstance } from '@test/mocking'; + +import { BadRequestError } from '../../../errors/response-errors/bad-request.error'; +import { ForbiddenError } from '../../../errors/response-errors/forbidden.error'; +import type { AuthlessRequest } from '../../../requests'; +import type { TaskRunnerServerInitRequest } from '../../runner-types'; +import { TaskRunnerAuthController } from '../task-runner-auth.controller'; +import { TaskRunnerAuthService } from '../task-runner-auth.service'; + +describe('TaskRunnerAuthController', () => { + const 
globalConfig = mockInstance(GlobalConfig, { + cache: { + backend: 'memory', + memory: { + maxSize: 1024, + ttl: 9999, + }, + }, + taskRunners: { + authToken: 'random-secret', + }, + }); + const TTL = 100; + const cacheService = new CacheService(globalConfig); + const authService = new TaskRunnerAuthService(globalConfig, cacheService, TTL); + const authController = new TaskRunnerAuthController(authService); + + const createMockGrantTokenReq = (token?: string) => + ({ + body: { + token, + }, + }) as unknown as AuthlessRequest; + + beforeEach(() => { + jest.clearAllMocks(); + }); + + describe('createGrantToken', () => { + it('should throw BadRequestError when auth token is missing', async () => { + const req = createMockGrantTokenReq(); + + // Act + await expect(authController.createGrantToken(req)).rejects.toThrowError(BadRequestError); + }); + + it('should throw ForbiddenError when auth token is invalid', async () => { + const req = createMockGrantTokenReq('invalid'); + + // Act + await expect(authController.createGrantToken(req)).rejects.toThrowError(ForbiddenError); + }); + + it('should return rant token when auth token is valid', async () => { + const req = createMockGrantTokenReq('random-secret'); + + // Act + await expect(authController.createGrantToken(req)).resolves.toStrictEqual({ + token: expect.any(String), + }); + }); + }); + + describe('authMiddleware', () => { + const res = mock(); + const next = jest.fn() as NextFunction; + + const createMockReqWithToken = (token?: string) => + mock({ + headers: { + authorization: `Bearer ${token}`, + }, + }); + + beforeEach(() => { + res.status.mockReturnThis(); + }); + + it('should respond with 401 when grant token is missing', async () => { + const req = mock({}); + + await authController.authMiddleware(req, res, next); + + expect(next).not.toHaveBeenCalled(); + expect(res.status).toHaveBeenCalledWith(401); + expect(res.json).toHaveBeenCalledWith({ code: 401, message: 'Unauthorized' }); + }); + + it('should respond 
with 403 when grant token is invalid', async () => { + const req = createMockReqWithToken('invalid'); + + await authController.authMiddleware(req, res, next); + + expect(next).not.toHaveBeenCalled(); + expect(res.status).toHaveBeenCalledWith(403); + expect(res.json).toHaveBeenCalledWith({ code: 403, message: 'Forbidden' }); + }); + + it('should call next() when grant token is valid', async () => { + const { token: validToken } = await authController.createGrantToken( + createMockGrantTokenReq('random-secret'), + ); + + await authController.authMiddleware(createMockReqWithToken(validToken), res, next); + + expect(next).toHaveBeenCalled(); + expect(res.status).not.toHaveBeenCalled(); + }); + }); +}); diff --git a/packages/cli/src/runners/auth/__tests__/task-runner-auth.service.test.ts b/packages/cli/src/runners/auth/__tests__/task-runner-auth.service.test.ts new file mode 100644 index 0000000000..a1321945e3 --- /dev/null +++ b/packages/cli/src/runners/auth/__tests__/task-runner-auth.service.test.ts @@ -0,0 +1,95 @@ +import { GlobalConfig } from '@n8n/config'; +import { sleep } from 'n8n-workflow'; + +import config from '@/config'; +import { CacheService } from '@/services/cache/cache.service'; +import { retryUntil } from '@test-integration/retry-until'; + +import { mockInstance } from '../../../../test/shared/mocking'; +import { TaskRunnerAuthService } from '../task-runner-auth.service'; + +describe('TaskRunnerAuthService', () => { + config.set('taskRunners.authToken', 'random-secret'); + + const globalConfig = mockInstance(GlobalConfig, { + cache: { + backend: 'memory', + memory: { + maxSize: 1024, + ttl: 9999, + }, + }, + taskRunners: { + authToken: 'random-secret', + }, + }); + const TTL = 100; + const cacheService = new CacheService(globalConfig); + const authService = new TaskRunnerAuthService(globalConfig, cacheService, TTL); + + beforeEach(() => { + jest.clearAllMocks(); + }); + + describe('isValidAuthToken', () => { + it('should be valid for the configured 
token', () => { + expect(authService.isValidAuthToken('random-secret')); + }); + + it('should be invalid for anything else', () => { + expect(authService.isValidAuthToken('!random-secret')); + }); + }); + + describe('createGrantToken', () => { + it('should generate a random token', async () => { + expect(typeof (await authService.createGrantToken())).toBe('string'); + }); + + it('should store the generated token in cache', async () => { + // Arrange + const cacheSetSpy = jest.spyOn(cacheService, 'set'); + + // Act + const token = await authService.createGrantToken(); + + // Assert + expect(cacheSetSpy).toHaveBeenCalledWith(`grant-token:${token}`, '1', TTL); + }); + }); + + describe('tryConsumeGrantToken', () => { + it('should return false for an invalid grant token', async () => { + expect(await authService.tryConsumeGrantToken('random-secret')).toBe(false); + }); + + it('should return true for a valid grant token', async () => { + // Arrange + const grantToken = await authService.createGrantToken(); + + // Act + expect(await authService.tryConsumeGrantToken(grantToken)).toBe(true); + }); + + it('should return false for a already used grant token', async () => { + // Arrange + const grantToken = await authService.createGrantToken(); + + // Act + expect(await authService.tryConsumeGrantToken(grantToken)).toBe(true); + expect(await authService.tryConsumeGrantToken(grantToken)).toBe(false); + }); + + it('should return false for an expired grant token', async () => { + // Arrange + const grantToken = await authService.createGrantToken(); + + // Act + await sleep(TTL + 1); + + await retryUntil(async () => + expect(await authService.tryConsumeGrantToken(grantToken)).toBe(false), + ); + }); + }); +}); diff --git a/packages/cli/src/runners/auth/task-runner-auth.controller.ts b/packages/cli/src/runners/auth/task-runner-auth.controller.ts new file mode 100644 index 0000000000..a117dfca0d --- /dev/null +++ b/packages/cli/src/runners/auth/task-runner-auth.controller.ts @@ -0,0 
+1,62 @@ +import type { NextFunction, Response } from 'express'; +import { Service } from 'typedi'; + +import type { AuthlessRequest } from '@/requests'; + +import { taskRunnerAuthRequestBodySchema } from './task-runner-auth.schema'; +import { TaskRunnerAuthService } from './task-runner-auth.service'; +import { BadRequestError } from '../../errors/response-errors/bad-request.error'; +import { ForbiddenError } from '../../errors/response-errors/forbidden.error'; +import type { TaskRunnerServerInitRequest } from '../runner-types'; + +/** + * Controller responsible for authenticating Task Runner connections + */ +@Service() +export class TaskRunnerAuthController { + constructor(private readonly taskRunnerAuthService: TaskRunnerAuthService) { + // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment + this.authMiddleware = this.authMiddleware.bind(this); + } + + /** + * Validates the provided auth token and creates and responds with a grant token, + * which can be used to initiate a task runner connection. 
+ */ + async createGrantToken(req: AuthlessRequest) { + const result = await taskRunnerAuthRequestBodySchema.safeParseAsync(req.body); + if (!result.success) { + throw new BadRequestError(result.error.errors[0].code); + } + + const { token: authToken } = result.data; + if (!this.taskRunnerAuthService.isValidAuthToken(authToken)) { + throw new ForbiddenError(); + } + + const grantToken = await this.taskRunnerAuthService.createGrantToken(); + return { + token: grantToken, + }; + } + + /** + * Middleware to authenticate task runner init requests + */ + async authMiddleware(req: TaskRunnerServerInitRequest, res: Response, next: NextFunction) { + const authHeader = req.headers.authorization; + if (typeof authHeader !== 'string' || !authHeader.startsWith('Bearer ')) { + res.status(401).json({ code: 401, message: 'Unauthorized' }); + return; + } + + const grantToken = authHeader.slice('Bearer '.length); + const isConsumed = await this.taskRunnerAuthService.tryConsumeGrantToken(grantToken); + if (!isConsumed) { + res.status(403).json({ code: 403, message: 'Forbidden' }); + return; + } + + next(); + } +} diff --git a/packages/cli/src/runners/auth/task-runner-auth.schema.ts b/packages/cli/src/runners/auth/task-runner-auth.schema.ts new file mode 100644 index 0000000000..c3ab2c17f2 --- /dev/null +++ b/packages/cli/src/runners/auth/task-runner-auth.schema.ts @@ -0,0 +1,5 @@ +import { z } from 'zod'; + +export const taskRunnerAuthRequestBodySchema = z.object({ + token: z.string().min(1), +}); diff --git a/packages/cli/src/runners/auth/task-runner-auth.service.ts b/packages/cli/src/runners/auth/task-runner-auth.service.ts new file mode 100644 index 0000000000..5907cf6678 --- /dev/null +++ b/packages/cli/src/runners/auth/task-runner-auth.service.ts @@ -0,0 +1,56 @@ +import { GlobalConfig } from '@n8n/config'; +import { randomBytes } from 'crypto'; +import { Service } from 'typedi'; + +import { Time } from '@/constants'; +import { CacheService } from 
'@/services/cache/cache.service'; + +const GRANT_TOKEN_TTL = 15 * Time.seconds.toMilliseconds; + +@Service() +export class TaskRunnerAuthService { + constructor( + private readonly globalConfig: GlobalConfig, + private readonly cacheService: CacheService, + // For unit testing purposes + private readonly grantTokenTtl = GRANT_TOKEN_TTL, + ) {} + + isValidAuthToken(token: string) { + return token === this.globalConfig.taskRunners.authToken; + } + + /** + * @returns grant token that can be used to establish a task runner connection + */ + async createGrantToken() { + const grantToken = this.generateGrantToken(); + + const key = this.cacheKeyForGrantToken(grantToken); + await this.cacheService.set(key, '1', this.grantTokenTtl); + + return grantToken; + } + + /** + * Checks if the given `grantToken` is a valid token and marks it as + * used. + */ + async tryConsumeGrantToken(grantToken: string) { + const key = this.cacheKeyForGrantToken(grantToken); + const consumed = await this.cacheService.get(key); + // Not found from cache --> Invalid token + if (consumed === undefined) return false; + + await this.cacheService.delete(key); + return true; + } + + private generateGrantToken() { + return randomBytes(32).toString('hex'); + } + + private cacheKeyForGrantToken(grantToken: string) { + return `grant-token:${grantToken}`; + } +} diff --git a/packages/cli/src/runners/errors.ts b/packages/cli/src/runners/errors.ts new file mode 100644 index 0000000000..cc53e18fd4 --- /dev/null +++ b/packages/cli/src/runners/errors.ts @@ -0,0 +1,9 @@ +import { ApplicationError } from 'n8n-workflow'; + +export class TaskRejectError extends ApplicationError { + constructor(public reason: string) { + super(`Task rejected with reason: ${reason}`, { level: 'info' }); + } +} + +export class TaskError extends ApplicationError {} diff --git a/packages/cli/src/runners/runner-types.ts b/packages/cli/src/runners/runner-types.ts new file mode 100644 index 0000000000..f615754e02 --- /dev/null +++ 
b/packages/cli/src/runners/runner-types.ts @@ -0,0 +1,243 @@ +import type { Response } from 'express'; +import type { INodeExecutionData } from 'n8n-workflow'; +import type WebSocket from 'ws'; + +import type { TaskRunner } from './task-broker.service'; +import type { AuthlessRequest } from '../requests'; + +export type DataRequestType = 'input' | 'node' | 'all'; + +export interface TaskResultData { + result: INodeExecutionData[]; + customData?: Record; +} + +export interface TaskRunnerServerInitRequest + extends AuthlessRequest<{}, {}, {}, { id: TaskRunner['id']; token?: string }> { + ws: WebSocket; +} + +export type TaskRunnerServerInitResponse = Response & { req: TaskRunnerServerInitRequest }; + +export namespace N8nMessage { + export namespace ToRunner { + export interface InfoRequest { + type: 'broker:inforequest'; + } + + export interface RunnerRegistered { + type: 'broker:runnerregistered'; + } + + export interface TaskOfferAccept { + type: 'broker:taskofferaccept'; + taskId: string; + offerId: string; + } + + export interface TaskCancel { + type: 'broker:taskcancel'; + taskId: string; + reason: string; + } + + export interface TaskSettings { + type: 'broker:tasksettings'; + taskId: string; + settings: unknown; + } + + export interface RPCResponse { + type: 'broker:rpcresponse'; + callId: string; + taskId: string; + status: 'success' | 'error'; + data: unknown; + } + + export interface TaskDataResponse { + type: 'broker:taskdataresponse'; + taskId: string; + requestId: string; + data: unknown; + } + + export type All = + | InfoRequest + | TaskOfferAccept + | TaskCancel + | TaskSettings + | RunnerRegistered + | RPCResponse + | TaskDataResponse; + } + + export namespace ToRequester { + export interface TaskReady { + type: 'broker:taskready'; + requestId: string; + taskId: string; + } + + export interface TaskDone { + type: 'broker:taskdone'; + taskId: string; + data: TaskResultData; + } + + export interface TaskError { + type: 'broker:taskerror'; + taskId: 
string; + error: unknown; + } + + export interface TaskDataRequest { + type: 'broker:taskdatarequest'; + taskId: string; + requestId: string; + requestType: DataRequestType; + param?: string; + } + + export interface RPC { + type: 'broker:rpc'; + callId: string; + taskId: string; + name: (typeof RPC_ALLOW_LIST)[number]; + params: unknown[]; + } + + export type All = TaskReady | TaskDone | TaskError | TaskDataRequest | RPC; + } +} + +export namespace RequesterMessage { + export namespace ToN8n { + export interface TaskSettings { + type: 'requester:tasksettings'; + taskId: string; + settings: unknown; + } + + export interface TaskCancel { + type: 'requester:taskcancel'; + taskId: string; + reason: string; + } + + export interface TaskDataResponse { + type: 'requester:taskdataresponse'; + taskId: string; + requestId: string; + data: unknown; + } + + export interface RPCResponse { + type: 'requester:rpcresponse'; + taskId: string; + callId: string; + status: 'success' | 'error'; + data: unknown; + } + + export interface TaskRequest { + type: 'requester:taskrequest'; + requestId: string; + taskType: string; + } + + export type All = TaskSettings | TaskCancel | RPCResponse | TaskDataResponse | TaskRequest; + } +} + +export namespace RunnerMessage { + export namespace ToN8n { + export interface Info { + type: 'runner:info'; + name: string; + types: string[]; + } + + export interface TaskAccepted { + type: 'runner:taskaccepted'; + taskId: string; + } + + export interface TaskRejected { + type: 'runner:taskrejected'; + taskId: string; + reason: string; + } + + export interface TaskDone { + type: 'runner:taskdone'; + taskId: string; + data: TaskResultData; + } + + export interface TaskError { + type: 'runner:taskerror'; + taskId: string; + error: unknown; + } + + export interface TaskOffer { + type: 'runner:taskoffer'; + offerId: string; + taskType: string; + validFor: number; + } + + export interface TaskDataRequest { + type: 'runner:taskdatarequest'; + taskId: string; + 
requestId: string; + requestType: DataRequestType; + param?: string; + } + + export interface RPC { + type: 'runner:rpc'; + callId: string; + taskId: string; + name: (typeof RPC_ALLOW_LIST)[number]; + params: unknown[]; + } + + export type All = + | Info + | TaskDone + | TaskError + | TaskAccepted + | TaskRejected + | TaskOffer + | RPC + | TaskDataRequest; + } +} + +export const RPC_ALLOW_LIST = [ + 'logNodeOutput', + 'helpers.httpRequestWithAuthentication', + 'helpers.requestWithAuthenticationPaginated', + // "helpers.normalizeItems" + // "helpers.constructExecutionMetaData" + // "helpers.assertBinaryData" + 'helpers.getBinaryDataBuffer', + // "helpers.copyInputItems" + // "helpers.returnJsonArray" + 'helpers.getSSHClient', + 'helpers.createReadStream', + // "helpers.getStoragePath" + 'helpers.writeContentToFile', + 'helpers.prepareBinaryData', + 'helpers.setBinaryDataBuffer', + 'helpers.copyBinaryFile', + 'helpers.binaryToBuffer', + // "helpers.binaryToString" + // "helpers.getBinaryPath" + 'helpers.getBinaryStream', + 'helpers.getBinaryMetadata', + 'helpers.createDeferredPromise', + 'helpers.httpRequest', +] as const; diff --git a/packages/cli/src/runners/runner-ws-server.ts b/packages/cli/src/runners/runner-ws-server.ts new file mode 100644 index 0000000000..ef9e52f5f5 --- /dev/null +++ b/packages/cli/src/runners/runner-ws-server.ts @@ -0,0 +1,189 @@ +import { GlobalConfig } from '@n8n/config'; +import type { Application } from 'express'; +import { ServerResponse, type Server } from 'http'; +import { ApplicationError } from 'n8n-workflow'; +import type { Socket } from 'net'; +import Container, { Service } from 'typedi'; +import { parse as parseUrl } from 'url'; +import { Server as WSServer } from 'ws'; +import type WebSocket from 'ws'; + +import { Logger } from '@/logging/logger.service'; +import { send } from '@/response-helper'; +import { TaskRunnerAuthController } from '@/runners/auth/task-runner-auth.controller'; + +import type { + RunnerMessage, + 
N8nMessage, + TaskRunnerServerInitRequest, + TaskRunnerServerInitResponse, +} from './runner-types'; +import { TaskBroker, type MessageCallback, type TaskRunner } from './task-broker.service'; + +function heartbeat(this: WebSocket) { + this.isAlive = true; +} + +function getEndpointBasePath(restEndpoint: string) { + const globalConfig = Container.get(GlobalConfig); + + let path = globalConfig.taskRunners.path; + if (path.startsWith('/')) { + path = path.slice(1); + } + if (path.endsWith('/')) { + path = path.slice(-1); + } + + return `/${restEndpoint}/${path}`; +} + +function getWsEndpoint(restEndpoint: string) { + return `${getEndpointBasePath(restEndpoint)}/_ws`; +} + +@Service() +export class TaskRunnerService { + runnerConnections: Map = new Map(); + + constructor( + private readonly logger: Logger, + private readonly taskBroker: TaskBroker, + ) {} + + sendMessage(id: TaskRunner['id'], message: N8nMessage.ToRunner.All) { + this.runnerConnections.get(id)?.send(JSON.stringify(message)); + } + + add(id: TaskRunner['id'], connection: WebSocket) { + connection.isAlive = true; + connection.on('pong', heartbeat); + + let isConnected = false; + + const onMessage = (data: WebSocket.RawData) => { + try { + const buffer = Array.isArray(data) ? 
Buffer.concat(data) : Buffer.from(data); + + const message: RunnerMessage.ToN8n.All = JSON.parse( + buffer.toString('utf8'), + ) as RunnerMessage.ToN8n.All; + + if (!isConnected && message.type !== 'runner:info') { + return; + } else if (!isConnected && message.type === 'runner:info') { + this.removeConnection(id); + isConnected = true; + + this.runnerConnections.set(id, connection); + + this.taskBroker.registerRunner( + { + id, + taskTypes: message.types, + lastSeen: new Date(), + name: message.name, + }, + this.sendMessage.bind(this, id) as MessageCallback, + ); + + this.sendMessage(id, { type: 'broker:runnerregistered' }); + + this.logger.info(`Runner "${message.name}"(${id}) has been registered`); + return; + } + + void this.taskBroker.onRunnerMessage(id, message); + } catch (error) { + this.logger.error(`Couldn't parse message from runner "${id}"`, { + error: error as unknown, + id, + data, + }); + } + }; + + // Makes sure to remove the session if the connection is closed + connection.once('close', () => { + connection.off('pong', heartbeat); + connection.off('message', onMessage); + this.removeConnection(id); + }); + + connection.on('message', onMessage); + connection.send( + JSON.stringify({ type: 'broker:inforequest' } as N8nMessage.ToRunner.InfoRequest), + ); + } + + removeConnection(id: TaskRunner['id']) { + const connection = this.runnerConnections.get(id); + if (connection) { + this.taskBroker.deregisterRunner(id); + connection.close(); + this.runnerConnections.delete(id); + } + } + + handleRequest(req: TaskRunnerServerInitRequest, _res: TaskRunnerServerInitResponse) { + this.add(req.query.id, req.ws); + } +} + +// Checks for upgrade requests on the runners path and upgrades the connection +// then, passes the request back to the app to handle the routing +export const setupRunnerServer = (restEndpoint: string, server: Server, app: Application) => { + const globalConfig = Container.get(GlobalConfig); + const { authToken } = globalConfig.taskRunners; + + 
if (!authToken) { + throw new ApplicationError( + 'Authentication token must be configured when task runners are enabled. Use N8N_RUNNERS_AUTH_TOKEN environment variable to set it.', + ); + } + + const endpoint = getWsEndpoint(restEndpoint); + const wsServer = new WSServer({ noServer: true }); + server.on('upgrade', (request: TaskRunnerServerInitRequest, socket: Socket, head) => { + if (parseUrl(request.url).pathname !== endpoint) { + // We can't close the connection here since the Push connections + // are using the same HTTP server and upgrade requests and this + // gets triggered for both + return; + } + + wsServer.handleUpgrade(request, socket, head, (ws) => { + request.ws = ws; + + const response = new ServerResponse(request); + response.writeHead = (statusCode) => { + if (statusCode > 200) ws.close(); + return response; + }; + + // @ts-expect-error Hidden API? + // eslint-disable-next-line @typescript-eslint/no-unsafe-call + app.handle(request, response); + }); + }); +}; + +export const setupRunnerHandler = (restEndpoint: string, app: Application) => { + const wsEndpoint = getWsEndpoint(restEndpoint); + const authEndpoint = `${getEndpointBasePath(restEndpoint)}/auth`; + + const taskRunnerAuthController = Container.get(TaskRunnerAuthController); + const taskRunnerService = Container.get(TaskRunnerService); + app.use( + wsEndpoint, + // eslint-disable-next-line @typescript-eslint/unbound-method + taskRunnerAuthController.authMiddleware, + (req: TaskRunnerServerInitRequest, res: TaskRunnerServerInitResponse) => + taskRunnerService.handleRequest(req, res), + ); + + app.post( + authEndpoint, + send(async (req) => await taskRunnerAuthController.createGrantToken(req)), + ); +}; diff --git a/packages/cli/src/runners/task-broker.service.ts b/packages/cli/src/runners/task-broker.service.ts new file mode 100644 index 0000000000..829910b468 --- /dev/null +++ b/packages/cli/src/runners/task-broker.service.ts @@ -0,0 +1,553 @@ +import { ApplicationError } from 
'n8n-workflow'; +import { nanoid } from 'nanoid'; +import { Service } from 'typedi'; + +import { Logger } from '@/logging/logger.service'; + +import { TaskRejectError } from './errors'; +import type { N8nMessage, RunnerMessage, RequesterMessage, TaskResultData } from './runner-types'; + +export interface TaskRunner { + id: string; + name?: string; + taskTypes: string[]; + lastSeen: Date; +} + +export interface Task { + id: string; + runnerId: TaskRunner['id']; + requesterId: string; + taskType: string; +} + +export interface TaskOffer { + offerId: string; + runnerId: TaskRunner['id']; + taskType: string; + validFor: number; + validUntil: bigint; +} + +export interface TaskRequest { + requestId: string; + requesterId: string; + taskType: string; + + acceptInProgress?: boolean; +} + +export type MessageCallback = (message: N8nMessage.ToRunner.All) => Promise | void; +export type RequesterMessageCallback = ( + message: N8nMessage.ToRequester.All, +) => Promise | void; + +type RunnerAcceptCallback = () => void; +type RequesterAcceptCallback = (settings: RequesterMessage.ToN8n.TaskSettings['settings']) => void; +type TaskRejectCallback = (reason: TaskRejectError) => void; + +@Service() +export class TaskBroker { + private knownRunners: Map< + TaskRunner['id'], + { runner: TaskRunner; messageCallback: MessageCallback } + > = new Map(); + + private requesters: Map = new Map(); + + private tasks: Map = new Map(); + + private runnerAcceptRejects: Map< + Task['id'], + { accept: RunnerAcceptCallback; reject: TaskRejectCallback } + > = new Map(); + + private requesterAcceptRejects: Map< + Task['id'], + { accept: RequesterAcceptCallback; reject: TaskRejectCallback } + > = new Map(); + + private pendingTaskOffers: TaskOffer[] = []; + + private pendingTaskRequests: TaskRequest[] = []; + + constructor(private readonly logger: Logger) {} + + expireTasks() { + const now = process.hrtime.bigint(); + const invalidOffers: number[] = []; + for (let i = 0; i < 
this.pendingTaskOffers.length; i++) { + if (this.pendingTaskOffers[i].validUntil < now) { + invalidOffers.push(i); + } + } + + // We reverse the list so the later indexes are valid after deleting earlier ones + invalidOffers.reverse().forEach((i) => this.pendingTaskOffers.splice(i, 1)); + } + + registerRunner(runner: TaskRunner, messageCallback: MessageCallback) { + this.knownRunners.set(runner.id, { runner, messageCallback }); + } + + deregisterRunner(runnerId: string) { + this.knownRunners.delete(runnerId); + } + + registerRequester(requesterId: string, messageCallback: RequesterMessageCallback) { + this.requesters.set(requesterId, messageCallback); + } + + deregisterRequester(requesterId: string) { + this.requesters.delete(requesterId); + } + + private async messageRunner(runnerId: TaskRunner['id'], message: N8nMessage.ToRunner.All) { + await this.knownRunners.get(runnerId)?.messageCallback(message); + } + + private async messageRequester(requesterId: string, message: N8nMessage.ToRequester.All) { + await this.requesters.get(requesterId)?.(message); + } + + async onRunnerMessage(runnerId: TaskRunner['id'], message: RunnerMessage.ToN8n.All) { + const runner = this.knownRunners.get(runnerId); + if (!runner) { + return; + } + switch (message.type) { + case 'runner:taskaccepted': + this.handleRunnerAccept(message.taskId); + break; + case 'runner:taskrejected': + this.handleRunnerReject(message.taskId, message.reason); + break; + case 'runner:taskoffer': + this.taskOffered({ + runnerId, + taskType: message.taskType, + offerId: message.offerId, + validFor: message.validFor, + validUntil: process.hrtime.bigint() + BigInt(message.validFor * 1_000_000), + }); + break; + case 'runner:taskdone': + await this.taskDoneHandler(message.taskId, message.data); + break; + case 'runner:taskerror': + await this.taskErrorHandler(message.taskId, message.error); + break; + case 'runner:taskdatarequest': + await this.handleDataRequest( + message.taskId, + message.requestId, + 
message.requestType, + message.param, + ); + break; + + case 'runner:rpc': + await this.handleRpcRequest(message.taskId, message.callId, message.name, message.params); + break; + // Already handled + case 'runner:info': + break; + } + } + + async handleRpcRequest( + taskId: Task['id'], + callId: string, + name: RunnerMessage.ToN8n.RPC['name'], + params: unknown[], + ) { + const task = this.tasks.get(taskId); + if (!task) { + return; + } + await this.messageRequester(task.requesterId, { + type: 'broker:rpc', + taskId, + callId, + name, + params, + }); + } + + handleRunnerAccept(taskId: Task['id']) { + const acceptReject = this.runnerAcceptRejects.get(taskId); + if (acceptReject) { + acceptReject.accept(); + this.runnerAcceptRejects.delete(taskId); + } + } + + handleRunnerReject(taskId: Task['id'], reason: string) { + const acceptReject = this.runnerAcceptRejects.get(taskId); + if (acceptReject) { + acceptReject.reject(new TaskRejectError(reason)); + this.runnerAcceptRejects.delete(taskId); + } + } + + async handleDataRequest( + taskId: Task['id'], + requestId: RunnerMessage.ToN8n.TaskDataRequest['requestId'], + requestType: RunnerMessage.ToN8n.TaskDataRequest['requestType'], + param?: string, + ) { + const task = this.tasks.get(taskId); + if (!task) { + return; + } + await this.messageRequester(task.requesterId, { + type: 'broker:taskdatarequest', + taskId, + requestId, + requestType, + param, + }); + } + + async handleResponse( + taskId: Task['id'], + requestId: RunnerMessage.ToN8n.TaskDataRequest['requestId'], + data: unknown, + ) { + const task = this.tasks.get(taskId); + if (!task) { + return; + } + await this.messageRunner(task.requesterId, { + type: 'broker:taskdataresponse', + taskId, + requestId, + data, + }); + } + + async onRequesterMessage(requesterId: string, message: RequesterMessage.ToN8n.All) { + switch (message.type) { + case 'requester:tasksettings': + this.handleRequesterAccept(message.taskId, message.settings); + break; + case 
'requester:taskcancel': + await this.cancelTask(message.taskId, message.reason); + break; + case 'requester:taskrequest': + this.taskRequested({ + taskType: message.taskType, + requestId: message.requestId, + requesterId, + }); + break; + case 'requester:taskdataresponse': + await this.handleRequesterDataResponse(message.taskId, message.requestId, message.data); + break; + case 'requester:rpcresponse': + await this.handleRequesterRpcResponse( + message.taskId, + message.callId, + message.status, + message.data, + ); + break; + } + } + + async handleRequesterRpcResponse( + taskId: string, + callId: string, + status: RequesterMessage.ToN8n.RPCResponse['status'], + data: unknown, + ) { + const runner = await this.getRunnerOrFailTask(taskId); + await this.messageRunner(runner.id, { + type: 'broker:rpcresponse', + taskId, + callId, + status, + data, + }); + } + + async handleRequesterDataResponse(taskId: Task['id'], requestId: string, data: unknown) { + const runner = await this.getRunnerOrFailTask(taskId); + + await this.messageRunner(runner.id, { + type: 'broker:taskdataresponse', + taskId, + requestId, + data, + }); + } + + handleRequesterAccept( + taskId: Task['id'], + settings: RequesterMessage.ToN8n.TaskSettings['settings'], + ) { + const acceptReject = this.requesterAcceptRejects.get(taskId); + if (acceptReject) { + acceptReject.accept(settings); + this.requesterAcceptRejects.delete(taskId); + } + } + + handleRequesterReject(taskId: Task['id'], reason: string) { + const acceptReject = this.requesterAcceptRejects.get(taskId); + if (acceptReject) { + acceptReject.reject(new TaskRejectError(reason)); + this.requesterAcceptRejects.delete(taskId); + } + } + + private async cancelTask(taskId: Task['id'], reason: string) { + const task = this.tasks.get(taskId); + if (!task) { + return; + } + this.tasks.delete(taskId); + + await this.messageRunner(task.runnerId, { + type: 'broker:taskcancel', + taskId, + reason, + }); + } + + private async failTask(taskId: Task['id'], 
reason: string) { + const task = this.tasks.get(taskId); + if (!task) { + return; + } + this.tasks.delete(taskId); + // TODO: special message type? + await this.messageRequester(task.requesterId, { + type: 'broker:taskerror', + taskId, + error: reason, + }); + } + + private async getRunnerOrFailTask(taskId: Task['id']): Promise { + const task = this.tasks.get(taskId); + if (!task) { + throw new ApplicationError(`Cannot find runner, failed to find task (${taskId})`, { + level: 'error', + }); + } + const runner = this.knownRunners.get(task.runnerId); + if (!runner) { + const reason = `Cannot find runner, failed to find runner (${task.runnerId})`; + await this.failTask(taskId, reason); + throw new ApplicationError(reason, { + level: 'error', + }); + } + return runner.runner; + } + + async sendTaskSettings(taskId: Task['id'], settings: unknown) { + const runner = await this.getRunnerOrFailTask(taskId); + await this.messageRunner(runner.id, { + type: 'broker:tasksettings', + taskId, + settings, + }); + } + + async taskDoneHandler(taskId: Task['id'], data: TaskResultData) { + const task = this.tasks.get(taskId); + if (!task) { + return; + } + await this.requesters.get(task.requesterId)?.({ + type: 'broker:taskdone', + taskId: task.id, + data, + }); + this.tasks.delete(task.id); + } + + async taskErrorHandler(taskId: Task['id'], error: unknown) { + const task = this.tasks.get(taskId); + if (!task) { + return; + } + await this.requesters.get(task.requesterId)?.({ + type: 'broker:taskerror', + taskId: task.id, + error, + }); + this.tasks.delete(task.id); + } + + async acceptOffer(offer: TaskOffer, request: TaskRequest): Promise { + const taskId = nanoid(8); + + try { + const acceptPromise = new Promise((resolve, reject) => { + this.runnerAcceptRejects.set(taskId, { accept: resolve as () => void, reject }); + + // TODO: customisable timeout + setTimeout(() => { + reject('Runner timed out'); + }, 2000); + }); + + await this.messageRunner(offer.runnerId, { + type: 
'broker:taskofferaccept', + offerId: offer.offerId, + taskId, + }); + + await acceptPromise; + } catch (e) { + request.acceptInProgress = false; + if (e instanceof TaskRejectError) { + this.logger.info(`Task (${taskId}) rejected by Runner with reason "${e.reason}"`); + return; + } + throw e; + } + + const task: Task = { + id: taskId, + taskType: offer.taskType, + runnerId: offer.runnerId, + requesterId: request.requesterId, + }; + + this.tasks.set(taskId, task); + const requestIndex = this.pendingTaskRequests.findIndex( + (r) => r.requestId === request.requestId, + ); + if (requestIndex === -1) { + this.logger.error( + `Failed to find task request (${request.requestId}) after a task was accepted. This shouldn't happen, and might be a race condition.`, + ); + return; + } + this.pendingTaskRequests.splice(requestIndex, 1); + + try { + const acceptPromise = new Promise( + (resolve, reject) => { + this.requesterAcceptRejects.set(taskId, { + accept: resolve as (settings: RequesterMessage.ToN8n.TaskSettings['settings']) => void, + reject, + }); + + // TODO: customisable timeout + setTimeout(() => { + reject('Requester timed out'); + }, 2000); + }, + ); + + await this.messageRequester(request.requesterId, { + type: 'broker:taskready', + requestId: request.requestId, + taskId, + }); + + const settings = await acceptPromise; + await this.sendTaskSettings(task.id, settings); + } catch (e) { + if (e instanceof TaskRejectError) { + await this.cancelTask(task.id, e.reason); + this.logger.info(`Task (${taskId}) rejected by Requester with reason "${e.reason}"`); + return; + } + await this.cancelTask(task.id, 'Unknown reason'); + throw e; + } + } + + // Find matching task offers and requests, then let the runner + // know that an offer has been accepted + // + // *DO NOT MAKE THIS FUNCTION ASYNC* + // This function relies on never yielding. 
+ // If you need to make this function async, you'll need to + // implement some kind of locking for the requests and task + // lists + settleTasks() { + this.expireTasks(); + + for (const request of this.pendingTaskRequests) { + if (request.acceptInProgress) { + continue; + } + const offerIndex = this.pendingTaskOffers.findIndex((o) => o.taskType === request.taskType); + if (offerIndex === -1) { + continue; + } + const offer = this.pendingTaskOffers[offerIndex]; + + request.acceptInProgress = true; + this.pendingTaskOffers.splice(offerIndex, 1); + + void this.acceptOffer(offer, request); + } + } + + taskRequested(request: TaskRequest) { + this.pendingTaskRequests.push(request); + this.settleTasks(); + } + + taskOffered(offer: TaskOffer) { + this.pendingTaskOffers.push(offer); + this.settleTasks(); + } + + /** + * For testing only + */ + + getTasks() { + return this.tasks; + } + + getPendingTaskOffers() { + return this.pendingTaskOffers; + } + + getPendingTaskRequests() { + return this.pendingTaskRequests; + } + + getKnownRunners() { + return this.knownRunners; + } + + getKnownRequesters() { + return this.requesters; + } + + getRunnerAcceptRejects() { + return this.runnerAcceptRejects; + } + + setTasks(tasks: Record) { + this.tasks = new Map(Object.entries(tasks)); + } + + setPendingTaskOffers(pendingTaskOffers: TaskOffer[]) { + this.pendingTaskOffers = pendingTaskOffers; + } + + setPendingTaskRequests(pendingTaskRequests: TaskRequest[]) { + this.pendingTaskRequests = pendingTaskRequests; + } + + setRunnerAcceptRejects( + runnerAcceptRejects: Record< + string, + { accept: RunnerAcceptCallback; reject: TaskRejectCallback } + >, + ) { + this.runnerAcceptRejects = new Map(Object.entries(runnerAcceptRejects)); + } +} diff --git a/packages/cli/src/runners/task-managers/single-main-task-manager.ts b/packages/cli/src/runners/task-managers/single-main-task-manager.ts new file mode 100644 index 0000000000..b5b60df72b --- /dev/null +++ 
b/packages/cli/src/runners/task-managers/single-main-task-manager.ts @@ -0,0 +1,30 @@ +import Container from 'typedi'; + +import { TaskManager } from './task-manager'; +import type { RequesterMessage } from '../runner-types'; +import type { RequesterMessageCallback } from '../task-broker.service'; +import { TaskBroker } from '../task-broker.service'; + +export class SingleMainTaskManager extends TaskManager { + taskBroker: TaskBroker; + + id: string = 'single-main'; + + constructor() { + super(); + this.registerRequester(); + } + + registerRequester() { + this.taskBroker = Container.get(TaskBroker); + + this.taskBroker.registerRequester( + this.id, + this.onMessage.bind(this) as RequesterMessageCallback, + ); + } + + sendMessage(message: RequesterMessage.ToN8n.All) { + void this.taskBroker.onRequesterMessage(this.id, message); + } +} diff --git a/packages/cli/src/runners/task-managers/task-manager.ts b/packages/cli/src/runners/task-managers/task-manager.ts new file mode 100644 index 0000000000..9f7e492fbe --- /dev/null +++ b/packages/cli/src/runners/task-managers/task-manager.ts @@ -0,0 +1,410 @@ +import { + type IExecuteFunctions, + type Workflow, + type IRunExecutionData, + type INodeExecutionData, + type ITaskDataConnections, + type INode, + type WorkflowParameters, + type INodeParameters, + type WorkflowExecuteMode, + type IExecuteData, + type IDataObject, + type IWorkflowExecuteAdditionalData, +} from 'n8n-workflow'; +import { nanoid } from 'nanoid'; + +import { TaskError } from '@/runners/errors'; + +import { + RPC_ALLOW_LIST, + type TaskResultData, + type N8nMessage, + type RequesterMessage, +} from '../runner-types'; + +export type RequestAccept = (jobId: string) => void; +export type RequestReject = (reason: string) => void; + +export type TaskAccept = (data: TaskResultData) => void; +export type TaskReject = (error: unknown) => void; + +export interface TaskData { + executeFunctions: IExecuteFunctions; + inputData: ITaskDataConnections; + node: INode; + + 
workflow: Workflow; + runExecutionData: IRunExecutionData; + runIndex: number; + itemIndex: number; + activeNodeName: string; + connectionInputData: INodeExecutionData[]; + siblingParameters: INodeParameters; + mode: WorkflowExecuteMode; + executeData?: IExecuteData; + defaultReturnRunIndex: number; + selfData: IDataObject; + contextNodeName: string; + additionalData: IWorkflowExecuteAdditionalData; +} + +export interface PartialAdditionalData { + executionId?: string; + restartExecutionId?: string; + restApiUrl: string; + instanceBaseUrl: string; + formWaitingBaseUrl: string; + webhookBaseUrl: string; + webhookWaitingBaseUrl: string; + webhookTestBaseUrl: string; + currentNodeParameters?: INodeParameters; + executionTimeoutTimestamp?: number; + userId?: string; + variables: IDataObject; +} + +export interface AllCodeTaskData { + workflow: Omit; + inputData: ITaskDataConnections; + node: INode; + + runExecutionData: IRunExecutionData; + runIndex: number; + itemIndex: number; + activeNodeName: string; + connectionInputData: INodeExecutionData[]; + siblingParameters: INodeParameters; + mode: WorkflowExecuteMode; + executeData?: IExecuteData; + defaultReturnRunIndex: number; + selfData: IDataObject; + contextNodeName: string; + additionalData: PartialAdditionalData; +} + +export interface TaskRequest { + requestId: string; + taskType: string; + settings: unknown; + data: TaskData; +} + +export interface Task { + taskId: string; + settings: unknown; + data: TaskData; +} + +interface ExecuteFunctionObject { + [name: string]: ((...args: unknown[]) => unknown) | ExecuteFunctionObject; +} + +const workflowToParameters = (workflow: Workflow): Omit => { + return { + id: workflow.id, + name: workflow.name, + active: workflow.active, + connections: workflow.connectionsBySourceNode, + nodes: Object.values(workflow.nodes), + pinData: workflow.pinData, + settings: workflow.settings, + staticData: workflow.staticData, + }; +}; + +export class TaskManager { + requestAcceptRejects: 
Map = new Map(); + + taskAcceptRejects: Map = new Map(); + + pendingRequests: Map = new Map(); + + tasks: Map = new Map(); + + async startTask( + additionalData: IWorkflowExecuteAdditionalData, + taskType: string, + settings: unknown, + executeFunctions: IExecuteFunctions, + inputData: ITaskDataConnections, + node: INode, + workflow: Workflow, + runExecutionData: IRunExecutionData, + runIndex: number, + itemIndex: number, + activeNodeName: string, + connectionInputData: INodeExecutionData[], + siblingParameters: INodeParameters, + mode: WorkflowExecuteMode, + executeData?: IExecuteData, + defaultReturnRunIndex = -1, + selfData: IDataObject = {}, + contextNodeName: string = activeNodeName, + ): Promise { + const data: TaskData = { + workflow, + runExecutionData, + runIndex, + connectionInputData, + inputData, + node, + executeFunctions, + itemIndex, + siblingParameters, + mode, + executeData, + defaultReturnRunIndex, + selfData, + contextNodeName, + activeNodeName, + additionalData, + }; + + const request: TaskRequest = { + requestId: nanoid(), + taskType, + settings, + data, + }; + + this.pendingRequests.set(request.requestId, request); + + const taskIdPromise = new Promise((resolve, reject) => { + this.requestAcceptRejects.set(request.requestId, { + accept: resolve, + reject, + }); + }); + + this.sendMessage({ + type: 'requester:taskrequest', + requestId: request.requestId, + taskType, + }); + + const taskId = await taskIdPromise; + + const task: Task = { + taskId, + data, + settings, + }; + this.tasks.set(task.taskId, task); + + try { + const dataPromise = new Promise((resolve, reject) => { + this.taskAcceptRejects.set(task.taskId, { + accept: resolve, + reject, + }); + }); + + this.sendMessage({ + type: 'requester:tasksettings', + taskId, + settings, + }); + + const resultData = await dataPromise; + // Set custom execution data (`$execution.customData`) if sent + if (resultData.customData) { + Object.entries(resultData.customData).forEach(([k, v]) => { + if 
(!runExecutionData.resultData.metadata) { + runExecutionData.resultData.metadata = {}; + } + runExecutionData.resultData.metadata[k] = v; + }); + } + return resultData.result as T; + } catch (e) { + if (typeof e === 'string') { + throw new TaskError(e, { + level: 'error', + }); + } + throw e; + } finally { + this.tasks.delete(taskId); + } + } + + sendMessage(_message: RequesterMessage.ToN8n.All) {} + + onMessage(message: N8nMessage.ToRequester.All) { + switch (message.type) { + case 'broker:taskready': + this.taskReady(message.requestId, message.taskId); + break; + case 'broker:taskdone': + this.taskDone(message.taskId, message.data); + break; + case 'broker:taskerror': + this.taskError(message.taskId, message.error); + break; + case 'broker:taskdatarequest': + this.sendTaskData(message.taskId, message.requestId, message.requestType); + break; + case 'broker:rpc': + void this.handleRpc(message.taskId, message.callId, message.name, message.params); + break; + } + } + + taskReady(requestId: string, taskId: string) { + const acceptReject = this.requestAcceptRejects.get(requestId); + if (!acceptReject) { + this.rejectTask( + taskId, + 'Request ID not found. 
In multi-main setup, it is possible for one of the mains to have reported ready state already.', + ); + return; + } + + acceptReject.accept(taskId); + this.requestAcceptRejects.delete(requestId); + } + + rejectTask(jobId: string, reason: string) { + this.sendMessage({ + type: 'requester:taskcancel', + taskId: jobId, + reason, + }); + } + + taskDone(taskId: string, data: TaskResultData) { + const acceptReject = this.taskAcceptRejects.get(taskId); + if (acceptReject) { + acceptReject.accept(data); + this.taskAcceptRejects.delete(taskId); + } + } + + taskError(taskId: string, error: unknown) { + const acceptReject = this.taskAcceptRejects.get(taskId); + if (acceptReject) { + acceptReject.reject(error); + this.taskAcceptRejects.delete(taskId); + } + } + + sendTaskData( + taskId: string, + requestId: string, + requestType: N8nMessage.ToRequester.TaskDataRequest['requestType'], + ) { + const job = this.tasks.get(taskId); + if (!job) { + // TODO: logging + return; + } + if (requestType === 'all') { + const jd = job.data; + const ad = jd.additionalData; + const data: AllCodeTaskData = { + workflow: workflowToParameters(jd.workflow), + connectionInputData: jd.connectionInputData, + inputData: jd.inputData, + itemIndex: jd.itemIndex, + activeNodeName: jd.activeNodeName, + contextNodeName: jd.contextNodeName, + defaultReturnRunIndex: jd.defaultReturnRunIndex, + mode: jd.mode, + node: jd.node, + runExecutionData: jd.runExecutionData, + runIndex: jd.runIndex, + selfData: jd.selfData, + siblingParameters: jd.siblingParameters, + executeData: jd.executeData, + additionalData: { + formWaitingBaseUrl: ad.formWaitingBaseUrl, + instanceBaseUrl: ad.instanceBaseUrl, + restApiUrl: ad.restApiUrl, + variables: ad.variables, + webhookBaseUrl: ad.webhookBaseUrl, + webhookTestBaseUrl: ad.webhookTestBaseUrl, + webhookWaitingBaseUrl: ad.webhookWaitingBaseUrl, + currentNodeParameters: ad.currentNodeParameters, + executionId: ad.executionId, + executionTimeoutTimestamp: 
ad.executionTimeoutTimestamp, + restartExecutionId: ad.restartExecutionId, + userId: ad.userId, + }, + }; + this.sendMessage({ + type: 'requester:taskdataresponse', + taskId, + requestId, + data, + }); + } + } + + async handleRpc( + taskId: string, + callId: string, + name: N8nMessage.ToRequester.RPC['name'], + params: unknown[], + ) { + const job = this.tasks.get(taskId); + if (!job) { + // TODO: logging + return; + } + + try { + if (!RPC_ALLOW_LIST.includes(name)) { + this.sendMessage({ + type: 'requester:rpcresponse', + taskId, + callId, + status: 'error', + data: 'Method not allowed', + }); + return; + } + const splitPath = name.split('.'); + + const funcs = job.data.executeFunctions; + + let func: ((...args: unknown[]) => Promise) | undefined = undefined; + let funcObj: ExecuteFunctionObject[string] | undefined = + funcs as unknown as ExecuteFunctionObject; + for (const part of splitPath) { + funcObj = (funcObj as ExecuteFunctionObject)[part] ?? undefined; + if (!funcObj) { + break; + } + } + func = funcObj as unknown as (...args: unknown[]) => Promise; + if (!func) { + this.sendMessage({ + type: 'requester:rpcresponse', + taskId, + callId, + status: 'error', + data: 'Could not find method', + }); + return; + } + const data = (await func.call(funcs, ...params)) as unknown; + + this.sendMessage({ + type: 'requester:rpcresponse', + taskId, + callId, + status: 'success', + data, + }); + } catch (e) { + this.sendMessage({ + type: 'requester:rpcresponse', + taskId, + callId, + status: 'error', + data: e, + }); + } + } +} diff --git a/packages/cli/src/runners/task-runner-process.ts b/packages/cli/src/runners/task-runner-process.ts new file mode 100644 index 0000000000..5f420ab568 --- /dev/null +++ b/packages/cli/src/runners/task-runner-process.ts @@ -0,0 +1,89 @@ +import { GlobalConfig } from '@n8n/config'; +import * as a from 'node:assert/strict'; +import { spawn } from 'node:child_process'; +import { Service } from 'typedi'; + +import { TaskRunnerAuthService } 
from './auth/task-runner-auth.service'; +import { OnShutdown } from '../decorators/on-shutdown'; + +type ChildProcess = ReturnType; + +/** + * Manages the JS task runner process as a child process + */ +@Service() +export class TaskRunnerProcess { + public get isRunning() { + return this.process !== null; + } + + /** The process ID of the task runner process */ + public get pid() { + return this.process?.pid; + } + + private process: ChildProcess | null = null; + + /** Promise that resolves after the process has exited */ + private runPromise: Promise | null = null; + + private isShuttingDown = false; + + constructor( + private readonly globalConfig: GlobalConfig, + private readonly authService: TaskRunnerAuthService, + ) {} + + async start() { + a.ok(!this.process, 'Task Runner Process already running'); + + const grantToken = await this.authService.createGrantToken(); + const startScript = require.resolve('@n8n/task-runner'); + + this.process = spawn('node', [startScript], { + env: { + PATH: process.env.PATH, + N8N_RUNNERS_GRANT_TOKEN: grantToken, + N8N_RUNNERS_N8N_URI: `127.0.0.1:${this.globalConfig.taskRunners.port}`, + }, + }); + + this.process.stdout?.pipe(process.stdout); + this.process.stderr?.pipe(process.stderr); + + this.monitorProcess(this.process); + } + + @OnShutdown() + async stop() { + if (!this.process) { + return; + } + + this.isShuttingDown = true; + + // TODO: Timeout & force kill + this.process.kill(); + await this.runPromise; + + this.isShuttingDown = false; + } + + private monitorProcess(process: ChildProcess) { + this.runPromise = new Promise((resolve) => { + process.on('exit', (code) => { + this.onProcessExit(code, resolve); + }); + }); + } + + private onProcessExit(_code: number | null, resolveFn: () => void) { + this.process = null; + resolveFn(); + + // If we are not shutting down, restart the process + if (!this.isShuttingDown) { + setImmediate(async () => await this.start()); + } + } +} diff --git 
a/packages/cli/src/runners/task-runner-server.ts b/packages/cli/src/runners/task-runner-server.ts new file mode 100644 index 0000000000..fc31c100a3 --- /dev/null +++ b/packages/cli/src/runners/task-runner-server.ts @@ -0,0 +1,201 @@ +import { GlobalConfig } from '@n8n/config'; +import compression from 'compression'; +import express from 'express'; +import * as a from 'node:assert/strict'; +import { randomBytes } from 'node:crypto'; +import { ServerResponse, type Server, createServer as createHttpServer } from 'node:http'; +import type { AddressInfo, Socket } from 'node:net'; +import { parse as parseUrl } from 'node:url'; +import { Service } from 'typedi'; +import { Server as WSServer } from 'ws'; + +import { inTest, LOWEST_SHUTDOWN_PRIORITY } from '@/constants'; +import { OnShutdown } from '@/decorators/on-shutdown'; +import { Logger } from '@/logging/logger.service'; +import { bodyParser, rawBodyReader } from '@/middlewares'; +import { send } from '@/response-helper'; +import { TaskRunnerAuthController } from '@/runners/auth/task-runner-auth.controller'; +import type { + TaskRunnerServerInitRequest, + TaskRunnerServerInitResponse, +} from '@/runners/runner-types'; +import { TaskRunnerService } from '@/runners/runner-ws-server'; + +/** + * Task Runner HTTP & WS server + */ +@Service() +export class TaskRunnerServer { + private server: Server | undefined; + + private wsServer: WSServer | undefined; + + readonly app: express.Application; + + public get port() { + return (this.server?.address() as AddressInfo)?.port; + } + + private get upgradeEndpoint() { + return `${this.getEndpointBasePath()}/_ws`; + } + + constructor( + private readonly logger: Logger, + private readonly globalConfig: GlobalConfig, + private readonly taskRunnerAuthController: TaskRunnerAuthController, + private readonly taskRunnerService: TaskRunnerService, + ) { + this.app = express(); + this.app.disable('x-powered-by'); + + if (!this.globalConfig.taskRunners.authToken) { + // Generate an auth 
token if one is not set + this.globalConfig.taskRunners.authToken = randomBytes(32).toString('hex'); + } + } + + async start(): Promise { + await this.setupHttpServer(); + + this.setupWsServer(); + + if (!inTest) { + await this.setupErrorHandlers(); + } + + this.setupCommonMiddlewares(); + + this.configureRoutes(); + } + + @OnShutdown(LOWEST_SHUTDOWN_PRIORITY) + async stop(): Promise { + if (this.wsServer) { + this.wsServer.close(); + this.wsServer = undefined; + } + if (this.server) { + await new Promise((resolve) => this.server?.close(() => resolve())); + this.server = undefined; + } + } + + /** Creates an HTTP server and listens to the configured port */ + private async setupHttpServer() { + const { app } = this; + + this.server = createHttpServer(app); + + const { + taskRunners: { port, listen_address: address }, + } = this.globalConfig; + + this.server.on('error', (error: Error & { code: string }) => { + if (error.code === 'EADDRINUSE') { + this.logger.info( + `n8n Task Runner's port ${port} is already in use. 
Do you have another instance of n8n running already?`, + ); + process.exit(1); + } + }); + + await new Promise((resolve) => { + a.ok(this.server); + this.server.listen(port, address, () => resolve()); + }); + + this.logger.info(`n8n Task Runner server ready on ${address}, port ${port}`); + } + + /** Creates WebSocket server for handling upgrade requests */ + private setupWsServer() { + const { authToken } = this.globalConfig.taskRunners; + a.ok(authToken); + a.ok(this.server); + + this.wsServer = new WSServer({ noServer: true }); + this.server.on('upgrade', this.handleUpgradeRequest); + } + + private async setupErrorHandlers() { + const { app } = this; + + // Augment errors sent to Sentry + const { + Handlers: { requestHandler, errorHandler }, + } = await import('@sentry/node'); + app.use(requestHandler()); + app.use(errorHandler()); + } + + private setupCommonMiddlewares() { + // Compress the response data + this.app.use(compression()); + + this.app.use(rawBodyReader); + this.app.use(bodyParser); + } + + private configureRoutes() { + this.app.use( + this.upgradeEndpoint, + // eslint-disable-next-line @typescript-eslint/unbound-method + this.taskRunnerAuthController.authMiddleware, + (req: TaskRunnerServerInitRequest, res: TaskRunnerServerInitResponse) => + this.taskRunnerService.handleRequest(req, res), + ); + + const authEndpoint = `${this.getEndpointBasePath()}/auth`; + this.app.post( + authEndpoint, + send(async (req) => await this.taskRunnerAuthController.createGrantToken(req)), + ); + } + + private handleUpgradeRequest = ( + request: TaskRunnerServerInitRequest, + socket: Socket, + head: Buffer, + ) => { + if (parseUrl(request.url).pathname !== this.upgradeEndpoint) { + socket.write('HTTP/1.1 404 Not Found\r\n\r\n'); + socket.destroy(); + return; + } + + if (!this.wsServer) { + // This might happen if the server is shutting down and we receive an upgrade request + socket.write('HTTP/1.1 503 Service Unavailable\r\n\r\n'); + socket.destroy(); + return; + } + + 
this.wsServer.handleUpgrade(request, socket, head, (ws) => { + request.ws = ws; + + const response = new ServerResponse(request); + response.writeHead = (statusCode) => { + if (statusCode > 200) ws.close(100); + return response; + }; + + // @ts-expect-error Delegate the request to the express app. This function is not exposed + // eslint-disable-next-line @typescript-eslint/no-unsafe-call + this.app.handle(request, response); + }); + }; + + /** Returns the normalized base path for the task runner endpoints */ + private getEndpointBasePath() { + let path = this.globalConfig.taskRunners.path; + if (!path.startsWith('/')) { + path = `/${path}`; + } + if (path.endsWith('/')) { + path = path.slice(-1); + } + + return path; + } +} diff --git a/packages/cli/src/scaling/__tests__/pubsub-handler.test.ts b/packages/cli/src/scaling/__tests__/pubsub-handler.test.ts new file mode 100644 index 0000000000..c637b2faf9 --- /dev/null +++ b/packages/cli/src/scaling/__tests__/pubsub-handler.test.ts @@ -0,0 +1,142 @@ +import { mock } from 'jest-mock-extended'; +import type { InstanceSettings } from 'n8n-core'; + +import type { MessageEventBus } from '@/eventbus/message-event-bus/message-event-bus'; +import { EventService } from '@/events/event.service'; +import type { ExternalSecretsManager } from '@/external-secrets/external-secrets-manager.ee'; +import type { License } from '@/license'; +import type { CommunityPackagesService } from '@/services/community-packages.service'; + +import { PubSubHandler } from '../pubsub/pubsub-handler'; + +describe('PubSubHandler', () => { + const eventService = new EventService(); + const license = mock(); + const eventbus = mock(); + const externalSecretsManager = mock(); + const communityPackagesService = mock(); + + describe('in webhook process', () => { + const instanceSettings = mock({ instanceType: 'webhook' }); + + it('should set up handlers in webhook process', () => { + // @ts-expect-error Spying on private method + const 
setupWebhookHandlersSpy = jest.spyOn(PubSubHandler.prototype, 'setupWebhookHandlers'); + + new PubSubHandler( + eventService, + instanceSettings, + license, + eventbus, + externalSecretsManager, + communityPackagesService, + ).init(); + + expect(setupWebhookHandlersSpy).toHaveBeenCalled(); + }); + + it('should reload license on `reload-license` event', () => { + new PubSubHandler( + eventService, + instanceSettings, + license, + eventbus, + externalSecretsManager, + communityPackagesService, + ).init(); + + eventService.emit('reload-license'); + + expect(license.reload).toHaveBeenCalled(); + }); + + it('should restart event bus on `restart-event-bus` event', () => { + new PubSubHandler( + eventService, + instanceSettings, + license, + eventbus, + externalSecretsManager, + communityPackagesService, + ).init(); + + eventService.emit('restart-event-bus'); + + expect(eventbus.restart).toHaveBeenCalled(); + }); + + it('should reload providers on `reload-external-secrets-providers` event', () => { + new PubSubHandler( + eventService, + instanceSettings, + license, + eventbus, + externalSecretsManager, + communityPackagesService, + ).init(); + + eventService.emit('reload-external-secrets-providers'); + + expect(externalSecretsManager.reloadAllProviders).toHaveBeenCalled(); + }); + + it('should install community package on `community-package-install` event', () => { + new PubSubHandler( + eventService, + instanceSettings, + license, + eventbus, + externalSecretsManager, + communityPackagesService, + ).init(); + + eventService.emit('community-package-install', { + packageName: 'test-package', + packageVersion: '1.0.0', + }); + + expect(communityPackagesService.installOrUpdateNpmPackage).toHaveBeenCalledWith( + 'test-package', + '1.0.0', + ); + }); + + it('should update community package on `community-package-update` event', () => { + new PubSubHandler( + eventService, + instanceSettings, + license, + eventbus, + externalSecretsManager, + communityPackagesService, + 
).init(); + + eventService.emit('community-package-update', { + packageName: 'test-package', + packageVersion: '1.0.0', + }); + + expect(communityPackagesService.installOrUpdateNpmPackage).toHaveBeenCalledWith( + 'test-package', + '1.0.0', + ); + }); + + it('should uninstall community package on `community-package-uninstall` event', () => { + new PubSubHandler( + eventService, + instanceSettings, + license, + eventbus, + externalSecretsManager, + communityPackagesService, + ).init(); + + eventService.emit('community-package-uninstall', { + packageName: 'test-package', + }); + + expect(communityPackagesService.removeNpmPackage).toHaveBeenCalledWith('test-package'); + }); + }); +}); diff --git a/packages/cli/src/scaling/__tests__/scaling.service.test.ts b/packages/cli/src/scaling/__tests__/scaling.service.test.ts index 9beae22af6..f1ae78f838 100644 --- a/packages/cli/src/scaling/__tests__/scaling.service.test.ts +++ b/packages/cli/src/scaling/__tests__/scaling.service.test.ts @@ -11,7 +11,7 @@ import { mockInstance } from '@test/mocking'; import { JOB_TYPE_NAME, QUEUE_NAME } from '../constants'; import type { JobProcessor } from '../job-processor'; import { ScalingService } from '../scaling.service'; -import type { Job, JobData, JobOptions, JobQueue } from '../scaling.types'; +import type { Job, JobData, JobQueue } from '../scaling.types'; const queue = mock({ client: { ping: jest.fn() }, @@ -208,10 +208,13 @@ describe('ScalingService', () => { queue.add.mockResolvedValue(mock({ id: '456' })); const jobData = mock({ executionId: '123' }); - const jobOptions = mock(); - await scalingService.addJob(jobData, jobOptions); + await scalingService.addJob(jobData, { priority: 100 }); - expect(queue.add).toHaveBeenCalledWith(JOB_TYPE_NAME, jobData, jobOptions); + expect(queue.add).toHaveBeenCalledWith(JOB_TYPE_NAME, jobData, { + priority: 100, + removeOnComplete: true, + removeOnFail: true, + }); }); }); diff --git 
a/packages/cli/src/scaling/__tests__/subscriber.service.test.ts b/packages/cli/src/scaling/__tests__/subscriber.service.test.ts index 14e67a8d4d..31e8486b8c 100644 --- a/packages/cli/src/scaling/__tests__/subscriber.service.test.ts +++ b/packages/cli/src/scaling/__tests__/subscriber.service.test.ts @@ -17,14 +17,14 @@ describe('Subscriber', () => { describe('constructor', () => { it('should init Redis client in scaling mode', () => { - const subscriber = new Subscriber(mock(), redisClientService); + const subscriber = new Subscriber(mock(), redisClientService, mock()); expect(subscriber.getClient()).toEqual(client); }); it('should not init Redis client in regular mode', () => { config.set('executions.mode', 'regular'); - const subscriber = new Subscriber(mock(), redisClientService); + const subscriber = new Subscriber(mock(), redisClientService, mock()); expect(subscriber.getClient()).toBeUndefined(); }); @@ -32,7 +32,7 @@ describe('Subscriber', () => { describe('shutdown', () => { it('should disconnect Redis client', () => { - const subscriber = new Subscriber(mock(), redisClientService); + const subscriber = new Subscriber(mock(), redisClientService, mock()); subscriber.shutdown(); expect(client.disconnect).toHaveBeenCalled(); }); @@ -40,7 +40,7 @@ describe('Subscriber', () => { describe('subscribe', () => { it('should subscribe to pubsub channel', async () => { - const subscriber = new Subscriber(mock(), redisClientService); + const subscriber = new Subscriber(mock(), redisClientService, mock()); await subscriber.subscribe('n8n.commands'); @@ -50,7 +50,7 @@ describe('Subscriber', () => { describe('setMessageHandler', () => { it('should set message handler function for channel', () => { - const subscriber = new Subscriber(mock(), redisClientService); + const subscriber = new Subscriber(mock(), redisClientService, mock()); const channel = 'n8n.commands'; const handlerFn = jest.fn(); diff --git a/packages/cli/src/scaling/__tests__/worker-server.test.ts 
b/packages/cli/src/scaling/__tests__/worker-server.test.ts index d5716bfac4..7f3a21a778 100644 --- a/packages/cli/src/scaling/__tests__/worker-server.test.ts +++ b/packages/cli/src/scaling/__tests__/worker-server.test.ts @@ -5,7 +5,6 @@ import type { InstanceSettings } from 'n8n-core'; import { AssertionError } from 'node:assert'; import * as http from 'node:http'; -import { PortTakenError } from '@/errors/port-taken.error'; import type { ExternalHooks } from '@/external-hooks'; import type { PrometheusMetricsService } from '@/metrics/prometheus-metrics.service'; import { bodyParser, rawBodyReader } from '@/middlewares'; @@ -34,7 +33,7 @@ describe('WorkerServer', () => { beforeEach(() => { globalConfig = mock({ queue: { - health: { active: true, port: 5678 }, + health: { active: true, port: 5678, address: '0.0.0.0' }, }, credentials: { overwrite: { endpoint: '' }, @@ -59,8 +58,11 @@ describe('WorkerServer', () => { ).toThrowError(AssertionError); }); - it('should throw if port taken', async () => { + it('should exit if port taken', async () => { const server = mock(); + const procesExitSpy = jest + .spyOn(process, 'exit') + .mockImplementation(() => undefined as never); jest.spyOn(http, 'createServer').mockReturnValue(server); @@ -69,18 +71,19 @@ describe('WorkerServer', () => { return server; }); - expect( - () => - new WorkerServer( - globalConfig, - mock(), - mock(), - mock(), - externalHooks, - instanceSettings, - prometheusMetricsService, - ), - ).toThrowError(PortTakenError); + new WorkerServer( + globalConfig, + mock(), + mock(), + mock(), + externalHooks, + instanceSettings, + prometheusMetricsService, + ); + + expect(procesExitSpy).toHaveBeenCalledWith(1); + + procesExitSpy.mockRestore(); }); }); @@ -89,8 +92,9 @@ describe('WorkerServer', () => { const server = mock(); jest.spyOn(http, 'createServer').mockReturnValue(server); - server.listen.mockImplementation((_port, callback: () => void) => { - callback(); + server.listen.mockImplementation((...args: 
unknown[]) => { + const callback = args.find((arg) => typeof arg === 'function'); + if (callback) callback(); return server; }); @@ -123,8 +127,9 @@ describe('WorkerServer', () => { const server = mock(); jest.spyOn(http, 'createServer').mockReturnValue(server); - server.listen.mockImplementation((_port, callback: () => void) => { - callback(); + server.listen.mockImplementation((...args: unknown[]) => { + const callback = args.find((arg) => typeof arg === 'function'); + if (callback) callback(); return server; }); @@ -177,8 +182,9 @@ describe('WorkerServer', () => { prometheusMetricsService, ); - server.listen.mockImplementation((_port, callback: () => void) => { - callback(); + server.listen.mockImplementation((...args: unknown[]) => { + const callback = args.find((arg) => typeof arg === 'function'); + if (callback) callback(); return server; }); diff --git a/packages/cli/src/scaling/job-processor.ts b/packages/cli/src/scaling/job-processor.ts index 3155b0d90f..49e1383ac6 100644 --- a/packages/cli/src/scaling/job-processor.ts +++ b/packages/cli/src/scaling/job-processor.ts @@ -8,7 +8,7 @@ import { Service } from 'typedi'; import config from '@/config'; import { ExecutionRepository } from '@/databases/repositories/execution.repository'; import { WorkflowRepository } from '@/databases/repositories/workflow.repository'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { NodeTypes } from '@/node-types'; import * as WorkflowExecuteAdditionalData from '@/workflow-execute-additional-data'; @@ -47,7 +47,7 @@ export class JobProcessor { this.logger.info(`[JobProcessor] Starting job ${job.id} (execution ${executionId})`); - await this.executionRepository.updateStatus(executionId, 'running'); + const startedAt = await this.executionRepository.setRunning(executionId); let { staticData } = execution.workflowData; @@ -137,7 +137,7 @@ export class JobProcessor { workflowId: execution.workflowId, workflowName: 
execution.workflowData.name, mode: execution.mode, - startedAt: execution.startedAt, + startedAt, retryOf: execution.retryOf ?? '', status: execution.status, }; diff --git a/packages/cli/src/scaling/pubsub/publisher.service.ts b/packages/cli/src/scaling/pubsub/publisher.service.ts index f015890d48..7a35b94c3e 100644 --- a/packages/cli/src/scaling/pubsub/publisher.service.ts +++ b/packages/cli/src/scaling/pubsub/publisher.service.ts @@ -2,7 +2,7 @@ import type { Redis as SingleNodeClient, Cluster as MultiNodeClient } from 'iore import { Service } from 'typedi'; import config from '@/config'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { RedisClientService } from '@/services/redis-client.service'; import type { PubSub } from './pubsub.types'; @@ -24,8 +24,6 @@ export class Publisher { if (config.getEnv('executions.mode') !== 'queue') return; this.client = this.redisClientService.createClient({ type: 'publisher(n8n)' }); - - this.client.on('error', (error) => this.logger.error(error.message)); } getClient() { diff --git a/packages/cli/src/scaling/pubsub/pubsub-handler.ts b/packages/cli/src/scaling/pubsub/pubsub-handler.ts new file mode 100644 index 0000000000..8b7a91e4dd --- /dev/null +++ b/packages/cli/src/scaling/pubsub/pubsub-handler.ts @@ -0,0 +1,61 @@ +import { InstanceSettings } from 'n8n-core'; +import { Service } from 'typedi'; + +import { MessageEventBus } from '@/eventbus/message-event-bus/message-event-bus'; +import { EventService } from '@/events/event.service'; +import type { PubSubEventMap } from '@/events/maps/pub-sub.event-map'; +import { ExternalSecretsManager } from '@/external-secrets/external-secrets-manager.ee'; +import { License } from '@/license'; +import { CommunityPackagesService } from '@/services/community-packages.service'; + +/** + * Responsible for handling events emitted from messages received via a pubsub channel. 
+ */ +@Service() +export class PubSubHandler { + constructor( + private readonly eventService: EventService, + private readonly instanceSettings: InstanceSettings, + private readonly license: License, + private readonly eventbus: MessageEventBus, + private readonly externalSecretsManager: ExternalSecretsManager, + private readonly communityPackagesService: CommunityPackagesService, + ) {} + + init() { + if (this.instanceSettings.instanceType === 'webhook') this.setupWebhookHandlers(); + } + + private setupHandlers( + map: { + [EventName in EventNames]?: (event: PubSubEventMap[EventName]) => void | Promise; + }, + ) { + for (const [eventName, handlerFn] of Object.entries(map) as Array< + [EventNames, (event: PubSubEventMap[EventNames]) => void | Promise] + >) { + this.eventService.on(eventName, async (event) => { + await handlerFn(event); + }); + } + } + + // #region Webhook process + + private setupWebhookHandlers() { + this.setupHandlers({ + 'reload-license': async () => await this.license.reload(), + 'restart-event-bus': async () => await this.eventbus.restart(), + 'reload-external-secrets-providers': async () => + await this.externalSecretsManager.reloadAllProviders(), + 'community-package-install': async ({ packageName, packageVersion }) => + await this.communityPackagesService.installOrUpdateNpmPackage(packageName, packageVersion), + 'community-package-update': async ({ packageName, packageVersion }) => + await this.communityPackagesService.installOrUpdateNpmPackage(packageName, packageVersion), + 'community-package-uninstall': async ({ packageName }) => + await this.communityPackagesService.removeNpmPackage(packageName), + }); + } + + // #endregion +} diff --git a/packages/cli/src/scaling/pubsub/pubsub.types.ts b/packages/cli/src/scaling/pubsub/pubsub.types.ts index 13643440fb..ac83659212 100644 --- a/packages/cli/src/scaling/pubsub/pubsub.types.ts +++ b/packages/cli/src/scaling/pubsub/pubsub.types.ts @@ -1,6 +1,4 @@ -import type { PushType, WorkerStatus } 
from '@n8n/api-types'; - -import type { IWorkflowDb } from '@/interfaces'; +import type { PubSubCommandMap, PubSubWorkerResponseMap } from '@/events/maps/pub-sub.event-map'; import type { Resolve } from '@/utlity.types'; import type { COMMAND_PUBSUB_CHANNEL, WORKER_RESPONSE_PUBSUB_CHANNEL } from '../constants'; @@ -20,92 +18,17 @@ export namespace PubSub { // commands // ---------------------------------- - export type CommandMap = { - // #region Lifecycle - - 'reload-license': never; - - 'restart-event-bus': never; - - 'reload-external-secrets-providers': never; - - // #endregion - - // #region Community packages - - 'community-package-install': { - packageName: string; - packageVersion: string; - }; - - 'community-package-update': { - packageName: string; - packageVersion: string; - }; - - 'community-package-uninstall': { - packageName: string; - }; - - // #endregion - - // #region Worker view - - 'get-worker-id': never; - - 'get-worker-status': never; - - // #endregion - - // #region Multi-main setup - - 'add-webhooks-triggers-and-pollers': { - workflowId: string; - }; - - 'remove-triggers-and-pollers': { - workflowId: string; - }; - - 'display-workflow-activation': { - workflowId: string; - }; - - 'display-workflow-deactivation': { - workflowId: string; - }; - - 'display-workflow-activation-error': { - workflowId: string; - errorMessage: string; - }; - - 'relay-execution-lifecycle-event': { - type: PushType; - args: Record; - pushRef: string; - }; - - 'clear-test-webhooks': { - webhookKey: string; - workflowEntity: IWorkflowDb; - pushRef: string; - }; - - // #endregion - }; - - type _ToCommand = { + type _ToCommand = { senderId: string; targets?: string[]; command: CommandKey; - } & (CommandMap[CommandKey] extends never + } & (PubSubCommandMap[CommandKey] extends never ? 
{ payload?: never } // some commands carry no payload - : { payload: CommandMap[CommandKey] }); + : { payload: PubSubCommandMap[CommandKey] }); - type ToCommand = Resolve<_ToCommand>; + type ToCommand = Resolve<_ToCommand>; - namespace Command { + namespace Commands { export type ReloadLicense = ToCommand<'reload-license'>; export type RestartEventBus = ToCommand<'restart-event-bus'>; export type ReloadExternalSecretsProviders = ToCommand<'reload-external-secrets-providers'>; @@ -125,63 +48,39 @@ export namespace PubSub { /** Command sent via the `n8n.commands` pubsub channel. */ export type Command = - | Command.ReloadLicense - | Command.RestartEventBus - | Command.ReloadExternalSecretsProviders - | Command.CommunityPackageInstall - | Command.CommunityPackageUpdate - | Command.CommunityPackageUninstall - | Command.GetWorkerId - | Command.GetWorkerStatus - | Command.AddWebhooksTriggersAndPollers - | Command.RemoveTriggersAndPollers - | Command.DisplayWorkflowActivation - | Command.DisplayWorkflowDeactivation - | Command.DisplayWorkflowActivationError - | Command.RelayExecutionLifecycleEvent - | Command.ClearTestWebhooks; + | Commands.ReloadLicense + | Commands.RestartEventBus + | Commands.ReloadExternalSecretsProviders + | Commands.CommunityPackageInstall + | Commands.CommunityPackageUpdate + | Commands.CommunityPackageUninstall + | Commands.GetWorkerId + | Commands.GetWorkerStatus + | Commands.AddWebhooksTriggersAndPollers + | Commands.RemoveTriggersAndPollers + | Commands.DisplayWorkflowActivation + | Commands.DisplayWorkflowDeactivation + | Commands.DisplayWorkflowActivationError + | Commands.RelayExecutionLifecycleEvent + | Commands.ClearTestWebhooks; // ---------------------------------- // worker responses // ---------------------------------- - export type WorkerResponseMap = { - // #region Lifecycle - - 'restart-event-bus': { - result: 'success' | 'error'; - error?: string; - }; - - 'reload-external-secrets-providers': { - result: 'success' | 'error'; - 
error?: string; - }; - - // #endregion - - // #region Worker view - - 'get-worker-id': never; - - 'get-worker-status': WorkerStatus; - - // #endregion - }; - - type _ToWorkerResponse = { + type _ToWorkerResponse = { workerId: string; targets?: string[]; command: WorkerResponseKey; - } & (WorkerResponseMap[WorkerResponseKey] extends never + } & (PubSubWorkerResponseMap[WorkerResponseKey] extends never ? { payload?: never } // some responses carry no payload - : { payload: WorkerResponseMap[WorkerResponseKey] }); + : { payload: PubSubWorkerResponseMap[WorkerResponseKey] }); - type ToWorkerResponse = Resolve< + type ToWorkerResponse = Resolve< _ToWorkerResponse >; - namespace WorkerResponse { + namespace WorkerResponses { export type RestartEventBus = ToWorkerResponse<'restart-event-bus'>; export type ReloadExternalSecretsProviders = ToWorkerResponse<'reload-external-secrets-providers'>; @@ -191,8 +90,8 @@ export namespace PubSub { /** Response sent via the `n8n.worker-response` pubsub channel. 
*/ export type WorkerResponse = - | WorkerResponse.RestartEventBus - | WorkerResponse.ReloadExternalSecretsProviders - | WorkerResponse.GetWorkerId - | WorkerResponse.GetWorkerStatus; + | WorkerResponses.RestartEventBus + | WorkerResponses.ReloadExternalSecretsProviders + | WorkerResponses.GetWorkerId + | WorkerResponses.GetWorkerStatus; } diff --git a/packages/cli/src/scaling/pubsub/subscriber.service.ts b/packages/cli/src/scaling/pubsub/subscriber.service.ts index e1951f924e..f9a2567f8d 100644 --- a/packages/cli/src/scaling/pubsub/subscriber.service.ts +++ b/packages/cli/src/scaling/pubsub/subscriber.service.ts @@ -1,8 +1,11 @@ import type { Redis as SingleNodeClient, Cluster as MultiNodeClient } from 'ioredis'; +import debounce from 'lodash/debounce'; +import { jsonParse } from 'n8n-workflow'; import { Service } from 'typedi'; import config from '@/config'; -import { Logger } from '@/logger'; +import { EventService } from '@/events/event.service'; +import { Logger } from '@/logging/logger.service'; import { RedisClientService } from '@/services/redis-client.service'; import type { PubSub } from './pubsub.types'; @@ -21,14 +24,13 @@ export class Subscriber { constructor( private readonly logger: Logger, private readonly redisClientService: RedisClientService, + private readonly eventService: EventService, ) { // @TODO: Once this class is only ever initialized in scaling mode, throw in the next line instead. 
if (config.getEnv('executions.mode') !== 'queue') return; this.client = this.redisClientService.createClient({ type: 'subscriber(n8n)' }); - this.client.on('error', (error) => this.logger.error(error.message)); - this.client.on('message', (channel: PubSub.Channel, message) => { this.handlers.get(channel)?.(message); }); @@ -64,4 +66,39 @@ export class Subscriber { } // #endregion + + // #region Commands + + setCommandMessageHandler() { + const handlerFn = debounce((str: string) => { + const msg = this.parseCommandMessage(str); + if (msg) this.eventService.emit(msg.command, msg.payload); + }, 300); + + this.setMessageHandler('n8n.commands', handlerFn); + } + + private parseCommandMessage(str: string) { + const msg = jsonParse(str, { fallbackValue: null }); + + if (!msg) { + this.logger.debug('Received invalid string via command channel', { message: str }); + + return null; + } + + this.logger.debug('Received message via command channel', msg); + + const queueModeId = config.getEnv('redis.queueModeId'); + + if (msg.senderId === queueModeId || (msg.targets && !msg.targets.includes(queueModeId))) { + this.logger.debug('Disregarding message - not for this instance', msg); + + return null; + } + + return msg; + } + + // #endregion } diff --git a/packages/cli/src/scaling/scaling.service.ts b/packages/cli/src/scaling/scaling.service.ts index 552802ba70..f9d805140d 100644 --- a/packages/cli/src/scaling/scaling.service.ts +++ b/packages/cli/src/scaling/scaling.service.ts @@ -1,7 +1,14 @@ import { GlobalConfig } from '@n8n/config'; import { InstanceSettings } from 'n8n-core'; -import { ApplicationError, BINARY_ENCODING, sleep, jsonStringify } from 'n8n-workflow'; +import { + ApplicationError, + BINARY_ENCODING, + sleep, + jsonStringify, + ErrorReporterProxy, +} from 'n8n-workflow'; import type { IExecuteResponsePromiseData } from 'n8n-workflow'; +import { strict } from 'node:assert'; import Container, { Service } from 'typedi'; import { ActiveExecutions } from 
'@/active-executions'; @@ -11,7 +18,7 @@ import { ExecutionRepository } from '@/databases/repositories/execution.reposito import { OnShutdown } from '@/decorators/on-shutdown'; import { MaxStalledCountError } from '@/errors/max-stalled-count.error'; import { EventService } from '@/events/event.service'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { OrchestrationService } from '@/services/orchestration.service'; import { JOB_TYPE_NAME, QUEUE_NAME } from './constants'; @@ -77,11 +84,22 @@ export class ScalingService { this.assertWorker(); this.assertQueue(); - void this.queue.process( - JOB_TYPE_NAME, - concurrency, - async (job: Job) => await this.jobProcessor.processJob(job), - ); + void this.queue.process(JOB_TYPE_NAME, concurrency, async (job: Job) => { + try { + await this.jobProcessor.processJob(job); + } catch (error: unknown) { + // Errors thrown here will be sent to the main instance by bull. Logging + // them out and rethrowing them allows to find out which worker had the + // issue. + this.logger.error('[ScalingService] Executing a job errored', { + jobId: job.id, + executionId: job.data.executionId, + error, + }); + ErrorReporterProxy.error(error); + throw error; + } + }); this.logger.debug('[ScalingService] Worker setup completed'); } @@ -124,12 +142,24 @@ export class ScalingService { return { active, waiting }; } - async addJob(jobData: JobData, jobOptions: JobOptions) { - const { executionId } = jobData; + /** + * Add a job to the queue. + * + * @param jobData Data of the job to add to the queue. + * @param priority Priority of the job, from `1` (highest) to `MAX_SAFE_INTEGER` (lowest). 
+ */ + async addJob(jobData: JobData, { priority }: { priority: number }) { + strict(priority > 0 && priority <= Number.MAX_SAFE_INTEGER); + + const jobOptions: JobOptions = { + priority, + removeOnComplete: true, + removeOnFail: true, + }; const job = await this.queue.add(JOB_TYPE_NAME, jobData, jobOptions); - this.logger.info(`[ScalingService] Added job ${job.id} (execution ${executionId})`); + this.logger.info(`[ScalingService] Added job ${job.id} (execution ${jobData.executionId})`); return job; } @@ -173,42 +203,6 @@ export class ScalingService { // #region Listeners private registerListeners() { - let latestAttemptTs = 0; - let cumulativeTimeoutMs = 0; - - const MAX_TIMEOUT_MS = this.globalConfig.queue.bull.redis.timeoutThreshold; - const RESET_LENGTH_MS = 30_000; - - this.queue.on('error', (error: Error) => { - this.logger.error('[ScalingService] Queue errored', { error }); - - /** - * On Redis connection failure, try to reconnect. On every failed attempt, - * increment a cumulative timeout - if this exceeds a limit, exit the - * process. Reset the cumulative timeout if >30s between retries. 
- */ - if (error.message.includes('ECONNREFUSED')) { - const nowTs = Date.now(); - if (nowTs - latestAttemptTs > RESET_LENGTH_MS) { - latestAttemptTs = nowTs; - cumulativeTimeoutMs = 0; - } else { - cumulativeTimeoutMs += nowTs - latestAttemptTs; - latestAttemptTs = nowTs; - if (cumulativeTimeoutMs > MAX_TIMEOUT_MS) { - this.logger.error('[ScalingService] Redis unavailable after max timeout'); - this.logger.error('[ScalingService] Exiting process...'); - process.exit(1); - } - } - - this.logger.warn('[ScalingService] Redis unavailable - retrying to connect...'); - return; - } - - throw error; - }); - const { instanceType } = this.instanceSettings; if (instanceType === 'main' || instanceType === 'webhook') { this.registerMainOrWebhookListeners(); @@ -228,6 +222,8 @@ export class ScalingService { }); this.queue.on('error', (error: Error) => { + if ('code' in error && error.code === 'ECONNREFUSED') return; // handled by RedisClientService.retryStrategy + if (error.message.includes('job stalled more than maxStalledCount')) { throw new MaxStalledCountError(error); } @@ -242,6 +238,8 @@ export class ScalingService { process.exit(1); } + this.logger.error('[ScalingService] Queue errored', { error }); + throw error; }); } @@ -250,6 +248,14 @@ export class ScalingService { * Register listeners on a `main` or `webhook` process for Bull queue events. 
*/ private registerMainOrWebhookListeners() { + this.queue.on('error', (error: Error) => { + if ('code' in error && error.code === 'ECONNREFUSED') return; // handled by RedisClientService.retryStrategy + + this.logger.error('[ScalingService] Queue errored', { error }); + + throw error; + }); + this.queue.on('global:progress', (_jobId: JobId, msg: unknown) => { if (!this.isPubSubMessage(msg)) return; diff --git a/packages/cli/src/scaling/worker-server.ts b/packages/cli/src/scaling/worker-server.ts index abc6a3a024..3343ce4e49 100644 --- a/packages/cli/src/scaling/worker-server.ts +++ b/packages/cli/src/scaling/worker-server.ts @@ -12,11 +12,10 @@ import { CredentialsOverwrites } from '@/credentials-overwrites'; import * as Db from '@/db'; import { CredentialsOverwritesAlreadySetError } from '@/errors/credentials-overwrites-already-set.error'; import { NonJsonBodyError } from '@/errors/non-json-body.error'; -import { PortTakenError } from '@/errors/port-taken.error'; import { ServiceUnavailableError } from '@/errors/response-errors/service-unavailable.error'; import { ExternalHooks } from '@/external-hooks'; import type { ICredentialsOverwrite } from '@/interfaces'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { PrometheusMetricsService } from '@/metrics/prometheus-metrics.service'; import { rawBodyReader, bodyParser } from '@/middlewares'; import * as ResponseHelper from '@/response-helper'; @@ -40,6 +39,8 @@ export type WorkerServerEndpointsConfig = { export class WorkerServer { private readonly port: number; + private readonly address: string; + private readonly server: Server; private readonly app: Application; @@ -66,9 +67,15 @@ export class WorkerServer { this.server = http.createServer(this.app); this.port = this.globalConfig.queue.health.port; + this.address = this.globalConfig.queue.health.address; this.server.on('error', (error: NodeJS.ErrnoException) => { - if (error.code === 'EADDRINUSE') throw new 
PortTakenError(this.port); + if (error.code === 'EADDRINUSE') { + this.logger.error( + `Port ${this.port} is already in use, possibly by the n8n main process server. Please set a different port for the worker server.`, + ); + process.exit(1); + } }); } @@ -79,7 +86,7 @@ export class WorkerServer { await this.mountEndpoints(); - await new Promise((resolve) => this.server.listen(this.port, resolve)); + await new Promise((resolve) => this.server.listen(this.port, this.address, resolve)); await this.externalHooks.run('worker.ready'); diff --git a/packages/cli/src/security-audit/risk-reporters/instance-risk-reporter.ts b/packages/cli/src/security-audit/risk-reporters/instance-risk-reporter.ts index 4792bf8b6a..b0d6ccfad3 100644 --- a/packages/cli/src/security-audit/risk-reporters/instance-risk-reporter.ts +++ b/packages/cli/src/security-audit/risk-reporters/instance-risk-reporter.ts @@ -6,7 +6,7 @@ import { Service } from 'typedi'; import config from '@/config'; import { getN8nPackageJson, inDevelopment } from '@/constants'; import type { WorkflowEntity } from '@/databases/entities/workflow-entity'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { isApiEnabled } from '@/public-api'; import { ENV_VARS_DOCS_URL, diff --git a/packages/cli/src/server.ts b/packages/cli/src/server.ts index 0840714b6a..b83e2bdb2a 100644 --- a/packages/cli/src/server.ts +++ b/packages/cli/src/server.ts @@ -21,7 +21,7 @@ import { CredentialsOverwrites } from '@/credentials-overwrites'; import { ControllerRegistry } from '@/decorators'; import { MessageEventBus } from '@/eventbus/message-event-bus/message-event-bus'; import { EventService } from '@/events/event.service'; -import { LogStreamingEventRelay } from '@/events/log-streaming-event-relay'; +import { LogStreamingEventRelay } from '@/events/relays/log-streaming.event-relay'; import type { ICredentialsOverwrite } from '@/interfaces'; import { isLdapEnabled } from '@/ldap/helpers.ee'; import { 
LoadNodesAndCredentials } from '@/load-nodes-and-credentials'; @@ -56,6 +56,7 @@ import '@/controllers/translation.controller'; import '@/controllers/users.controller'; import '@/controllers/user-settings.controller'; import '@/controllers/workflow-statistics.controller'; +import '@/controllers/api-keys.controller'; import '@/credentials/credentials.controller'; import '@/eventbus/event-bus.controller'; import '@/events/events.controller'; diff --git a/packages/cli/src/services/__tests__/workflow-statistics.service.test.ts b/packages/cli/src/services/__tests__/workflow-statistics.service.test.ts index a8d73cbfff..fdecb7ae5a 100644 --- a/packages/cli/src/services/__tests__/workflow-statistics.service.test.ts +++ b/packages/cli/src/services/__tests__/workflow-statistics.service.test.ts @@ -7,7 +7,7 @@ import { } from '@n8n/typeorm'; import { mocked } from 'jest-mock'; import { mock } from 'jest-mock-extended'; -import type { IRun, WorkflowExecuteMode } from 'n8n-workflow'; +import type { INode, IRun, WorkflowExecuteMode } from 'n8n-workflow'; import { Container } from 'typedi'; import config from '@/config'; @@ -167,6 +167,22 @@ describe('WorkflowStatisticsService', () => { }); }); + test('should emit event with no `userId` if workflow is owned by team project', async () => { + const workflowId = '123'; + ownershipService.getPersonalProjectOwnerCached.mockResolvedValueOnce(null); + const node = mock({ id: '123', type: 'n8n-nodes-base.noOp', credentials: {} }); + + await workflowStatisticsService.nodeFetchedData(workflowId, node); + + expect(eventService.emit).toHaveBeenCalledWith('first-workflow-data-loaded', { + userId: '', + project: fakeProject.id, + workflowId, + nodeType: node.type, + nodeId: node.id, + }); + }); + test('should create metrics with credentials when the db is updated', async () => { // Call the function with a production success result, ensure metrics hook gets called const workflowId = '1'; diff --git 
a/packages/cli/src/services/active-workflows.service.ts b/packages/cli/src/services/active-workflows.service.ts index f2aaf9293d..61aa875d1a 100644 --- a/packages/cli/src/services/active-workflows.service.ts +++ b/packages/cli/src/services/active-workflows.service.ts @@ -5,7 +5,7 @@ import type { User } from '@/databases/entities/user'; import { SharedWorkflowRepository } from '@/databases/repositories/shared-workflow.repository'; import { WorkflowRepository } from '@/databases/repositories/workflow.repository'; import { BadRequestError } from '@/errors/response-errors/bad-request.error'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; @Service() export class ActiveWorkflowsService { diff --git a/packages/cli/src/services/ai-assistant.service.ts b/packages/cli/src/services/ai-assistant.service.ts index 77234165c1..76b6e3fffb 100644 --- a/packages/cli/src/services/ai-assistant.service.ts +++ b/packages/cli/src/services/ai-assistant.service.ts @@ -1,3 +1,4 @@ +import { GlobalConfig } from '@n8n/config'; import type { AiAssistantSDK } from '@n8n_io/ai-assistant-sdk'; import { AiAssistantClient } from '@n8n_io/ai-assistant-sdk'; import { assert, type IUser } from 'n8n-workflow'; @@ -14,7 +15,10 @@ import { License } from '../license'; export class AiAssistantService { private client: AiAssistantClient | undefined; - constructor(private readonly licenseService: License) {} + constructor( + private readonly licenseService: License, + private readonly globalConfig: GlobalConfig, + ) {} async init() { const aiAssistantEnabled = this.licenseService.isAiAssistantEnabled(); @@ -25,7 +29,7 @@ export class AiAssistantService { const licenseCert = await this.licenseService.loadCertStr(); const consumerId = this.licenseService.getConsumerId(); const baseUrl = config.get('aiAssistant.baseUrl'); - const logLevel = config.getEnv('logs.level'); + const logLevel = this.globalConfig.logging.level; this.client = new AiAssistantClient({ 
licenseCert, diff --git a/packages/cli/src/services/cache/cache.service.ts b/packages/cli/src/services/cache/cache.service.ts index 3eda66ecb8..aefe9310fc 100644 --- a/packages/cli/src/services/cache/cache.service.ts +++ b/packages/cli/src/services/cache/cache.service.ts @@ -89,6 +89,9 @@ export class CacheService extends TypedEmitter { // storing // ---------------------------------- + /** + * @param ttl Time to live in milliseconds + */ async set(key: string, value: unknown, ttl?: number) { if (!this.cache) await this.init(); diff --git a/packages/cli/src/services/community-packages.service.ts b/packages/cli/src/services/community-packages.service.ts index 500518ae02..b157119cf2 100644 --- a/packages/cli/src/services/community-packages.service.ts +++ b/packages/cli/src/services/community-packages.service.ts @@ -22,7 +22,7 @@ import { FeatureNotLicensedError } from '@/errors/feature-not-licensed.error'; import type { CommunityPackages } from '@/interfaces'; import { License } from '@/license'; import { LoadNodesAndCredentials } from '@/load-nodes-and-credentials'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { toError } from '@/utils'; import { OrchestrationService } from './orchestration.service'; diff --git a/packages/cli/src/services/credentials-tester.service.ts b/packages/cli/src/services/credentials-tester.service.ts index b66d4a474f..30504e464b 100644 --- a/packages/cli/src/services/credentials-tester.service.ts +++ b/packages/cli/src/services/credentials-tester.service.ts @@ -35,7 +35,7 @@ import { Service } from 'typedi'; import { CredentialTypes } from '@/credential-types'; import type { User } from '@/databases/entities/user'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { NodeTypes } from '@/node-types'; import * as WorkflowExecuteAdditionalData from '@/workflow-execute-additional-data'; diff --git a/packages/cli/src/services/frontend.service.ts 
b/packages/cli/src/services/frontend.service.ts index 6ac3a1863c..ea8a135ff2 100644 --- a/packages/cli/src/services/frontend.service.ts +++ b/packages/cli/src/services/frontend.service.ts @@ -17,7 +17,7 @@ import { getVariablesLimit } from '@/environments/variables/environment-helpers' import { getLdapLoginLabel } from '@/ldap/helpers.ee'; import { License } from '@/license'; import { LoadNodesAndCredentials } from '@/load-nodes-and-credentials'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { isApiEnabled } from '@/public-api'; import type { CommunityPackagesService } from '@/services/community-packages.service'; import { getSamlLoginLabel } from '@/sso/saml/saml-helpers'; @@ -88,6 +88,7 @@ export class FrontendService { endpointFormWaiting: this.globalConfig.endpoints.formWaiting, endpointWebhook: this.globalConfig.endpoints.webhook, endpointWebhookTest: this.globalConfig.endpoints.webhookTest, + endpointWebhookWaiting: this.globalConfig.endpoints.webhookWaiting, saveDataErrorExecution: config.getEnv('executions.saveDataOnError'), saveDataSuccessExecution: config.getEnv('executions.saveDataOnSuccess'), saveManualExecutions: config.getEnv('executions.saveDataManualExecutions'), @@ -123,7 +124,7 @@ export class FrontendService { apiKey: config.getEnv('diagnostics.config.posthog.apiKey'), autocapture: false, disableSessionRecording: config.getEnv('deployment.type') !== 'cloud', - debug: config.getEnv('logs.level') === 'debug', + debug: this.globalConfig.logging.level === 'debug', }, personalizationSurveyEnabled: config.getEnv('personalization.enabled') && config.getEnv('diagnostics.enabled'), @@ -153,7 +154,7 @@ export class FrontendService { }, }, workflowTagsDisabled: config.getEnv('workflowTagsDisabled'), - logLevel: config.getEnv('logs.level'), + logLevel: this.globalConfig.logging.level, hiringBannerEnabled: config.getEnv('hiringBanner.enabled'), aiAssistant: { enabled: false, diff --git 
a/packages/cli/src/services/import.service.ts b/packages/cli/src/services/import.service.ts index 5691fc941c..a486ff8396 100644 --- a/packages/cli/src/services/import.service.ts +++ b/packages/cli/src/services/import.service.ts @@ -11,7 +11,7 @@ import { CredentialsRepository } from '@/databases/repositories/credentials.repo import { TagRepository } from '@/databases/repositories/tag.repository'; import * as Db from '@/db'; import type { ICredentialsDb } from '@/interfaces'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { replaceInvalidCredentials } from '@/workflow-helpers'; @Service() diff --git a/packages/cli/src/services/orchestration.service.ts b/packages/cli/src/services/orchestration.service.ts index 1ee7c26876..666fe48ac6 100644 --- a/packages/cli/src/services/orchestration.service.ts +++ b/packages/cli/src/services/orchestration.service.ts @@ -3,9 +3,9 @@ import type { WorkflowActivateMode } from 'n8n-workflow'; import Container, { Service } from 'typedi'; import config from '@/config'; -import { Logger } from '@/logger'; +import type { PubSubCommandMap } from '@/events/maps/pub-sub.event-map'; +import { Logger } from '@/logging/logger.service'; import type { Publisher } from '@/scaling/pubsub/publisher.service'; -import type { PubSub } from '@/scaling/pubsub/pubsub.types'; import type { Subscriber } from '@/scaling/pubsub/subscriber.service'; import { MultiMainSetup } from './orchestration/main/multi-main-setup.ee'; @@ -97,9 +97,9 @@ export class OrchestrationService { // pubsub // ---------------------------------- - async publish( + async publish( commandKey: CommandKey, - payload?: PubSub.CommandMap[CommandKey], + payload?: PubSubCommandMap[CommandKey], ) { if (!this.sanityCheck()) return; diff --git a/packages/cli/src/services/orchestration/helpers.ts b/packages/cli/src/services/orchestration/helpers.ts index a5470138dc..f36bb4adf9 100644 --- a/packages/cli/src/services/orchestration/helpers.ts +++ 
b/packages/cli/src/services/orchestration/helpers.ts @@ -2,7 +2,7 @@ import { jsonParse } from 'n8n-workflow'; import os from 'node:os'; import { Container } from 'typedi'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { COMMAND_PUBSUB_CHANNEL } from '@/scaling/constants'; import type { PubSub } from '@/scaling/pubsub/pubsub.types'; diff --git a/packages/cli/src/services/orchestration/main/handle-command-message-main.ts b/packages/cli/src/services/orchestration/main/handle-command-message-main.ts index 49fce27aef..909f5976a5 100644 --- a/packages/cli/src/services/orchestration/main/handle-command-message-main.ts +++ b/packages/cli/src/services/orchestration/main/handle-command-message-main.ts @@ -7,7 +7,7 @@ import { WorkflowRepository } from '@/databases/repositories/workflow.repository import { MessageEventBus } from '@/eventbus/message-event-bus/message-event-bus'; import { ExternalSecretsManager } from '@/external-secrets/external-secrets-manager.ee'; import { License } from '@/license'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { Push } from '@/push'; import { CommunityPackagesService } from '@/services/community-packages.service'; import { OrchestrationService } from '@/services/orchestration.service'; diff --git a/packages/cli/src/services/orchestration/main/handle-worker-response-message-main.ts b/packages/cli/src/services/orchestration/main/handle-worker-response-message-main.ts index 7842c71463..a3b5912fb4 100644 --- a/packages/cli/src/services/orchestration/main/handle-worker-response-message-main.ts +++ b/packages/cli/src/services/orchestration/main/handle-worker-response-message-main.ts @@ -1,7 +1,7 @@ import { jsonParse } from 'n8n-workflow'; import Container from 'typedi'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { WORKER_RESPONSE_PUBSUB_CHANNEL } from '@/scaling/constants'; import type { PubSub } 
from '@/scaling/pubsub/pubsub.types'; diff --git a/packages/cli/src/services/orchestration/main/multi-main-setup.ee.ts b/packages/cli/src/services/orchestration/main/multi-main-setup.ee.ts index 98dbce7fde..bb1b52519c 100644 --- a/packages/cli/src/services/orchestration/main/multi-main-setup.ee.ts +++ b/packages/cli/src/services/orchestration/main/multi-main-setup.ee.ts @@ -4,7 +4,7 @@ import { Service } from 'typedi'; import config from '@/config'; import { TIME } from '@/constants'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { Publisher } from '@/scaling/pubsub/publisher.service'; import { RedisClientService } from '@/services/redis-client.service'; import { TypedEmitter } from '@/typed-emitter'; diff --git a/packages/cli/src/services/orchestration/webhook/handle-command-message-webhook.ts b/packages/cli/src/services/orchestration/webhook/handle-command-message-webhook.ts deleted file mode 100644 index 3555139a99..0000000000 --- a/packages/cli/src/services/orchestration/webhook/handle-command-message-webhook.ts +++ /dev/null @@ -1,87 +0,0 @@ -import { InstanceSettings } from 'n8n-core'; -import Container from 'typedi'; -import { Logger } from 'winston'; - -import config from '@/config'; -import { MessageEventBus } from '@/eventbus/message-event-bus/message-event-bus'; -import { ExternalSecretsManager } from '@/external-secrets/external-secrets-manager.ee'; -import { License } from '@/license'; -import { CommunityPackagesService } from '@/services/community-packages.service'; - -import { messageToRedisServiceCommandObject, debounceMessageReceiver } from '../helpers'; - -export async function handleCommandMessageWebhook(messageString: string) { - const queueModeId = config.getEnv('redis.queueModeId'); - const isMainInstance = Container.get(InstanceSettings).instanceType === 'main'; - const message = messageToRedisServiceCommandObject(messageString); - const logger = Container.get(Logger); - - if (message) { - 
logger.debug( - `RedisCommandHandler(main): Received command message ${message.command} from ${message.senderId}`, - ); - - if ( - message.senderId === queueModeId || - (message.targets && !message.targets.includes(queueModeId)) - ) { - // Skipping command message because it's not for this instance - logger.debug( - `Skipping command message ${message.command} because it's not for this instance.`, - ); - return message; - } - - switch (message.command) { - case 'reload-license': - if (!debounceMessageReceiver(message, 500)) { - return { ...message, payload: { result: 'debounced' } }; - } - - if (isMainInstance && !config.getEnv('multiMainSetup.enabled')) { - // at this point in time, only a single main instance is supported, thus this command _should_ never be caught currently - logger.error( - 'Received command to reload license via Redis, but this should not have happened and is not supported on the main instance yet.', - ); - return message; - } - await Container.get(License).reload(); - break; - case 'restart-event-bus': - if (!debounceMessageReceiver(message, 200)) { - return { ...message, payload: { result: 'debounced' } }; - } - await Container.get(MessageEventBus).restart(); - case 'reload-external-secrets-providers': - if (!debounceMessageReceiver(message, 200)) { - return { ...message, payload: { result: 'debounced' } }; - } - await Container.get(ExternalSecretsManager).reloadAllProviders(); - break; - case 'community-package-install': - case 'community-package-update': - case 'community-package-uninstall': - if (!debounceMessageReceiver(message, 200)) { - return message; - } - const { packageName } = message.payload; - const communityPackagesService = Container.get(CommunityPackagesService); - if (message.command === 'community-package-uninstall') { - await communityPackagesService.removeNpmPackage(packageName); - } else { - await communityPackagesService.installOrUpdateNpmPackage( - packageName, - message.payload.packageVersion, - ); - } - break; - - 
default: - break; - } - - return message; - } - - return; -} diff --git a/packages/cli/src/services/orchestration/webhook/orchestration.handler.webhook.service.ts b/packages/cli/src/services/orchestration/webhook/orchestration.handler.webhook.service.ts deleted file mode 100644 index de7bded68e..0000000000 --- a/packages/cli/src/services/orchestration/webhook/orchestration.handler.webhook.service.ts +++ /dev/null @@ -1,19 +0,0 @@ -import { Service } from 'typedi'; - -import { Subscriber } from '@/scaling/pubsub/subscriber.service'; - -import { handleCommandMessageWebhook } from './handle-command-message-webhook'; -import { OrchestrationHandlerService } from '../../orchestration.handler.base.service'; - -@Service() -export class OrchestrationHandlerWebhookService extends OrchestrationHandlerService { - constructor(private readonly subscriber: Subscriber) { - super(); - } - - async initSubscriber() { - await this.subscriber.subscribe('n8n.commands'); - - this.subscriber.setMessageHandler('n8n.commands', handleCommandMessageWebhook); - } -} diff --git a/packages/cli/src/services/orchestration/worker/handle-command-message-worker.ts b/packages/cli/src/services/orchestration/worker/handle-command-message-worker.ts index 3c5b108010..ae11ac96fe 100644 --- a/packages/cli/src/services/orchestration/worker/handle-command-message-worker.ts +++ b/packages/cli/src/services/orchestration/worker/handle-command-message-worker.ts @@ -6,7 +6,7 @@ import { N8N_VERSION } from '@/constants'; import { MessageEventBus } from '@/eventbus/message-event-bus/message-event-bus'; import { ExternalSecretsManager } from '@/external-secrets/external-secrets-manager.ee'; import { License } from '@/license'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { COMMAND_PUBSUB_CHANNEL } from '@/scaling/constants'; import type { PubSub } from '@/scaling/pubsub/pubsub.types'; import { CommunityPackagesService } from '@/services/community-packages.service'; 
diff --git a/packages/cli/src/services/pruning.service.ts b/packages/cli/src/services/pruning.service.ts index b0ebc99dfd..48d4b0db3b 100644 --- a/packages/cli/src/services/pruning.service.ts +++ b/packages/cli/src/services/pruning.service.ts @@ -6,7 +6,7 @@ import config from '@/config'; import { inTest, TIME } from '@/constants'; import { ExecutionRepository } from '@/databases/repositories/execution.repository'; import { OnShutdown } from '@/decorators/on-shutdown'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { OrchestrationService } from './orchestration.service'; diff --git a/packages/cli/src/services/redis-client.service.ts b/packages/cli/src/services/redis-client.service.ts index dc0d3b8cde..f205a756c5 100644 --- a/packages/cli/src/services/redis-client.service.ts +++ b/packages/cli/src/services/redis-client.service.ts @@ -3,18 +3,42 @@ import ioRedis from 'ioredis'; import type { Cluster, RedisOptions } from 'ioredis'; import { Service } from 'typedi'; -import { Logger } from '@/logger'; +import { Debounce } from '@/decorators/debounce'; +import { Logger } from '@/logging/logger.service'; +import { TypedEmitter } from '@/typed-emitter'; import type { RedisClientType } from '../scaling/redis/redis.types'; +type RedisEventMap = { + 'connection-lost': number; + 'connection-recovered': never; +}; + @Service() -export class RedisClientService { +export class RedisClientService extends TypedEmitter { private readonly clients = new Set(); + private readonly config = { + /** How long (in ms) to try to reconnect for before exiting. */ + maxTimeout: this.globalConfig.queue.bull.redis.timeoutThreshold, + + /** How long (in ms) to wait between reconnection attempts. */ + retryInterval: 1000, + + /** How long (in ms) to wait before resetting the cumulative timeout. */ + resetLength: 30_000, + }; + + /** Whether any client has lost connection to Redis. 
*/ + private lostConnection = false; + constructor( private readonly logger: Logger, private readonly globalConfig: GlobalConfig, - ) {} + ) { + super(); + this.registerListeners(); + } createClient(arg: { type: RedisClientType; extraOptions?: RedisOptions }) { const client = @@ -22,6 +46,19 @@ export class RedisClientService { ? this.createClusterClient(arg) : this.createRegularClient(arg); + client.on('error', (error) => { + if ('code' in error && error.code === 'ECONNREFUSED') return; // handled by retryStrategy + + this.logger.error(`[Redis client] ${error.message}`, { error }); + }); + + client.on('ready', () => { + if (this.lostConnection) { + this.emit('connection-recovered'); + this.lostConnection = false; + } + }); + this.clients.add(client); return client; @@ -118,32 +155,29 @@ export class RedisClientService { * Reset the cumulative timeout if >30s between reconnection attempts. */ private retryStrategy() { - const RETRY_INTERVAL = 500; // ms - const RESET_LENGTH = 30_000; // ms - const MAX_TIMEOUT = this.globalConfig.queue.bull.redis.timeoutThreshold; - let lastAttemptTs = 0; let cumulativeTimeout = 0; return () => { const nowTs = Date.now(); - if (nowTs - lastAttemptTs > RESET_LENGTH) { + if (nowTs - lastAttemptTs > this.config.resetLength) { cumulativeTimeout = 0; lastAttemptTs = nowTs; } else { cumulativeTimeout += nowTs - lastAttemptTs; lastAttemptTs = nowTs; - if (cumulativeTimeout > MAX_TIMEOUT) { - this.logger.error(`[Redis] Unable to connect after max timeout of ${MAX_TIMEOUT} ms`); - this.logger.error('Exiting process...'); + if (cumulativeTimeout > this.config.maxTimeout) { + const maxTimeout = Math.round(this.config.maxTimeout / 1000) + 's'; + this.logger.error(`Unable to connect to Redis after trying to connect for ${maxTimeout}`); + this.logger.error('Exiting process due to Redis connection error'); process.exit(1); } } - this.logger.warn('Redis unavailable - trying to reconnect...'); + this.emit('connection-lost', cumulativeTimeout); - 
return RETRY_INTERVAL; + return this.config.retryInterval; }; } @@ -156,4 +190,40 @@ export class RedisClientService { return { host, port: parseInt(port) }; }); } + + @Debounce(1000) + emit( + event: Event, + ...args: Array + ): boolean { + return super.emit(event, ...args); + } + + private registerListeners() { + const { maxTimeout: maxTimeoutMs, retryInterval: retryIntervalMs } = this.config; + + const retryInterval = this.formatTimeout(retryIntervalMs); + const maxTimeout = this.formatTimeout(maxTimeoutMs); + + this.on('connection-lost', (cumulativeTimeoutMs) => { + const cumulativeTimeout = this.formatTimeout(cumulativeTimeoutMs); + const reconnectionMsg = `Trying to reconnect in ${retryInterval}...`; + const timeoutDetails = `${cumulativeTimeout}/${maxTimeout}`; + + this.logger.warn(`Lost Redis connection. ${reconnectionMsg} (${timeoutDetails})`); + + this.lostConnection = true; + }); + + this.on('connection-recovered', () => { + this.logger.info('Recovered Redis connection'); + }); + } + + private formatTimeout(timeoutMs: number) { + const timeoutSeconds = timeoutMs / 1000; + const roundedTimeout = Math.round(timeoutSeconds * 10) / 10; + + return roundedTimeout + 's'; + } } diff --git a/packages/cli/src/services/user.service.ts b/packages/cli/src/services/user.service.ts index 3d8fde6c00..1668878a8c 100644 --- a/packages/cli/src/services/user.service.ts +++ b/packages/cli/src/services/user.service.ts @@ -7,7 +7,7 @@ import { UserRepository } from '@/databases/repositories/user.repository'; import { InternalServerError } from '@/errors/response-errors/internal-server.error'; import { EventService } from '@/events/event.service'; import type { Invitation, PublicUser } from '@/interfaces'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import type { PostHogClient } from '@/posthog'; import type { UserRequest } from '@/requests'; import { UrlService } from '@/services/url.service'; diff --git 
a/packages/cli/src/services/workflow-statistics.service.ts b/packages/cli/src/services/workflow-statistics.service.ts index 42c6d14638..53cbac5094 100644 --- a/packages/cli/src/services/workflow-statistics.service.ts +++ b/packages/cli/src/services/workflow-statistics.service.ts @@ -4,7 +4,7 @@ import { Service } from 'typedi'; import { StatisticsNames } from '@/databases/entities/workflow-statistics'; import { WorkflowStatisticsRepository } from '@/databases/repositories/workflow-statistics.repository'; import { EventService } from '@/events/event.service'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { UserService } from '@/services/user.service'; import { TypedEmitter } from '@/typed-emitter'; @@ -110,7 +110,7 @@ export class WorkflowStatisticsService extends TypedEmitter Promise | void; export type ServiceClass = Class>; diff --git a/packages/cli/src/sso/saml/__tests__/saml.service.ee.test.ts b/packages/cli/src/sso/saml/__tests__/saml.service.ee.test.ts index 418f29c688..5dda04dc18 100644 --- a/packages/cli/src/sso/saml/__tests__/saml.service.ee.test.ts +++ b/packages/cli/src/sso/saml/__tests__/saml.service.ee.test.ts @@ -2,7 +2,7 @@ import type express from 'express'; import { mock } from 'jest-mock-extended'; import type { IdentityProviderInstance, ServiceProviderInstance } from 'samlify'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { UrlService } from '@/services/url.service'; import * as samlHelpers from '@/sso/saml/saml-helpers'; import { SamlService } from '@/sso/saml/saml.service.ee'; diff --git a/packages/cli/src/sso/saml/saml-validator.ts b/packages/cli/src/sso/saml/saml-validator.ts index 6705320bee..06a93cc4fb 100644 --- a/packages/cli/src/sso/saml/saml-validator.ts +++ b/packages/cli/src/sso/saml/saml-validator.ts @@ -1,7 +1,7 @@ import { Container } from 'typedi'; import type { XMLFileInfo } from 'xmllint-wasm'; -import { Logger } from '@/logger'; 
+import { Logger } from '@/logging/logger.service'; let xml: XMLFileInfo; let xmldsigCore: XMLFileInfo; diff --git a/packages/cli/src/sso/saml/saml.service.ee.ts b/packages/cli/src/sso/saml/saml.service.ee.ts index f30fa6ff8e..6b07919730 100644 --- a/packages/cli/src/sso/saml/saml.service.ee.ts +++ b/packages/cli/src/sso/saml/saml.service.ee.ts @@ -12,7 +12,7 @@ import { SettingsRepository } from '@/databases/repositories/settings.repository import { UserRepository } from '@/databases/repositories/user.repository'; import { AuthError } from '@/errors/response-errors/auth.error'; import { BadRequestError } from '@/errors/response-errors/bad-request.error'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { UrlService } from '@/services/url.service'; import { SAML_PREFERENCES_DB_KEY } from './constants'; diff --git a/packages/cli/src/subworkflows/subworkflow-policy-checker.service.ts b/packages/cli/src/subworkflows/subworkflow-policy-checker.service.ts index f630392280..cf9b122e72 100644 --- a/packages/cli/src/subworkflows/subworkflow-policy-checker.service.ts +++ b/packages/cli/src/subworkflows/subworkflow-policy-checker.service.ts @@ -5,7 +5,7 @@ import { Service } from 'typedi'; import type { Project } from '@/databases/entities/project'; import { SubworkflowPolicyDenialError } from '@/errors/subworkflow-policy-denial.error'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { AccessService } from '@/services/access.service'; import { OwnershipService } from '@/services/ownership.service'; import { UrlService } from '@/services/url.service'; diff --git a/packages/cli/src/telemetry/__tests__/telemetry.test.ts b/packages/cli/src/telemetry/__tests__/telemetry.test.ts index 3851e4523a..04a6cecfca 100644 --- a/packages/cli/src/telemetry/__tests__/telemetry.test.ts +++ b/packages/cli/src/telemetry/__tests__/telemetry.test.ts @@ -1,3 +1,4 @@ +import type { GlobalConfig } from 
'@n8n/config'; import type RudderStack from '@rudderstack/rudder-sdk-node'; import { mock } from 'jest-mock-extended'; import { InstanceSettings } from 'n8n-core'; @@ -41,10 +42,17 @@ describe('Telemetry', () => { beforeEach(async () => { spyTrack.mockClear(); - const postHog = new PostHogClient(instanceSettings); + const postHog = new PostHogClient(instanceSettings, mock()); await postHog.init(); - telemetry = new Telemetry(mock(), postHog, mock(), instanceSettings, mock()); + telemetry = new Telemetry( + mock(), + postHog, + mock(), + instanceSettings, + mock(), + mock({ logging: { level: 'info', outputs: ['console'] } }), + ); // @ts-expect-error Assigning to private property telemetry.rudderStack = mockRudderStack; }); @@ -259,6 +267,44 @@ describe('Telemetry', () => { expect(execBuffer['2'].prod_success?.first).toEqual(execTime1); }); }); + + describe('Rudderstack', () => { + test("should call rudderStack.identify() with a fake IP address to instruct Rudderstack to not use the user's IP address", () => { + const traits = { + name: 'Test User', + age: 30, + isActive: true, + }; + + telemetry.identify(traits); + + const expectedArgs = { + userId: instanceId, + traits: { ...traits, instanceId }, + context: { + ip: '0.0.0.0', // RudderStack anonymized IP + }, + }; + + expect(mockRudderStack.identify).toHaveBeenCalledWith(expectedArgs); + }); + + test("should call rudderStack.track() with a fake IP address to instruct Rudderstack to not use the user's IP address", () => { + const eventName = 'Test Event'; + const properties = { user_id: '1234' }; + + telemetry.track(eventName, properties); + + expect(mockRudderStack.track).toHaveBeenCalledWith( + expect.objectContaining({ + event: eventName, + context: { + ip: '0.0.0.0', // RudderStack anonymized IP + }, + }), + ); + }); + }); }); const fakeJestSystemTime = (dateTime: string | Date): Date => { diff --git a/packages/cli/src/telemetry/index.ts b/packages/cli/src/telemetry/index.ts index 8ffca478c1..d9a8e590f4 100644 
--- a/packages/cli/src/telemetry/index.ts +++ b/packages/cli/src/telemetry/index.ts @@ -1,3 +1,4 @@ +import { GlobalConfig } from '@n8n/config'; import type RudderStack from '@rudderstack/rudder-sdk-node'; import axios from 'axios'; import { InstanceSettings } from 'n8n-core'; @@ -13,7 +14,7 @@ import { WorkflowRepository } from '@/databases/repositories/workflow.repository import { OnShutdown } from '@/decorators/on-shutdown'; import type { IExecutionTrackProperties } from '@/interfaces'; import { License } from '@/license'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { PostHogClient } from '@/posthog'; import { SourceControlPreferencesService } from '../environments/source-control/source-control-preferences.service.ee'; @@ -49,6 +50,7 @@ export class Telemetry { private readonly license: License, private readonly instanceSettings: InstanceSettings, private readonly workflowRepository: WorkflowRepository, + private readonly globalConfig: GlobalConfig, ) {} async init() { @@ -62,7 +64,7 @@ export class Telemetry { return; } - const logLevel = config.getEnv('logs.level'); + const logLevel = this.globalConfig.logging.level; const { default: RudderStack } = await import('@rudderstack/rudder-sdk-node'); const axiosInstance = axios.create(); @@ -186,6 +188,10 @@ export class Telemetry { this.rudderStack.identify({ userId: instanceId, traits: { ...traits, instanceId }, + context: { + // provide a fake IP address to instruct RudderStack to not use the user's IP address + ip: '0.0.0.0', + }, }); } @@ -210,13 +216,18 @@ export class Telemetry { userId: `${instanceId}${user_id ? 
`#${user_id}` : ''}`, event: eventName, properties: updatedProperties, + context: {}, }; if (withPostHog) { this.postHog?.track(payload); } - return this.rudderStack.track(payload); + return this.rudderStack.track({ + ...payload, + // provide a fake IP address to instruct RudderStack to not use the user's IP address + context: { ...payload.context, ip: '0.0.0.0' }, + }); } // test helpers diff --git a/packages/cli/src/user-management/email/node-mailer.ts b/packages/cli/src/user-management/email/node-mailer.ts index cfc7247546..661c3fed7f 100644 --- a/packages/cli/src/user-management/email/node-mailer.ts +++ b/packages/cli/src/user-management/email/node-mailer.ts @@ -7,7 +7,7 @@ import { createTransport } from 'nodemailer'; import type SMTPConnection from 'nodemailer/lib/smtp-connection'; import { Service } from 'typedi'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import type { MailData, SendEmailResult } from './interfaces'; diff --git a/packages/cli/src/user-management/email/user-management-mailer.ts b/packages/cli/src/user-management/email/user-management-mailer.ts index 6703354f07..b5df958d7d 100644 --- a/packages/cli/src/user-management/email/user-management-mailer.ts +++ b/packages/cli/src/user-management/email/user-management-mailer.ts @@ -11,7 +11,7 @@ import type { WorkflowEntity } from '@/databases/entities/workflow-entity'; import { UserRepository } from '@/databases/repositories/user.repository'; import { InternalServerError } from '@/errors/response-errors/internal-server.error'; import { EventService } from '@/events/event.service'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { UrlService } from '@/services/url.service'; import { toError } from '@/utils'; diff --git a/packages/cli/src/wait-tracker.ts b/packages/cli/src/wait-tracker.ts index 7792cf5285..82b42c39df 100644 --- a/packages/cli/src/wait-tracker.ts +++ b/packages/cli/src/wait-tracker.ts @@ -6,7 
+6,7 @@ import { import { Service } from 'typedi'; import { ExecutionRepository } from '@/databases/repositories/execution.repository'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { OrchestrationService } from '@/services/orchestration.service'; import { OwnershipService } from '@/services/ownership.service'; import { WorkflowRunner } from '@/workflow-runner'; diff --git a/packages/cli/src/webhooks/live-webhooks.ts b/packages/cli/src/webhooks/live-webhooks.ts index e9314060d7..458701caee 100644 --- a/packages/cli/src/webhooks/live-webhooks.ts +++ b/packages/cli/src/webhooks/live-webhooks.ts @@ -1,12 +1,12 @@ import type { Response } from 'express'; -import { Workflow, NodeHelpers } from 'n8n-workflow'; +import { Workflow, NodeHelpers, CHAT_TRIGGER_NODE_TYPE } from 'n8n-workflow'; import type { INode, IWebhookData, IHttpRequestMethods } from 'n8n-workflow'; import { Service } from 'typedi'; import { WorkflowRepository } from '@/databases/repositories/workflow.repository'; import { NotFoundError } from '@/errors/response-errors/not-found.error'; import { WebhookNotFoundError } from '@/errors/response-errors/webhook-not-found.error'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { NodeTypes } from '@/node-types'; import * as WebhookHelpers from '@/webhooks/webhook-helpers'; import { WebhookService } from '@/webhooks/webhook.service'; @@ -47,12 +47,18 @@ export class LiveWebhooks implements IWebhookManager { select: ['nodes'], }); + const isChatWebhookNode = (type: string, webhookId?: string) => + type === CHAT_TRIGGER_NODE_TYPE && `${webhookId}/chat` === path; + const nodes = workflowData?.nodes; const webhookNode = nodes?.find( - ({ type, parameters, typeVersion }) => - parameters?.path === path && - (parameters?.httpMethod ?? 
'GET') === httpMethod && - 'webhook' in this.nodeTypes.getByNameAndVersion(type, typeVersion), + ({ type, parameters, typeVersion, webhookId }) => + (parameters?.path === path && + (parameters?.httpMethod ?? 'GET') === httpMethod && + 'webhook' in this.nodeTypes.getByNameAndVersion(type, typeVersion)) || + // Chat Trigger has doesn't have configurable path and is always using POST, so + // we need to use webhookId for matching + isChatWebhookNode(type, webhookId), ); return webhookNode?.parameters?.options as WebhookAccessControlOptions; } @@ -154,8 +160,9 @@ export class LiveWebhooks implements IWebhookManager { } const webhook = await this.webhookService.findWebhook(httpMethod, path); + const webhookMethods = await this.getWebhookMethods(path); if (webhook === null) { - throw new WebhookNotFoundError({ path, httpMethod }, { hint: 'production' }); + throw new WebhookNotFoundError({ path, httpMethod, webhookMethods }, { hint: 'production' }); } return webhook; diff --git a/packages/cli/src/webhooks/waiting-webhooks.ts b/packages/cli/src/webhooks/waiting-webhooks.ts index 6493fde981..922de9d869 100644 --- a/packages/cli/src/webhooks/waiting-webhooks.ts +++ b/packages/cli/src/webhooks/waiting-webhooks.ts @@ -6,7 +6,7 @@ import { ExecutionRepository } from '@/databases/repositories/execution.reposito import { ConflictError } from '@/errors/response-errors/conflict.error'; import { NotFoundError } from '@/errors/response-errors/not-found.error'; import type { IExecutionResponse, IWorkflowDb } from '@/interfaces'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { NodeTypes } from '@/node-types'; import * as WebhookHelpers from '@/webhooks/webhook-helpers'; import * as WorkflowExecuteAdditionalData from '@/workflow-execute-additional-data'; diff --git a/packages/cli/src/webhooks/webhook-helpers.ts b/packages/cli/src/webhooks/webhook-helpers.ts index c2c3b59bff..5ff770acfb 100644 --- 
a/packages/cli/src/webhooks/webhook-helpers.ts +++ b/packages/cli/src/webhooks/webhook-helpers.ts @@ -45,7 +45,7 @@ import { InternalServerError } from '@/errors/response-errors/internal-server.er import { NotFoundError } from '@/errors/response-errors/not-found.error'; import { UnprocessableRequestError } from '@/errors/response-errors/unprocessable.error'; import type { IExecutionDb, IWorkflowDb } from '@/interfaces'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { parseBody } from '@/middlewares'; import { OwnershipService } from '@/services/ownership.service'; import { WorkflowStatisticsService } from '@/services/workflow-statistics.service'; diff --git a/packages/cli/src/workflow-execute-additional-data.ts b/packages/cli/src/workflow-execute-additional-data.ts index 4d3bd7a223..f357bbc018 100644 --- a/packages/cli/src/workflow-execute-additional-data.ts +++ b/packages/cli/src/workflow-execute-additional-data.ts @@ -24,6 +24,8 @@ import type { WorkflowExecuteMode, ExecutionStatus, ExecutionError, + IExecuteFunctions, + ITaskDataConnections, ExecuteWorkflowOptions, IWorkflowExecutionDataProcess, } from 'n8n-workflow'; @@ -40,8 +42,13 @@ import { ActiveExecutions } from '@/active-executions'; import config from '@/config'; import { CredentialsHelper } from '@/credentials-helper'; import { ExecutionRepository } from '@/databases/repositories/execution.repository'; +import type { AiEventMap, AiEventPayload } from '@/events/maps/ai.event-map'; import { ExternalHooks } from '@/external-hooks'; -import type { IWorkflowExecuteProcess, IWorkflowErrorData, ExecutionPayload } from '@/interfaces'; +import type { + IWorkflowExecuteProcess, + IWorkflowErrorData, + UpdateExecutionPayload, +} from '@/interfaces'; import { NodeTypes } from '@/node-types'; import { Push } from '@/push'; import { WorkflowStatisticsService } from '@/services/workflow-statistics.service'; @@ -49,7 +56,6 @@ import { findSubworkflowStart, 
isWorkflowIdValid } from '@/utils'; import * as WorkflowHelpers from '@/workflow-helpers'; import { WorkflowRepository } from './databases/repositories/workflow.repository'; -import type { AiEventMap, AiEventPayload } from './events/ai-event-map'; import { EventService } from './events/event.service'; import { restoreBinaryDataId } from './execution-lifecycle-hooks/restore-binary-data-id'; import { saveExecutionProgress } from './execution-lifecycle-hooks/save-execution-progress'; @@ -59,7 +65,8 @@ import { updateExistingExecution, } from './execution-lifecycle-hooks/shared/shared-hook-functions'; import { toSaveSettings } from './execution-lifecycle-hooks/to-save-settings'; -import { Logger } from './logger'; +import { Logger } from './logging/logger.service'; +import { TaskManager } from './runners/task-managers/task-manager'; import { SecretsHelper } from './secrets-helpers'; import { OwnershipService } from './services/ownership.service'; import { UrlService } from './services/url.service'; @@ -759,7 +766,7 @@ export async function getWorkflowData( /** * Executes the workflow with the given ID */ -async function executeWorkflow( +export async function executeWorkflow( workflowInfo: IExecuteWorkflowInfo, additionalData: IWorkflowExecuteAdditionalData, options: ExecuteWorkflowOptions, @@ -791,7 +798,13 @@ async function executeWorkflow( const runData = options.loadedRunData ?? (await getRunData(workflowData, options.inputData)); const executionId = await activeExecutions.add(runData); - await executionRepository.updateStatus(executionId, 'running'); + + /** + * A subworkflow execution in queue mode is not enqueued, but rather runs in the + * same worker process as the parent execution. Hence ensure the subworkflow + * execution is marked as started as well. 
+ */ + await executionRepository.setRunning(executionId); Container.get(EventService).emit('workflow-pre-execute', { executionId, data: runData }); @@ -865,7 +878,7 @@ async function executeWorkflow( // Therefore, database might not contain finished errors. // Force an update to db as there should be no harm doing this - const fullExecutionData: ExecutionPayload = { + const fullExecutionData: UpdateExecutionPayload = { data: fullRunData.data, mode: fullRunData.mode, finished: fullRunData.finished ? fullRunData.finished : false, @@ -980,6 +993,47 @@ export async function getBase( setExecutionStatus, variables, secretsHelpers: Container.get(SecretsHelper), + async startAgentJob( + additionalData: IWorkflowExecuteAdditionalData, + jobType: string, + settings: unknown, + executeFunctions: IExecuteFunctions, + inputData: ITaskDataConnections, + node: INode, + workflow: Workflow, + runExecutionData: IRunExecutionData, + runIndex: number, + itemIndex: number, + activeNodeName: string, + connectionInputData: INodeExecutionData[], + siblingParameters: INodeParameters, + mode: WorkflowExecuteMode, + executeData?: IExecuteData, + defaultReturnRunIndex?: number, + selfData?: IDataObject, + contextNodeName?: string, + ) { + return await Container.get(TaskManager).startTask( + additionalData, + jobType, + settings, + executeFunctions, + inputData, + node, + workflow, + runExecutionData, + runIndex, + itemIndex, + activeNodeName, + connectionInputData, + siblingParameters, + mode, + executeData, + defaultReturnRunIndex, + selfData, + contextNodeName, + ); + }, logAiEvent: (eventName: keyof AiEventMap, payload: AiEventPayload) => eventService.emit(eventName, payload), }; diff --git a/packages/cli/src/workflow-runner.ts b/packages/cli/src/workflow-runner.ts index a4dd344b62..8d1e147e85 100644 --- a/packages/cli/src/workflow-runner.ts +++ b/packages/cli/src/workflow-runner.ts @@ -26,7 +26,7 @@ import { ActiveExecutions } from '@/active-executions'; import config from '@/config'; 
import { ExecutionRepository } from '@/databases/repositories/execution.repository'; import { ExternalHooks } from '@/external-hooks'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { NodeTypes } from '@/node-types'; import type { ScalingService } from '@/scaling/scaling.service'; import type { Job, JobData, JobResult } from '@/scaling/scaling.types'; @@ -245,7 +245,7 @@ export class WorkflowRunner { { executionId }, ); let workflowExecution: PCancelable; - await this.executionRepository.updateStatus(executionId, 'running'); + await this.executionRepository.setRunning(executionId); // write try { additionalData.hooks = WorkflowExecuteAdditionalData.getWorkflowHooksMain(data, executionId); @@ -376,22 +376,12 @@ export class WorkflowRunner { this.scalingService = Container.get(ScalingService); } - let priority = 100; - if (realtime === true) { - // Jobs which require a direct response get a higher priority - priority = 50; - } // TODO: For realtime jobs should probably also not do retry or not retry if they are older than x seconds. // Check if they get retried by default and how often. - const jobOptions = { - priority, - removeOnComplete: true, - removeOnFail: true, - }; let job: Job; let hooks: WorkflowHooks; try { - job = await this.scalingService.addJob(jobData, jobOptions); + job = await this.scalingService.addJob(jobData, { priority: realtime ? 
50 : 100 }); hooks = WorkflowExecuteAdditionalData.getWorkflowHooksWorkerMain( data.executionMode, diff --git a/packages/cli/src/workflows/workflow-execution.service.ts b/packages/cli/src/workflows/workflow-execution.service.ts index 4dc6d00f34..dd8480d759 100644 --- a/packages/cli/src/workflows/workflow-execution.service.ts +++ b/packages/cli/src/workflows/workflow-execution.service.ts @@ -22,8 +22,8 @@ import type { Project } from '@/databases/entities/project'; import type { User } from '@/databases/entities/user'; import { ExecutionRepository } from '@/databases/repositories/execution.repository'; import { WorkflowRepository } from '@/databases/repositories/workflow.repository'; -import type { ExecutionPayload, IWorkflowDb, IWorkflowErrorData } from '@/interfaces'; -import { Logger } from '@/logger'; +import type { CreateExecutionPayload, IWorkflowDb, IWorkflowErrorData } from '@/interfaces'; +import { Logger } from '@/logging/logger.service'; import { NodeTypes } from '@/node-types'; import { SubworkflowPolicyChecker } from '@/subworkflows/subworkflow-policy-checker.service'; import { TestWebhooks } from '@/webhooks/test-webhooks'; @@ -206,11 +206,10 @@ export class WorkflowExecutionService { initialNode, ); - const fullExecutionData: ExecutionPayload = { + const fullExecutionData: CreateExecutionPayload = { data: fakeExecution.data, mode: fakeExecution.mode, finished: false, - startedAt: new Date(), stoppedAt: new Date(), workflowData, waitTill: null, diff --git a/packages/cli/src/workflows/workflow-history/__tests__/workflow-history.service.ee.test.ts b/packages/cli/src/workflows/workflow-history/__tests__/workflow-history.service.ee.test.ts index 8bc54318bf..cb1b3952ad 100644 --- a/packages/cli/src/workflows/workflow-history/__tests__/workflow-history.service.ee.test.ts +++ b/packages/cli/src/workflows/workflow-history/__tests__/workflow-history.service.ee.test.ts @@ -3,7 +3,7 @@ import { mockClear } from 'jest-mock-extended'; import { User } from 
'@/databases/entities/user'; import { SharedWorkflowRepository } from '@/databases/repositories/shared-workflow.repository'; import { WorkflowHistoryRepository } from '@/databases/repositories/workflow-history.repository'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { WorkflowHistoryService } from '@/workflows/workflow-history/workflow-history.service.ee'; import { mockInstance } from '@test/mocking'; import { getWorkflow } from '@test-integration/workflow'; diff --git a/packages/cli/src/workflows/workflow-history/workflow-history.service.ee.ts b/packages/cli/src/workflows/workflow-history/workflow-history.service.ee.ts index 7d40c38ec0..eddb8bf7e6 100644 --- a/packages/cli/src/workflows/workflow-history/workflow-history.service.ee.ts +++ b/packages/cli/src/workflows/workflow-history/workflow-history.service.ee.ts @@ -7,7 +7,7 @@ import { SharedWorkflowRepository } from '@/databases/repositories/shared-workfl import { WorkflowHistoryRepository } from '@/databases/repositories/workflow-history.repository'; import { SharedWorkflowNotFoundError } from '@/errors/shared-workflow-not-found.error'; import { WorkflowHistoryVersionNotFoundError } from '@/errors/workflow-history-version-not-found.error'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { isWorkflowHistoryEnabled } from './workflow-history-helper.ee'; diff --git a/packages/cli/src/workflows/workflow-static-data.service.ts b/packages/cli/src/workflows/workflow-static-data.service.ts index f639345a1c..10655b77c7 100644 --- a/packages/cli/src/workflows/workflow-static-data.service.ts +++ b/packages/cli/src/workflows/workflow-static-data.service.ts @@ -3,7 +3,7 @@ import { type IDataObject, type Workflow, ErrorReporterProxy as ErrorReporter } import { Service } from 'typedi'; import { WorkflowRepository } from '@/databases/repositories/workflow.repository'; -import { Logger } from '@/logger'; +import { Logger } from 
'@/logging/logger.service'; import { isWorkflowIdValid } from '@/utils'; @Service() diff --git a/packages/cli/src/workflows/workflow.service.ee.ts b/packages/cli/src/workflows/workflow.service.ee.ts index 5456ac6268..90a8af90b1 100644 --- a/packages/cli/src/workflows/workflow.service.ee.ts +++ b/packages/cli/src/workflows/workflow.service.ee.ts @@ -17,7 +17,7 @@ import { WorkflowRepository } from '@/databases/repositories/workflow.repository import { BadRequestError } from '@/errors/response-errors/bad-request.error'; import { NotFoundError } from '@/errors/response-errors/not-found.error'; import { TransferWorkflowError } from '@/errors/response-errors/transfer-workflow.error'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { OwnershipService } from '@/services/ownership.service'; import { ProjectService } from '@/services/project.service'; @@ -285,14 +285,6 @@ export class EnterpriseWorkflowService { "You can't transfer a workflow into the project that's already owning it.", ); } - if (sourceProject.type !== 'team' && sourceProject.type !== 'personal') { - throw new TransferWorkflowError( - 'You can only transfer workflows out of personal or team projects.', - ); - } - if (destinationProject.type !== 'team') { - throw new TransferWorkflowError('You can only transfer workflows into team projects.'); - } // 6. 
deactivate workflow if necessary const wasActive = workflow.active; diff --git a/packages/cli/src/workflows/workflow.service.ts b/packages/cli/src/workflows/workflow.service.ts index 03ab425a24..bce8770303 100644 --- a/packages/cli/src/workflows/workflow.service.ts +++ b/packages/cli/src/workflows/workflow.service.ts @@ -24,7 +24,7 @@ import { NotFoundError } from '@/errors/response-errors/not-found.error'; import { EventService } from '@/events/event.service'; import { ExternalHooks } from '@/external-hooks'; import { validateEntity } from '@/generic-helpers'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { hasSharing, type ListQuery } from '@/requests'; import { OrchestrationService } from '@/services/orchestration.service'; import { OwnershipService } from '@/services/ownership.service'; diff --git a/packages/cli/src/workflows/workflows.controller.ts b/packages/cli/src/workflows/workflows.controller.ts index 30ca5c9773..59f53e0df1 100644 --- a/packages/cli/src/workflows/workflows.controller.ts +++ b/packages/cli/src/workflows/workflows.controller.ts @@ -27,7 +27,7 @@ import { ExternalHooks } from '@/external-hooks'; import { validateEntity } from '@/generic-helpers'; import type { IWorkflowResponse } from '@/interfaces'; import { License } from '@/license'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { listQueryMiddleware } from '@/middlewares'; import * as ResponseHelper from '@/response-helper'; import { NamingService } from '@/services/naming.service'; diff --git a/packages/cli/templates/form-trigger.handlebars b/packages/cli/templates/form-trigger.handlebars index 67818629f5..5493a76e7f 100644 --- a/packages/cli/templates/form-trigger.handlebars +++ b/packages/cli/templates/form-trigger.handlebars @@ -735,6 +735,14 @@ } return; + }).then(() => { + window.addEventListener('storage', function(event) { + if (event.key === 'n8n_redirect_to_next_form_test_page' && 
event.newValue) { + const newUrl = event.newValue; + localStorage.removeItem('n8n_redirect_to_next_form_test_page'); + window.location.replace(newUrl); + } + }); }) .catch(function (error) { console.error('Error:', error); diff --git a/packages/cli/test/integration/api-keys.api.test.ts b/packages/cli/test/integration/api-keys.api.test.ts new file mode 100644 index 0000000000..f577e0cf78 --- /dev/null +++ b/packages/cli/test/integration/api-keys.api.test.ts @@ -0,0 +1,178 @@ +import { GlobalConfig } from '@n8n/config'; +import { Container } from 'typedi'; + +import type { ApiKey } from '@/databases/entities/api-key'; +import type { User } from '@/databases/entities/user'; +import { ApiKeyRepository } from '@/databases/repositories/api-key.repository'; +import { PublicApiKeyService } from '@/services/public-api-key.service'; +import { mockInstance } from '@test/mocking'; + +import { createOwnerWithApiKey, createUser, createUserShell } from './shared/db/users'; +import { randomValidPassword } from './shared/random'; +import * as testDb from './shared/test-db'; +import type { SuperAgentTest } from './shared/types'; +import * as utils from './shared/utils/'; + +const testServer = utils.setupTestServer({ endpointGroups: ['apiKeys'] }); +let publicApiKeyService: PublicApiKeyService; + +beforeAll(() => { + publicApiKeyService = Container.get(PublicApiKeyService); +}); + +beforeEach(async () => { + await testDb.truncate(['User']); + mockInstance(GlobalConfig, { publicApi: { disabled: false } }); +}); + +describe('When public API is disabled', () => { + let owner: User; + let authAgent: SuperAgentTest; + + beforeEach(async () => { + owner = await createOwnerWithApiKey(); + + authAgent = testServer.authAgentFor(owner); + mockInstance(GlobalConfig, { publicApi: { disabled: true } }); + }); + + test('POST /api-keys should 404', async () => { + await authAgent.post('/api-keys').expect(404); + }); + + test('GET /api-keys should 404', async () => { + await 
authAgent.get('/api-keys').expect(404); + }); + + test('DELETE /api-key/:id should 404', async () => { + await authAgent.delete(`/api-keys/${1}`).expect(404); + }); +}); + +describe('Owner shell', () => { + let ownerShell: User; + + beforeEach(async () => { + ownerShell = await createUserShell('global:owner'); + }); + + test('POST /api-keys should create an api key', async () => { + const newApiKeyResponse = await testServer.authAgentFor(ownerShell).post('/api-keys'); + + const newApiKey = newApiKeyResponse.body.data as ApiKey; + + expect(newApiKeyResponse.statusCode).toBe(200); + expect(newApiKey).toBeDefined(); + + const newStoredApiKey = await Container.get(ApiKeyRepository).findOneByOrFail({ + userId: ownerShell.id, + }); + + expect(newStoredApiKey).toEqual({ + id: expect.any(String), + label: 'My API Key', + userId: ownerShell.id, + apiKey: newApiKey.apiKey, + createdAt: expect.any(Date), + updatedAt: expect.any(Date), + }); + }); + + test('GET /api-keys should fetch the api key redacted', async () => { + const newApiKeyResponse = await testServer.authAgentFor(ownerShell).post('/api-keys'); + + const retrieveAllApiKeysResponse = await testServer.authAgentFor(ownerShell).get('/api-keys'); + + expect(retrieveAllApiKeysResponse.statusCode).toBe(200); + + expect(retrieveAllApiKeysResponse.body.data[0]).toEqual({ + id: newApiKeyResponse.body.data.id, + label: 'My API Key', + userId: ownerShell.id, + apiKey: publicApiKeyService.redactApiKey(newApiKeyResponse.body.data.apiKey), + createdAt: expect.any(String), + updatedAt: expect.any(String), + }); + }); + + test('DELETE /api-keys/:id should delete the api key', async () => { + const newApiKeyResponse = await testServer.authAgentFor(ownerShell).post('/api-keys'); + + const deleteApiKeyResponse = await testServer + .authAgentFor(ownerShell) + .delete(`/api-keys/${newApiKeyResponse.body.data.id}`); + + const retrieveAllApiKeysResponse = await testServer.authAgentFor(ownerShell).get('/api-keys'); + + 
expect(deleteApiKeyResponse.body.data.success).toBe(true); + expect(retrieveAllApiKeysResponse.body.data.length).toBe(0); + }); +}); + +describe('Member', () => { + const memberPassword = randomValidPassword(); + let member: User; + + beforeEach(async () => { + member = await createUser({ + password: memberPassword, + role: 'global:member', + }); + await utils.setInstanceOwnerSetUp(true); + }); + + test('POST /api-keys should create an api key', async () => { + const newApiKeyResponse = await testServer.authAgentFor(member).post('/api-keys'); + + expect(newApiKeyResponse.statusCode).toBe(200); + expect(newApiKeyResponse.body.data.apiKey).toBeDefined(); + expect(newApiKeyResponse.body.data.apiKey).not.toBeNull(); + + const newStoredApiKey = await Container.get(ApiKeyRepository).findOneByOrFail({ + userId: member.id, + }); + + expect(newStoredApiKey).toEqual({ + id: expect.any(String), + label: 'My API Key', + userId: member.id, + apiKey: newApiKeyResponse.body.data.apiKey, + createdAt: expect.any(Date), + updatedAt: expect.any(Date), + }); + }); + + test('GET /api-keys should fetch the api key redacted', async () => { + const newApiKeyResponse = await testServer.authAgentFor(member).post('/api-keys'); + + const retrieveAllApiKeysResponse = await testServer.authAgentFor(member).get('/api-keys'); + + expect(retrieveAllApiKeysResponse.statusCode).toBe(200); + + expect(retrieveAllApiKeysResponse.body.data[0]).toEqual({ + id: newApiKeyResponse.body.data.id, + label: 'My API Key', + userId: member.id, + apiKey: publicApiKeyService.redactApiKey(newApiKeyResponse.body.data.apiKey), + createdAt: expect.any(String), + updatedAt: expect.any(String), + }); + + expect(newApiKeyResponse.body.data.apiKey).not.toEqual( + retrieveAllApiKeysResponse.body.data[0].apiKey, + ); + }); + + test('DELETE /api-keys/:id should delete the api key', async () => { + const newApiKeyResponse = await testServer.authAgentFor(member).post('/api-keys'); + + const deleteApiKeyResponse = await 
testServer + .authAgentFor(member) + .delete(`/api-keys/${newApiKeyResponse.body.data.id}`); + + const retrieveAllApiKeysResponse = await testServer.authAgentFor(member).get('/api-keys'); + + expect(deleteApiKeyResponse.body.data.success).toBe(true); + expect(retrieveAllApiKeysResponse.body.data.length).toBe(0); + }); +}); diff --git a/packages/cli/test/integration/commands/worker.cmd.test.ts b/packages/cli/test/integration/commands/worker.cmd.test.ts index 726c78537e..1ff05181b2 100644 --- a/packages/cli/test/integration/commands/worker.cmd.test.ts +++ b/packages/cli/test/integration/commands/worker.cmd.test.ts @@ -5,11 +5,12 @@ import { BinaryDataService } from 'n8n-core'; import { Worker } from '@/commands/worker'; import config from '@/config'; import { MessageEventBus } from '@/eventbus/message-event-bus/message-event-bus'; -import { LogStreamingEventRelay } from '@/events/log-streaming-event-relay'; +import { LogStreamingEventRelay } from '@/events/relays/log-streaming.event-relay'; import { ExternalHooks } from '@/external-hooks'; import { ExternalSecretsManager } from '@/external-secrets/external-secrets-manager.ee'; import { License } from '@/license'; import { LoadNodesAndCredentials } from '@/load-nodes-and-credentials'; +import { Publisher } from '@/scaling/pubsub/publisher.service'; import { ScalingService } from '@/scaling/scaling.service'; import { OrchestrationHandlerWorkerService } from '@/services/orchestration/worker/orchestration.handler.worker.service'; import { OrchestrationWorkerService } from '@/services/orchestration/worker/orchestration.worker.service'; @@ -29,12 +30,12 @@ const logStreamingEventRelay = mockInstance(LogStreamingEventRelay); const orchestrationHandlerWorkerService = mockInstance(OrchestrationHandlerWorkerService); const scalingService = mockInstance(ScalingService); const orchestrationWorkerService = mockInstance(OrchestrationWorkerService); +mockInstance(Publisher); const command = setupTestCommand(Worker); test('worker 
initializes all its components', async () => { const worker = await command.run(); - expect(worker.queueModeId).toBeDefined(); expect(worker.queueModeId).toContain('worker'); expect(worker.queueModeId.length).toBeGreaterThan(15); diff --git a/packages/cli/test/integration/credentials/credentials.api.ee.test.ts b/packages/cli/test/integration/credentials/credentials.api.ee.test.ts index b1c0bfab75..5428cafbd4 100644 --- a/packages/cli/test/integration/credentials/credentials.api.ee.test.ts +++ b/packages/cli/test/integration/credentials/credentials.api.ee.test.ts @@ -3,6 +3,7 @@ import { Container } from 'typedi'; import config from '@/config'; import type { Project } from '@/databases/entities/project'; +import type { ProjectRole } from '@/databases/entities/project-relation'; import type { User } from '@/databases/entities/user'; import { ProjectRepository } from '@/databases/repositories/project.repository'; import { SharedCredentialsRepository } from '@/databases/repositories/shared-credentials.repository'; @@ -1118,18 +1119,6 @@ describe('PUT /:credentialId/transfer', () => { .expect(400); }); - test('cannot transfer into a personal project', async () => { - const credential = await saveCredential(randomCredentialPayload(), { - user: member, - }); - - await testServer - .authAgentFor(member) - .put(`/credentials/${credential.id}/transfer`) - .send({ destinationProjectId: memberPersonalProject.id }) - .expect(400); - }); - test('cannot transfer somebody elses credential', async () => { const destinationProject = await createTeamProject('Destination Project', member); @@ -1158,187 +1147,139 @@ describe('PUT /:credentialId/transfer', () => { .expect(404); }); - test('project:editors cannot transfer credentials', async () => { - // - // ARRANGE - // - const sourceProject = await createTeamProject('Source Project'); - await linkUserToProject(member, sourceProject, 'project:editor'); - - const credential = await saveCredential(randomCredentialPayload(), { - project: 
sourceProject, - }); - - const destinationProject = await createTeamProject('Destination Project', member); - - // - // ACT & ASSERT - // - await testServer - .authAgentFor(member) - .put(`/credentials/${credential.id}/transfer`) - .send({ destinationProjectId: destinationProject.id }) - .expect(403); - }); - - test('transferring from a personal project to a team project severs all sharings', async () => { - // - // ARRANGE - // - const credential = await saveCredential(randomCredentialPayload(), { user: member }); - - // these sharings should be deleted by the transfer - await shareCredentialWithUsers(credential, [anotherMember, owner]); - - const destinationProject = await createTeamProject('Destination Project', member); - - // - // ACT - // - const response = await testServer - .authAgentFor(member) - .put(`/credentials/${credential.id}/transfer`) - .send({ destinationProjectId: destinationProject.id }) - .expect(200); - - // - // ASSERT - // - expect(response.body).toEqual({}); - - const allSharings = await getCredentialSharings(credential); - expect(allSharings).toHaveLength(1); - expect(allSharings[0]).toMatchObject({ - projectId: destinationProject.id, - credentialsId: credential.id, - role: 'credential:owner', - }); - }); - - test('can transfer from team to another team project', async () => { - // - // ARRANGE - // - const sourceProject = await createTeamProject('Team Project 1', member); - const credential = await saveCredential(randomCredentialPayload(), { - project: sourceProject, - }); - - const destinationProject = await createTeamProject('Team Project 2', member); - - // - // ACT - // - const response = await testServer - .authAgentFor(member) - .put(`/credentials/${credential.id}/transfer`) - .send({ destinationProjectId: destinationProject.id }) - .expect(200); - - // - // ASSERT - // - expect(response.body).toEqual({}); - - const allSharings = await getCredentialSharings(credential); - expect(allSharings).toHaveLength(1); - 
expect(allSharings[0]).toMatchObject({ - projectId: destinationProject.id, - credentialsId: credential.id, - role: 'credential:owner', - }); - }); - - test.each([ - ['owners', () => owner], - ['admins', () => admin], - ])( - '%s can always transfer from any personal or team project into any team project', - async (_name, actor) => { + test.each(['project:editor', 'project:viewer'])( + '%ss cannot transfer credentials', + async (projectRole) => { // // ARRANGE // - const sourceProject = await createTeamProject('Source Project', member); - const teamCredential = await saveCredential(randomCredentialPayload(), { + const sourceProject = await createTeamProject('Source Project'); + await linkUserToProject(member, sourceProject, projectRole); + + const credential = await saveCredential(randomCredentialPayload(), { project: sourceProject, }); - const personalCredential = await saveCredential(randomCredentialPayload(), { user: member }); - const destinationProject = await createTeamProject('Destination Project', member); // + // ACT & ASSERT + // + await testServer + .authAgentFor(member) + .put(`/credentials/${credential.id}/transfer`) + .send({ destinationProjectId: destinationProject.id }) + .expect(403); + }, + ); + + test.each< + [ + // user role + 'owners' | 'admins', + // source project type + 'team' | 'personal', + // destination project type + 'team' | 'personal', + // actor + () => User, + // source project + () => Promise | Project, + // destination project + () => Promise | Project, + ] + >([ + // owner + [ + 'owners', + 'team', + 'team', + () => owner, + async () => await createTeamProject('Source Project'), + async () => await createTeamProject('Destination Project'), + ], + [ + 'owners', + 'team', + 'personal', + () => owner, + async () => await createTeamProject('Source Project'), + () => memberPersonalProject, + ], + [ + 'owners', + 'personal', + 'team', + () => owner, + () => memberPersonalProject, + async () => await createTeamProject('Destination 
Project'), + ], + + // admin + [ + 'admins', + 'team', + 'team', + () => admin, + async () => await createTeamProject('Source Project'), + async () => await createTeamProject('Destination Project'), + ], + [ + 'admins', + 'team', + 'personal', + () => admin, + async () => await createTeamProject('Source Project'), + () => memberPersonalProject, + ], + [ + 'admins', + 'personal', + 'team', + () => admin, + () => memberPersonalProject, + async () => await createTeamProject('Destination Project'), + ], + ])( + '%s can always transfer from a %s project to a %s project', + async ( + _roleName, + _sourceProjectName, + _destinationProjectName, + getUser, + getSourceProject, + getDestinationProject, + ) => { + // ARRANGE + const user = getUser(); + const sourceProject = await getSourceProject(); + const destinationProject = await getDestinationProject(); + + const credential = await saveCredential(randomCredentialPayload(), { + project: sourceProject, + }); + // ACT - // - const response1 = await testServer - .authAgentFor(actor()) - .put(`/credentials/${teamCredential.id}/transfer`) - .send({ destinationProjectId: destinationProject.id }) - .expect(200); - const response2 = await testServer - .authAgentFor(actor()) - .put(`/credentials/${personalCredential.id}/transfer`) + const response = await testServer + .authAgentFor(user) + .put(`/credentials/${credential.id}/transfer`) .send({ destinationProjectId: destinationProject.id }) .expect(200); - // // ASSERT - // - expect(response1.body).toEqual({}); - expect(response2.body).toEqual({}); + expect(response.body).toEqual({}); { - const allSharings = await getCredentialSharings(teamCredential); + const allSharings = await getCredentialSharings(credential); expect(allSharings).toHaveLength(1); expect(allSharings[0]).toMatchObject({ projectId: destinationProject.id, - credentialsId: teamCredential.id, - role: 'credential:owner', - }); - } - - { - const allSharings = await getCredentialSharings(personalCredential); - 
expect(allSharings).toHaveLength(1); - expect(allSharings[0]).toMatchObject({ - projectId: destinationProject.id, - credentialsId: personalCredential.id, + credentialsId: credential.id, role: 'credential:owner', }); } }, ); - - test.each([ - ['owners', () => owner], - ['admins', () => admin], - ])('%s cannot transfer into personal projects', async (_name, actor) => { - // - // ARRANGE - // - const sourceProject = await createTeamProject('Source Project', member); - const teamCredential = await saveCredential(randomCredentialPayload(), { - project: sourceProject, - }); - - const personalCredential = await saveCredential(randomCredentialPayload(), { user: member }); - - const destinationProject = anotherMemberPersonalProject; - - // - // ACT & ASSERT - // - await testServer - .authAgentFor(actor()) - .put(`/credentials/${teamCredential.id}/transfer`) - .send({ destinationProjectId: destinationProject.id }) - .expect(400); - await testServer - .authAgentFor(actor()) - .put(`/credentials/${personalCredential.id}/transfer`) - .send({ destinationProjectId: destinationProject.id }) - .expect(400); - }); }); function validateMainCredentialData(credential: ListQuery.Credentials.WithOwnedByAndSharedWith) { diff --git a/packages/cli/test/integration/execution.service.integration.test.ts b/packages/cli/test/integration/execution.service.integration.test.ts index 15d97f69ab..22d0d65754 100644 --- a/packages/cli/test/integration/execution.service.integration.test.ts +++ b/packages/cli/test/integration/execution.service.integration.test.ts @@ -70,6 +70,7 @@ describe('ExecutionService', () => { mode: expect.any(String), retryOf: null, status: expect.any(String), + createdAt: expect.any(String), startedAt: expect.any(String), stoppedAt: expect.any(String), waitTill: null, @@ -510,6 +511,7 @@ describe('ExecutionService', () => { mode: expect.any(String), retryOf: null, status: expect.any(String), + createdAt: expect.any(String), startedAt: expect.any(String), stoppedAt: 
expect.any(String), waitTill: null, diff --git a/packages/cli/test/integration/me.api.test.ts b/packages/cli/test/integration/me.api.test.ts index 2fc9b07870..a29f158a32 100644 --- a/packages/cli/test/integration/me.api.test.ts +++ b/packages/cli/test/integration/me.api.test.ts @@ -3,57 +3,25 @@ import type { IPersonalizationSurveyAnswersV4 } from 'n8n-workflow'; import { Container } from 'typedi'; import validator from 'validator'; -import type { ApiKey } from '@/databases/entities/api-key'; import type { User } from '@/databases/entities/user'; -import { ApiKeyRepository } from '@/databases/repositories/api-key.repository'; import { ProjectRepository } from '@/databases/repositories/project.repository'; import { UserRepository } from '@/databases/repositories/user.repository'; -import { PublicApiKeyService } from '@/services/public-api-key.service'; import { mockInstance } from '@test/mocking'; import { SUCCESS_RESPONSE_BODY } from './shared/constants'; -import { createOwnerWithApiKey, createUser, createUserShell } from './shared/db/users'; +import { createUser, createUserShell } from './shared/db/users'; import { randomEmail, randomName, randomValidPassword } from './shared/random'; import * as testDb from './shared/test-db'; import type { SuperAgentTest } from './shared/types'; import * as utils from './shared/utils/'; const testServer = utils.setupTestServer({ endpointGroups: ['me'] }); -let publicApiKeyService: PublicApiKeyService; - -beforeAll(() => { - publicApiKeyService = Container.get(PublicApiKeyService); -}); beforeEach(async () => { await testDb.truncate(['User']); mockInstance(GlobalConfig, { publicApi: { disabled: false } }); }); -describe('When public API is disabled', () => { - let owner: User; - let authAgent: SuperAgentTest; - - beforeEach(async () => { - owner = await createOwnerWithApiKey(); - - authAgent = testServer.authAgentFor(owner); - mockInstance(GlobalConfig, { publicApi: { disabled: true } }); - }); - - test('POST /me/api-keys should 
404', async () => { - await authAgent.post('/me/api-keys').expect(404); - }); - - test('GET /me/api-keys should 404', async () => { - await authAgent.get('/me/api-keys').expect(404); - }); - - test('DELETE /me/api-key/:id should 404', async () => { - await authAgent.delete(`/me/api-keys/${1}`).expect(404); - }); -}); - describe('Owner shell', () => { let ownerShell: User; let authOwnerShellAgent: SuperAgentTest; @@ -156,58 +124,6 @@ describe('Owner shell', () => { expect(storedShellOwner.personalizationAnswers).toEqual(validPayload); } }); - - test('POST /me/api-keys should create an api key', async () => { - const newApiKeyResponse = await authOwnerShellAgent.post('/me/api-keys'); - - const newApiKey = newApiKeyResponse.body.data as ApiKey; - - expect(newApiKeyResponse.statusCode).toBe(200); - expect(newApiKey).toBeDefined(); - - const newStoredApiKey = await Container.get(ApiKeyRepository).findOneByOrFail({ - userId: ownerShell.id, - }); - - expect(newStoredApiKey).toEqual({ - id: expect.any(String), - label: 'My API Key', - userId: ownerShell.id, - apiKey: newApiKey.apiKey, - createdAt: expect.any(Date), - updatedAt: expect.any(Date), - }); - }); - - test('GET /me/api-keys should fetch the api key redacted', async () => { - const newApiKeyResponse = await authOwnerShellAgent.post('/me/api-keys'); - - const retrieveAllApiKeysResponse = await authOwnerShellAgent.get('/me/api-keys'); - - expect(retrieveAllApiKeysResponse.statusCode).toBe(200); - - expect(retrieveAllApiKeysResponse.body.data[0]).toEqual({ - id: newApiKeyResponse.body.data.id, - label: 'My API Key', - userId: ownerShell.id, - apiKey: publicApiKeyService.redactApiKey(newApiKeyResponse.body.data.apiKey), - createdAt: expect.any(String), - updatedAt: expect.any(String), - }); - }); - - test('DELETE /me/api-keys/:id should delete the api key', async () => { - const newApiKeyResponse = await authOwnerShellAgent.post('/me/api-keys'); - - const deleteApiKeyResponse = await authOwnerShellAgent.delete( - 
`/me/api-keys/${newApiKeyResponse.body.data.id}`, - ); - - const retrieveAllApiKeysResponse = await authOwnerShellAgent.get('/me/api-keys'); - - expect(deleteApiKeyResponse.body.data.success).toBe(true); - expect(retrieveAllApiKeysResponse.body.data.length).toBe(0); - }); }); describe('Member', () => { @@ -318,61 +234,6 @@ describe('Member', () => { expect(storedAnswers).toEqual(validPayload); } }); - - test('POST /me/api-keys should create an api key', async () => { - const newApiKeyResponse = await testServer.authAgentFor(member).post('/me/api-keys'); - - expect(newApiKeyResponse.statusCode).toBe(200); - expect(newApiKeyResponse.body.data.apiKey).toBeDefined(); - expect(newApiKeyResponse.body.data.apiKey).not.toBeNull(); - - const newStoredApiKey = await Container.get(ApiKeyRepository).findOneByOrFail({ - userId: member.id, - }); - - expect(newStoredApiKey).toEqual({ - id: expect.any(String), - label: 'My API Key', - userId: member.id, - apiKey: newApiKeyResponse.body.data.apiKey, - createdAt: expect.any(Date), - updatedAt: expect.any(Date), - }); - }); - - test('GET /me/api-keys should fetch the api key redacted', async () => { - const newApiKeyResponse = await testServer.authAgentFor(member).post('/me/api-keys'); - - const retrieveAllApiKeysResponse = await testServer.authAgentFor(member).get('/me/api-keys'); - - expect(retrieveAllApiKeysResponse.statusCode).toBe(200); - - expect(retrieveAllApiKeysResponse.body.data[0]).toEqual({ - id: newApiKeyResponse.body.data.id, - label: 'My API Key', - userId: member.id, - apiKey: publicApiKeyService.redactApiKey(newApiKeyResponse.body.data.apiKey), - createdAt: expect.any(String), - updatedAt: expect.any(String), - }); - - expect(newApiKeyResponse.body.data.apiKey).not.toEqual( - retrieveAllApiKeysResponse.body.data[0].apiKey, - ); - }); - - test('DELETE /me/api-keys/:id should delete the api key', async () => { - const newApiKeyResponse = await testServer.authAgentFor(member).post('/me/api-keys'); - - const 
deleteApiKeyResponse = await testServer - .authAgentFor(member) - .delete(`/me/api-keys/${newApiKeyResponse.body.data.id}`); - - const retrieveAllApiKeysResponse = await testServer.authAgentFor(member).get('/me/api-keys'); - - expect(deleteApiKeyResponse.body.data.success).toBe(true); - expect(retrieveAllApiKeysResponse.body.data.length).toBe(0); - }); }); describe('Owner', () => { diff --git a/packages/cli/test/integration/pruning.service.test.ts b/packages/cli/test/integration/pruning.service.test.ts index 990d0aec3a..c4d1957de0 100644 --- a/packages/cli/test/integration/pruning.service.test.ts +++ b/packages/cli/test/integration/pruning.service.test.ts @@ -8,7 +8,7 @@ import { TIME } from '@/constants'; import type { ExecutionEntity } from '@/databases/entities/execution-entity'; import type { WorkflowEntity } from '@/databases/entities/workflow-entity'; import { ExecutionRepository } from '@/databases/repositories/execution.repository'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { PruningService } from '@/services/pruning.service'; import { diff --git a/packages/cli/test/integration/runners/task-runner-process.test.ts b/packages/cli/test/integration/runners/task-runner-process.test.ts new file mode 100644 index 0000000000..f517ee6398 --- /dev/null +++ b/packages/cli/test/integration/runners/task-runner-process.test.ts @@ -0,0 +1,91 @@ +import { GlobalConfig } from '@n8n/config'; +import Container from 'typedi'; + +import { TaskRunnerService } from '@/runners/runner-ws-server'; +import { TaskBroker } from '@/runners/task-broker.service'; +import { TaskRunnerProcess } from '@/runners/task-runner-process'; +import { TaskRunnerServer } from '@/runners/task-runner-server'; +import { retryUntil } from '@test-integration/retry-until'; + +describe('TaskRunnerProcess', () => { + const authToken = 'token'; + const globalConfig = Container.get(GlobalConfig); + globalConfig.taskRunners.authToken = authToken; + 
globalConfig.taskRunners.port = 0; // Use any port + const taskRunnerServer = Container.get(TaskRunnerServer); + + const runnerProcess = Container.get(TaskRunnerProcess); + const taskBroker = Container.get(TaskBroker); + const taskRunnerService = Container.get(TaskRunnerService); + + beforeAll(async () => { + await taskRunnerServer.start(); + // Set the port to the actually used port + globalConfig.taskRunners.port = taskRunnerServer.port; + }); + + afterAll(async () => { + await taskRunnerServer.stop(); + }); + + afterEach(async () => { + await runnerProcess.stop(); + }); + + const getNumConnectedRunners = () => taskRunnerService.runnerConnections.size; + const getNumRegisteredRunners = () => taskBroker.getKnownRunners().size; + + it('should start and connect the task runner', async () => { + // Act + await runnerProcess.start(); + + // Assert + expect(runnerProcess.isRunning).toBeTruthy(); + + // Wait until the runner has connected + await retryUntil(() => expect(getNumConnectedRunners()).toBe(1)); + expect(getNumRegisteredRunners()).toBe(1); + }); + + it('should stop an disconnect the task runner', async () => { + // Arrange + await runnerProcess.start(); + + // Wait until the runner has connected + await retryUntil(() => expect(getNumConnectedRunners()).toBe(1)); + expect(getNumRegisteredRunners()).toBe(1); + + // Act + await runnerProcess.stop(); + + // Assert + // Wait until the runner has disconnected + await retryUntil(() => expect(getNumConnectedRunners()).toBe(0)); + + expect(runnerProcess.isRunning).toBeFalsy(); + expect(getNumRegisteredRunners()).toBe(0); + }); + + it('should restart the task runner if it exits', async () => { + // Arrange + await runnerProcess.start(); + + // Wait until the runner has connected + await retryUntil(() => expect(getNumConnectedRunners()).toBe(1)); + const processId = runnerProcess.pid; + + // Act + // @ts-expect-error private property + runnerProcess.process?.kill('SIGKILL'); + + // Assert + // Wait until the runner is 
running again + await retryUntil(() => expect(runnerProcess.isRunning).toBeTruthy()); + expect(runnerProcess.pid).not.toBe(processId); + + // Wait until the runner has connected again + await retryUntil(() => expect(getNumConnectedRunners()).toBe(1)); + expect(getNumConnectedRunners()).toBe(1); + expect(getNumRegisteredRunners()).toBe(1); + }); +}); diff --git a/packages/cli/test/integration/security-audit/credentials-risk-reporter.test.ts b/packages/cli/test/integration/security-audit/credentials-risk-reporter.test.ts index 8da1f3e1bf..4513beb6bb 100644 --- a/packages/cli/test/integration/security-audit/credentials-risk-reporter.test.ts +++ b/packages/cli/test/integration/security-audit/credentials-risk-reporter.test.ts @@ -159,6 +159,7 @@ test('should report credential in not recently executed workflow', async () => { const savedExecution = await Container.get(ExecutionRepository).save({ finished: true, mode: 'manual', + createdAt: date, startedAt: date, stoppedAt: date, workflowId: workflow.id, @@ -227,6 +228,7 @@ test('should not report credentials in recently executed workflow', async () => const savedExecution = await Container.get(ExecutionRepository).save({ finished: true, mode: 'manual', + createdAt: date, startedAt: date, stoppedAt: date, workflowId: workflow.id, diff --git a/packages/cli/test/integration/shared/db/executions.ts b/packages/cli/test/integration/shared/db/executions.ts index e09ca44dfb..4dd0b4fa76 100644 --- a/packages/cli/test/integration/shared/db/executions.ts +++ b/packages/cli/test/integration/shared/db/executions.ts @@ -39,6 +39,7 @@ export async function createExecution( const execution = await Container.get(ExecutionRepository).save({ finished: finished ?? true, mode: mode ?? 'manual', + createdAt: new Date(), startedAt: startedAt ?? new Date(), ...(workflow !== undefined && { workflowId: workflow.id }), stoppedAt: stoppedAt ?? 
new Date(), diff --git a/packages/cli/test/integration/shared/retry-until.ts b/packages/cli/test/integration/shared/retry-until.ts new file mode 100644 index 0000000000..f469149b31 --- /dev/null +++ b/packages/cli/test/integration/shared/retry-until.ts @@ -0,0 +1,32 @@ +/** + * Retries the given assertion until it passes or the timeout is reached + * + * @example + * await retryUntil( + * () => expect(service.someState).toBe(true) + * ); + */ +export const retryUntil = async ( + assertion: () => Promise | void, + { interval = 20, timeout = 1000 } = {}, +) => { + return await new Promise((resolve, reject) => { + const startTime = Date.now(); + + const tryAgain = () => { + setTimeout(async () => { + try { + resolve(await assertion()); + } catch (error) { + if (Date.now() - startTime > timeout) { + reject(error); + } else { + tryAgain(); + } + } + }, interval); + }; + + tryAgain(); + }); +}; diff --git a/packages/cli/test/integration/shared/types.ts b/packages/cli/test/integration/shared/types.ts index 87f349fb79..8dc922dda2 100644 --- a/packages/cli/test/integration/shared/types.ts +++ b/packages/cli/test/integration/shared/types.ts @@ -40,7 +40,8 @@ type EndpointGroup = | 'debug' | 'project' | 'role' - | 'dynamic-node-parameters'; + | 'dynamic-node-parameters' + | 'apiKeys'; export interface SetupProps { endpointGroups?: EndpointGroup[]; diff --git a/packages/cli/test/integration/shared/utils/test-command.ts b/packages/cli/test/integration/shared/utils/test-command.ts index 82effd1818..d0737ddcc1 100644 --- a/packages/cli/test/integration/shared/utils/test-command.ts +++ b/packages/cli/test/integration/shared/utils/test-command.ts @@ -4,7 +4,7 @@ import type { Class } from 'n8n-core'; import type { BaseCommand } from '@/commands/base-command'; import { MessageEventBus } from '@/eventbus/message-event-bus/message-event-bus'; -import { TelemetryEventRelay } from '@/events/telemetry-event-relay'; +import { TelemetryEventRelay } from 
'@/events/relays/telemetry.event-relay'; import { mockInstance } from '@test/mocking'; import * as testDb from '../test-db'; diff --git a/packages/cli/test/integration/shared/utils/test-server.ts b/packages/cli/test/integration/shared/utils/test-server.ts index cb66b7868d..b69f21499a 100644 --- a/packages/cli/test/integration/shared/utils/test-server.ts +++ b/packages/cli/test/integration/shared/utils/test-server.ts @@ -11,7 +11,7 @@ import { AUTH_COOKIE_NAME } from '@/constants'; import type { User } from '@/databases/entities/user'; import { ControllerRegistry } from '@/decorators'; import { License } from '@/license'; -import { Logger } from '@/logger'; +import { Logger } from '@/logging/logger.service'; import { rawBodyReader, bodyParser } from '@/middlewares'; import { PostHogClient } from '@/posthog'; import { Push } from '@/push'; @@ -273,6 +273,10 @@ export const setupTestServer = ({ case 'dynamic-node-parameters': await import('@/controllers/dynamic-node-parameters.controller'); break; + + case 'apiKeys': + await import('@/controllers/api-keys.controller'); + break; } } diff --git a/packages/cli/test/integration/workflows/workflows.controller.ee.test.ts b/packages/cli/test/integration/workflows/workflows.controller.ee.test.ts index c8f2db889f..2002843bfe 100644 --- a/packages/cli/test/integration/workflows/workflows.controller.ee.test.ts +++ b/packages/cli/test/integration/workflows/workflows.controller.ee.test.ts @@ -5,6 +5,7 @@ import { v4 as uuid } from 'uuid'; import { ActiveWorkflowManager } from '@/active-workflow-manager'; import config from '@/config'; import type { Project } from '@/databases/entities/project'; +import type { ProjectRole } from '@/databases/entities/project-relation'; import type { User } from '@/databases/entities/user'; import { ProjectRepository } from '@/databases/repositories/project.repository'; import { WorkflowHistoryRepository } from '@/databases/repositories/workflow-history.repository'; @@ -1385,18 +1386,6 @@ 
describe('PUT /:workflowId/transfer', () => { .expect(400); }); - test('cannot transfer into a personal project', async () => { - const sourceProject = await createTeamProject('Team Project', member); - - const workflow = await createWorkflow({}, sourceProject); - - await testServer - .authAgentFor(member) - .put(`/workflows/${workflow.id}/transfer`) - .send({ destinationProjectId: memberPersonalProject.id }) - .expect(400); - }); - test('cannot transfer somebody elses workflow', async () => { const destinationProject = await createTeamProject('Team Project', member); @@ -1421,180 +1410,133 @@ describe('PUT /:workflowId/transfer', () => { .expect(404); }); - test('project:editors cannot transfer workflows', async () => { - // - // ARRANGE - // - const sourceProject = await createTeamProject(); - await linkUserToProject(member, sourceProject, 'project:editor'); - - const workflow = await createWorkflow({}, sourceProject); - - const destinationProject = await createTeamProject(); - await linkUserToProject(member, destinationProject, 'project:admin'); - - // - // ACT & ASSERT - // - await testServer - .authAgentFor(member) - .put(`/workflows/${workflow.id}/transfer`) - .send({ destinationProjectId: destinationProject.id }) - .expect(403); - }); - - test('transferring from a personal project to a team project severs all sharings', async () => { - // - // ARRANGE - // - const workflow = await createWorkflow({}, member); - - // these sharings should be deleted by the transfer - await shareWorkflowWithUsers(workflow, [anotherMember, owner]); - - const destinationProject = await createTeamProject('Team Project', member); - - // - // ACT - // - const response = await testServer - .authAgentFor(member) - .put(`/workflows/${workflow.id}/transfer`) - .send({ destinationProjectId: destinationProject.id }) - .expect(200); - - // - // ASSERT - // - expect(response.body).toEqual({}); - - const allSharings = await getWorkflowSharing(workflow); - expect(allSharings).toHaveLength(1); 
- expect(allSharings[0]).toMatchObject({ - projectId: destinationProject.id, - workflowId: workflow.id, - role: 'workflow:owner', - }); - }); - - test('can transfer from team to another team project', async () => { - // - // ARRANGE - // - const sourceProject = await createTeamProject('Team Project 1', member); - const workflow = await createWorkflow({}, sourceProject); - - const destinationProject = await createTeamProject('Team Project 2', member); - - // - // ACT - // - const response = await testServer - .authAgentFor(member) - .put(`/workflows/${workflow.id}/transfer`) - .send({ destinationProjectId: destinationProject.id }) - .expect(200); - - // - // ASSERT - // - expect(response.body).toEqual({}); - - const allSharings = await getWorkflowSharing(workflow); - expect(allSharings).toHaveLength(1); - expect(allSharings[0]).toMatchObject({ - projectId: destinationProject.id, - workflowId: workflow.id, - role: 'workflow:owner', - }); - }); - - test.each([ - ['owners', () => owner], - ['admins', () => admin], - ])( - 'global %s can always transfer from any personal or team project into any team project', - async (_name, actor) => { + test.each(['project:editor', 'project:viewer'])( + '%ss cannot transfer workflows', + async (projectRole) => { // // ARRANGE // - const sourceProject = await createTeamProject('Source Project', member); - const teamWorkflow = await createWorkflow({}, sourceProject); + const sourceProject = await createTeamProject(); + await linkUserToProject(member, sourceProject, projectRole); - const personalWorkflow = await createWorkflow({}, member); + const workflow = await createWorkflow({}, sourceProject); - const destinationProject = await createTeamProject('Destination Project', member); + const destinationProject = await createTeamProject(); + await linkUserToProject(member, destinationProject, 'project:admin'); // - // ACT + // ACT & ASSERT // - const response1 = await testServer - .authAgentFor(actor()) - 
.put(`/workflows/${teamWorkflow.id}/transfer`) + await testServer + .authAgentFor(member) + .put(`/workflows/${workflow.id}/transfer`) .send({ destinationProjectId: destinationProject.id }) - .expect(200); - const response2 = await testServer - .authAgentFor(actor()) - .put(`/workflows/${personalWorkflow.id}/transfer`) - .send({ destinationProjectId: destinationProject.id }) - .expect(200); - - // - // ASSERT - // - expect(response1.body).toEqual({}); - expect(response2.body).toEqual({}); - - { - const allSharings = await getWorkflowSharing(teamWorkflow); - expect(allSharings).toHaveLength(1); - expect(allSharings[0]).toMatchObject({ - projectId: destinationProject.id, - workflowId: teamWorkflow.id, - role: 'workflow:owner', - }); - } - - { - const allSharings = await getWorkflowSharing(personalWorkflow); - expect(allSharings).toHaveLength(1); - expect(allSharings[0]).toMatchObject({ - projectId: destinationProject.id, - workflowId: personalWorkflow.id, - role: 'workflow:owner', - }); - } + .expect(403); }, ); - test.each([ - ['owners', () => owner], - ['admins', () => admin], - ])('global %s cannot transfer into personal projects', async (_name, actor) => { - // - // ARRANGE - // - const sourceProject = await createTeamProject('Source Project', member); - const teamWorkflow = await createWorkflow({}, sourceProject); + test.each< + [ + // user role + 'owners' | 'admins', + // source project type + 'team' | 'personal', + // destination project type + 'team' | 'personal', + // actor + () => User, + // source project + () => Promise | Project, + // destination project + () => Promise | Project, + ] + >([ + // owner + [ + 'owners', + 'team', + 'team', + () => owner, + async () => await createTeamProject('Source Project'), + async () => await createTeamProject('Destination Project'), + ], + [ + 'owners', + 'team', + 'personal', + () => owner, + async () => await createTeamProject('Source Project'), + () => memberPersonalProject, + ], + [ + 'owners', + 'personal', + 
'team', + () => owner, + () => memberPersonalProject, + async () => await createTeamProject('Destination Project'), + ], - const personalWorkflow = await createWorkflow({}, member); + // admin + [ + 'admins', + 'team', + 'team', + () => admin, + async () => await createTeamProject('Source Project'), + async () => await createTeamProject('Destination Project'), + ], + [ + 'admins', + 'team', + 'personal', + () => admin, + async () => await createTeamProject('Source Project'), + () => memberPersonalProject, + ], + [ + 'admins', + 'personal', + 'team', + () => admin, + () => memberPersonalProject, + async () => await createTeamProject('Destination Project'), + ], + ])( + 'global %s can transfer workflows from a %s project to a %s project', + async ( + _roleName, + _sourceProjectName, + _destinationProjectName, + getActor, + getSourceProject, + getDestinationProject, + ) => { + // ARRANGE + const actor = getActor(); + const sourceProject = await getSourceProject(); + const destinationProject = await getDestinationProject(); + const workflow = await createWorkflow({}, sourceProject); - const destinationProject = anotherMemberPersonalProject; + // ACT + const response = await testServer + .authAgentFor(actor) + .put(`/workflows/${workflow.id}/transfer`) + .send({ destinationProjectId: destinationProject.id }) + .expect(200); - // - // ACT & ASSERT - // - await testServer - .authAgentFor(actor()) - .put(`/workflows/${teamWorkflow.id}/transfer`) - .send({ destinationProjectId: destinationProject.id }) - .expect(400); - await testServer - .authAgentFor(actor()) - .put(`/workflows/${personalWorkflow.id}/transfer`) - .send({ destinationProjectId: destinationProject.id }) - .expect(400); - }); + // ASSERT + expect(response.body).toEqual({}); + + const allSharings = await getWorkflowSharing(workflow); + expect(allSharings).toHaveLength(1); + expect(allSharings[0]).toMatchObject({ + projectId: destinationProject.id, + workflowId: workflow.id, + role: 'workflow:owner', + }); + }, 
+ ); test('removes and re-adds the workflow from the active workflow manager during the transfer', async () => { // diff --git a/packages/core/.eslintrc.js b/packages/core/.eslintrc.js index cd962bad5c..9bfae8a9eb 100644 --- a/packages/core/.eslintrc.js +++ b/packages/core/.eslintrc.js @@ -18,7 +18,6 @@ module.exports = { complexity: 'error', // TODO: Remove this - 'import/order': 'off', '@typescript-eslint/ban-ts-comment': ['error', { 'ts-ignore': true }], }, }; diff --git a/packages/core/package.json b/packages/core/package.json index 74e3483943..50762d8c14 100644 --- a/packages/core/package.json +++ b/packages/core/package.json @@ -1,6 +1,6 @@ { "name": "n8n-core", - "version": "1.61.0", + "version": "1.62.1", "description": "Core functionality of n8n", "main": "dist/index", "types": "dist/index.d.ts", diff --git a/packages/core/src/ActiveWorkflows.ts b/packages/core/src/ActiveWorkflows.ts index bfc6319626..93e67488d5 100644 --- a/packages/core/src/ActiveWorkflows.ts +++ b/packages/core/src/ActiveWorkflows.ts @@ -1,5 +1,3 @@ -import { Service } from 'typedi'; - import type { IGetExecutePollFunctions, IGetExecuteTriggerFunctions, @@ -20,9 +18,10 @@ import { WorkflowActivationError, WorkflowDeactivationError, } from 'n8n-workflow'; +import { Service } from 'typedi'; -import { ScheduledTaskManager } from './ScheduledTaskManager'; import type { IWorkflowData } from './Interfaces'; +import { ScheduledTaskManager } from './ScheduledTaskManager'; @Service() export class ActiveWorkflows { diff --git a/packages/core/src/Agent/index.ts b/packages/core/src/Agent/index.ts new file mode 100644 index 0000000000..75195b6acf --- /dev/null +++ b/packages/core/src/Agent/index.ts @@ -0,0 +1,58 @@ +import type { + IExecuteFunctions, + Workflow, + IRunExecutionData, + INodeExecutionData, + ITaskDataConnections, + INode, + IWorkflowExecuteAdditionalData, + WorkflowExecuteMode, + INodeParameters, + IExecuteData, + IDataObject, +} from 'n8n-workflow'; + +export const 
createAgentStartJob = ( + additionalData: IWorkflowExecuteAdditionalData, + inputData: ITaskDataConnections, + node: INode, + workflow: Workflow, + runExecutionData: IRunExecutionData, + runIndex: number, + activeNodeName: string, + connectionInputData: INodeExecutionData[], + siblingParameters: INodeParameters, + mode: WorkflowExecuteMode, + executeData?: IExecuteData, + defaultReturnRunIndex?: number, + selfData?: IDataObject, + contextNodeName?: string, +): IExecuteFunctions['startJob'] => { + return async function startJob( + this: IExecuteFunctions, + jobType: string, + settings: unknown, + itemIndex: number, + ): Promise { + return await additionalData.startAgentJob( + additionalData, + jobType, + settings, + this, + inputData, + node, + workflow, + runExecutionData, + runIndex, + itemIndex, + activeNodeName, + connectionInputData, + siblingParameters, + mode, + executeData, + defaultReturnRunIndex, + selfData, + contextNodeName, + ); + }; +}; diff --git a/packages/core/src/BinaryData/BinaryData.service.ts b/packages/core/src/BinaryData/BinaryData.service.ts index 20903aa71c..556a3176de 100644 --- a/packages/core/src/BinaryData/BinaryData.service.ts +++ b/packages/core/src/BinaryData/BinaryData.service.ts @@ -1,14 +1,14 @@ +import { BINARY_ENCODING } from 'n8n-workflow'; +import type { INodeExecutionData, IBinaryData } from 'n8n-workflow'; import { readFile, stat } from 'node:fs/promises'; import prettyBytes from 'pretty-bytes'; -import Container, { Service } from 'typedi'; -import { BINARY_ENCODING } from 'n8n-workflow'; -import { InvalidModeError } from '../errors/invalid-mode.error'; -import { areConfigModes, binaryToBuffer } from './utils'; - import type { Readable } from 'stream'; +import Container, { Service } from 'typedi'; + import type { BinaryData } from './types'; -import type { INodeExecutionData, IBinaryData } from 'n8n-workflow'; +import { areConfigModes, binaryToBuffer } from './utils'; import { InvalidManagerError } from 
'../errors/invalid-manager.error'; +import { InvalidModeError } from '../errors/invalid-mode.error'; @Service() export class BinaryDataService { diff --git a/packages/core/src/BinaryData/FileSystem.manager.ts b/packages/core/src/BinaryData/FileSystem.manager.ts index 5b7250d9eb..f49e6e5c02 100644 --- a/packages/core/src/BinaryData/FileSystem.manager.ts +++ b/packages/core/src/BinaryData/FileSystem.manager.ts @@ -1,13 +1,13 @@ +import { jsonParse } from 'n8n-workflow'; import { createReadStream } from 'node:fs'; import fs from 'node:fs/promises'; import path from 'node:path'; +import type { Readable } from 'stream'; import { v4 as uuid } from 'uuid'; -import { jsonParse } from 'n8n-workflow'; + +import type { BinaryData } from './types'; import { assertDir, doesNotExist } from './utils'; import { DisallowedFilepathError } from '../errors/disallowed-filepath.error'; - -import type { Readable } from 'stream'; -import type { BinaryData } from './types'; import { FileNotFoundError } from '../errors/file-not-found.error'; const EXECUTION_ID_EXTRACTOR = diff --git a/packages/core/src/BinaryData/ObjectStore.manager.ts b/packages/core/src/BinaryData/ObjectStore.manager.ts index 6f7bb3ef29..65827d4ef0 100644 --- a/packages/core/src/BinaryData/ObjectStore.manager.ts +++ b/packages/core/src/BinaryData/ObjectStore.manager.ts @@ -1,12 +1,12 @@ import fs from 'node:fs/promises'; +import type { Readable } from 'node:stream'; import { Service } from 'typedi'; import { v4 as uuid } from 'uuid'; + +import type { BinaryData } from './types'; import { binaryToBuffer } from './utils'; import { ObjectStoreService } from '../ObjectStore/ObjectStore.service.ee'; -import type { Readable } from 'node:stream'; -import type { BinaryData } from './types'; - @Service() export class ObjectStoreManager implements BinaryData.Manager { constructor(private readonly objectStoreService: ObjectStoreService) {} diff --git a/packages/core/src/BinaryData/utils.ts b/packages/core/src/BinaryData/utils.ts 
index c64cb4315b..bedda5be12 100644 --- a/packages/core/src/BinaryData/utils.ts +++ b/packages/core/src/BinaryData/utils.ts @@ -1,7 +1,8 @@ +import concatStream from 'concat-stream'; import fs from 'node:fs/promises'; import type { Readable } from 'node:stream'; + import type { BinaryData } from './types'; -import concatStream from 'concat-stream'; export const CONFIG_MODES = ['default', 'filesystem', 's3'] as const; diff --git a/packages/core/src/Cipher.ts b/packages/core/src/Cipher.ts index 4e1b649bed..d9ed93ddb6 100644 --- a/packages/core/src/Cipher.ts +++ b/packages/core/src/Cipher.ts @@ -1,5 +1,6 @@ -import { Service } from 'typedi'; import { createHash, createCipheriv, createDecipheriv, randomBytes } from 'crypto'; +import { Service } from 'typedi'; + import { InstanceSettings } from './InstanceSettings'; // Data encrypted by CryptoJS always starts with these bytes diff --git a/packages/core/src/CreateNodeAsTool.ts b/packages/core/src/CreateNodeAsTool.ts index 21e1b6352a..1466f57d09 100644 --- a/packages/core/src/CreateNodeAsTool.ts +++ b/packages/core/src/CreateNodeAsTool.ts @@ -1,296 +1,435 @@ -/** - * @module NodeAsTool - * @description This module converts n8n nodes into LangChain tools by analyzing node parameters, - * identifying placeholders, and generating a Zod schema. It then creates a DynamicStructuredTool - * that can be used in LangChain workflows. - * - * General approach: - * 1. Recursively traverse node parameters to find placeholders, including in nested structures - * 2. Generate a Zod schema based on these placeholders, preserving the nested structure - * 3. 
Create a DynamicStructuredTool with the schema and a function that executes the n8n node - * - * Example: - * - Node parameters: - * { - * "inputText": "{{ '__PLACEHOLDER: Enter main text to process' }}", - * "options": { - * "language": "{{ '__PLACEHOLDER: Specify language' }}", - * "advanced": { - * "maxLength": "{{ '__PLACEHOLDER: Enter maximum length' }}" - * } - * } - * } - * - * - Generated Zod schema: - * z.object({ - * "inputText": z.string().describe("Enter main text to process"), - * "options__language": z.string().describe("Specify language"), - * "options__advanced__maxLength": z.string().describe("Enter maximum length") - * }).required() - * - * - Resulting tool can be called with: - * { - * "inputText": "Hello, world!", - * "options__language": "en", - * "options__advanced__maxLength": "100" - * } - * - * Note: Nested properties are flattened with double underscores in the schema, - * but the tool reconstructs the original nested structure when executing the node. - */ - import { DynamicStructuredTool } from '@langchain/core/tools'; -import { - NodeConnectionType, - type IExecuteFunctions, - type INodeParameters, - type INodeType, -} from 'n8n-workflow'; +import type { IExecuteFunctions, INodeParameters, INodeType } from 'n8n-workflow'; +import { jsonParse, NodeConnectionType, NodeOperationError } from 'n8n-workflow'; import { z } from 'zod'; -/** Represents a nested object structure */ -type NestedObject = { [key: string]: unknown }; - -/** - * Encodes a dot-notated key to a format safe for use as an object key. - * @param {string} key - The dot-notated key to encode. - * @returns {string} The encoded key. 
- */ -function encodeDotNotation(key: string): string { - // Replace dots with double underscores, then handle special case for '__value' for complicated params - return key.replace(/\./g, '__').replace('__value', ''); +type AllowedTypes = 'string' | 'number' | 'boolean' | 'json'; +interface FromAIArgument { + key: string; + description?: string; + type?: AllowedTypes; + defaultValue?: string | number | boolean | Record; } /** - * Decodes an encoded key back to its original dot-notated form. - * @param {string} key - The encoded key to decode. - * @returns {string} The decoded, dot-notated key. + * AIParametersParser + * + * This class encapsulates the logic for parsing node parameters, extracting $fromAI calls, + * generating Zod schemas, and creating LangChain tools. */ -function decodeDotNotation(key: string): string { - // Simply replace double underscores with dots - return key.replace(/__/g, '.'); -} +class AIParametersParser { + private ctx: IExecuteFunctions; -/** - * Recursively traverses an object to find placeholder values. - * @param {NestedObject} obj - The object to traverse. - * @param {string[]} path - The current path in the object. - * @param {Map} results - Map to store found placeholders. - * @returns {Map} Updated map of placeholders. - */ -function traverseObject( - obj: NestedObject, - path: string[] = [], - results: Map = new Map(), -): Map { - for (const [key, value] of Object.entries(obj)) { - const currentPath = [...path, key]; - const fullPath = currentPath.join('.'); + /** + * Constructs an instance of AIParametersParser. + * @param ctx The execution context. 
+ */ + constructor(ctx: IExecuteFunctions) { + this.ctx = ctx; + } - if (typeof value === 'string' && value.startsWith("{{ '__PLACEHOLDER")) { - // Store placeholder values with their full path - results.set(encodeDotNotation(fullPath), value); - } else if (Array.isArray(value)) { - // Recursively traverse arrays - // eslint-disable-next-line @typescript-eslint/no-use-before-define - traverseArray(value, currentPath, results); - } else if (typeof value === 'object' && value !== null) { - // Recursively traverse nested objects, but only if they're not empty - if (Object.keys(value).length > 0) { - traverseObject(value as NestedObject, currentPath, results); - } + /** + * Generates a Zod schema based on the provided FromAIArgument placeholder. + * @param placeholder The FromAIArgument object containing key, type, description, and defaultValue. + * @returns A Zod schema corresponding to the placeholder's type and constraints. + */ + private generateZodSchema(placeholder: FromAIArgument): z.ZodTypeAny { + let schema: z.ZodTypeAny; + + switch (placeholder.type?.toLowerCase()) { + case 'string': + schema = z.string(); + break; + case 'number': + schema = z.number(); + break; + case 'boolean': + schema = z.boolean(); + break; + case 'json': + schema = z.record(z.any()); + break; + default: + schema = z.string(); + } + + if (placeholder.description) { + schema = schema.describe(`${schema.description ?? ''} ${placeholder.description}`.trim()); + } + + if (placeholder.defaultValue !== undefined) { + schema = schema.default(placeholder.defaultValue); + } + + return schema; + } + + /** + * Recursively traverses the nodeParameters object to find all $fromAI calls. + * @param payload The current object or value being traversed. + * @param collectedArgs The array collecting FromAIArgument objects. 
+ */ + private traverseNodeParameters(payload: unknown, collectedArgs: FromAIArgument[]) { + if (typeof payload === 'string') { + const fromAICalls = this.extractFromAICalls(payload); + fromAICalls.forEach((call) => collectedArgs.push(call)); + } else if (Array.isArray(payload)) { + payload.forEach((item: unknown) => this.traverseNodeParameters(item, collectedArgs)); + } else if (typeof payload === 'object' && payload !== null) { + Object.values(payload).forEach((value) => this.traverseNodeParameters(value, collectedArgs)); } } - return results; -} + /** + * Extracts all $fromAI calls from a given string + * @param str The string to search for $fromAI calls. + * @returns An array of FromAIArgument objects. + * + * This method uses a regular expression to find the start of each $fromAI function call + * in the input string. It then employs a character-by-character parsing approach to + * accurately extract the arguments of each call, handling nested parentheses and quoted strings. + * + * The parsing process: + * 1. Finds the starting position of a $fromAI call using regex. + * 2. Iterates through characters, keeping track of parentheses depth and quote status. + * 3. Handles escaped characters within quotes to avoid premature quote closing. + * 4. Builds the argument string until the matching closing parenthesis is found. + * 5. Parses the extracted argument string into a FromAIArgument object. + * 6. Repeats the process for all $fromAI calls in the input string. + * + */ + private extractFromAICalls(str: string): FromAIArgument[] { + const args: FromAIArgument[] = []; + // Regular expression to match the start of a $fromAI function call + const pattern = /\$fromAI\s*\(\s*/gi; + let match: RegExpExecArray | null; -/** - * Recursively traverses an array to find placeholder values. - * @param {unknown[]} arr - The array to traverse. - * @param {string[]} path - The current path in the array. - * @param {Map} results - Map to store found placeholders. 
- */ -function traverseArray(arr: unknown[], path: string[], results: Map): void { - arr.forEach((item, index) => { - const currentPath = [...path, index.toString()]; - const fullPath = currentPath.join('.'); + while ((match = pattern.exec(str)) !== null) { + const startIndex = match.index + match[0].length; + let current = startIndex; + let inQuotes = false; + let quoteChar = ''; + let parenthesesCount = 1; + let argsString = ''; - if (typeof item === 'string' && item.startsWith("{{ '__PLACEHOLDER")) { - // Store placeholder values with their full path - results.set(encodeDotNotation(fullPath), item); - } else if (Array.isArray(item)) { - // Recursively traverse nested arrays - traverseArray(item, currentPath, results); - } else if (typeof item === 'object' && item !== null) { - // Recursively traverse nested objects - traverseObject(item as NestedObject, currentPath, results); - } - }); -} + // Parse the arguments string, handling nested parentheses and quotes + while (current < str.length && parenthesesCount > 0) { + const char = str[current]; -/** - * Builds a nested object structure from matching keys and their values. - * @param {string} baseKey - The base key to start building from. - * @param {string[]} matchingKeys - Array of matching keys. - * @param {Record} values - Object containing values for the keys. - * @returns {Record} The built nested object structure. 
- */ -function buildStructureFromMatches( - baseKey: string, - matchingKeys: string[], - values: Record, -): Record { - const result = {}; + if (inQuotes) { + // Handle characters inside quotes, including escaped characters + if (char === '\\' && current + 1 < str.length) { + argsString += char + str[current + 1]; + current += 2; + continue; + } - for (const matchingKey of matchingKeys) { - const decodedKey = decodeDotNotation(matchingKey); - // Extract the part of the key after the base key - const remainingPath = decodedKey - .slice(baseKey.length) - .split('.') - .filter((k) => k !== ''); - let current: Record = result; + if (char === quoteChar) { + inQuotes = false; + quoteChar = ''; + } + argsString += char; + } else { + // Handle characters outside quotes + if (['"', "'", '`'].includes(char)) { + inQuotes = true; + quoteChar = char; + } else if (char === '(') { + parenthesesCount++; + } else if (char === ')') { + parenthesesCount--; + } - // Build the nested structure - for (let i = 0; i < remainingPath.length - 1; i++) { - if (!(remainingPath[i] in current)) { - current[remainingPath[i]] = {}; + // Only add characters if we're still inside the main parentheses + if (parenthesesCount > 0 || char !== ')') { + argsString += char; + } + } + + current++; + } + + // If parentheses are balanced, parse the arguments + if (parenthesesCount === 0) { + try { + const parsedArgs = this.parseArguments(argsString); + args.push(parsedArgs); + } catch (error) { + // If parsing fails, throw an ApplicationError with details + throw new NodeOperationError( + this.ctx.getNode(), + `Failed to parse $fromAI arguments: ${argsString}: ${error}`, + ); + } + } else { + // Log an error if parentheses are unbalanced + throw new NodeOperationError( + this.ctx.getNode(), + `Unbalanced parentheses while parsing $fromAI call: ${str.slice(startIndex)}`, + ); } - current = current[remainingPath[i]] as Record; } - // Set the value at the deepest level - const lastKey = 
remainingPath[remainingPath.length - 1]; - current[lastKey ?? matchingKey] = values[matchingKey]; + return args; } - // If no nested structure was created, return the direct value - return Object.keys(result).length === 0 ? values[encodeDotNotation(baseKey)] : result; + /** + * Parses the arguments of a single $fromAI function call. + * @param argsString The string containing the function arguments. + * @returns A FromAIArgument object. + */ + private parseArguments(argsString: string): FromAIArgument { + // Split arguments by commas not inside quotes + const args: string[] = []; + let currentArg = ''; + let inQuotes = false; + let quoteChar = ''; + let escapeNext = false; + + for (let i = 0; i < argsString.length; i++) { + const char = argsString[i]; + + if (escapeNext) { + currentArg += char; + escapeNext = false; + continue; + } + + if (char === '\\') { + escapeNext = true; + continue; + } + + if (['"', "'", '`'].includes(char)) { + if (!inQuotes) { + inQuotes = true; + quoteChar = char; + currentArg += char; + } else if (char === quoteChar) { + inQuotes = false; + quoteChar = ''; + currentArg += char; + } else { + currentArg += char; + } + continue; + } + + if (char === ',' && !inQuotes) { + args.push(currentArg.trim()); + currentArg = ''; + continue; + } + + currentArg += char; + } + + if (currentArg) { + args.push(currentArg.trim()); + } + + // Remove surrounding quotes if present + const cleanArgs = args.map((arg) => { + const trimmed = arg.trim(); + if ( + (trimmed.startsWith("'") && trimmed.endsWith("'")) || + (trimmed.startsWith('`') && trimmed.endsWith('`')) || + (trimmed.startsWith('"') && trimmed.endsWith('"')) + ) { + return trimmed + .slice(1, -1) + .replace(/\\'/g, "'") + .replace(/\\`/g, '`') + .replace(/\\"/g, '"') + .replace(/\\\\/g, '\\'); + } + return trimmed; + }); + + const type = cleanArgs?.[2] || 'string'; + + if (!['string', 'number', 'boolean', 'json'].includes(type.toLowerCase())) { + throw new NodeOperationError(this.ctx.getNode(), 
`Invalid type: ${type}`); + } + + return { + key: cleanArgs[0] || '', + description: cleanArgs[1], + type: (cleanArgs?.[2] ?? 'string') as AllowedTypes, + defaultValue: this.parseDefaultValue(cleanArgs[3]), + }; + } + + /** + * Parses the default value, preserving its original type. + * @param value The default value as a string. + * @returns The parsed default value in its appropriate type. + */ + private parseDefaultValue( + value: string | undefined, + ): string | number | boolean | Record | undefined { + if (value === undefined || value === '') return undefined; + const lowerValue = value.toLowerCase(); + if (lowerValue === 'true') return true; + if (lowerValue === 'false') return false; + if (!isNaN(Number(value))) return Number(value); + try { + return jsonParse(value); + } catch { + return value; + } + } + + /** + * Generates a description for a node based on the provided parameters. + * @param node The node type. + * @param nodeParameters The parameters of the node. + * @returns A string description for the node. + */ + private getDescription(node: INodeType, nodeParameters: INodeParameters): string { + const manualDescription = nodeParameters.toolDescription as string; + + if (nodeParameters.descriptionType === 'auto') { + const resource = nodeParameters.resource as string; + const operation = nodeParameters.operation as string; + let description = node.description.description; + if (resource) { + description += `\n Resource: ${resource}`; + } + if (operation) { + description += `\n Operation: ${operation}`; + } + return description.trim(); + } + if (nodeParameters.descriptionType === 'manual') { + return manualDescription ?? node.description.description; + } + + return node.description.description; + } + + /** + * Creates a DynamicStructuredTool from a node. + * @param node The node type. + * @param nodeParameters The parameters of the node. + * @returns A DynamicStructuredTool instance. 
+ */ + public createTool(node: INodeType, nodeParameters: INodeParameters): DynamicStructuredTool { + const collectedArguments: FromAIArgument[] = []; + this.traverseNodeParameters(nodeParameters, collectedArguments); + + // Validate each collected argument + const nameValidationRegex = /^[a-zA-Z0-9_-]{1,64}$/; + const keyMap = new Map(); + for (const argument of collectedArguments) { + if (argument.key.length === 0 || !nameValidationRegex.test(argument.key)) { + const isEmptyError = 'You must specify a key when using $fromAI()'; + const isInvalidError = `Parameter key \`${argument.key}\` is invalid`; + const error = new Error(argument.key.length === 0 ? isEmptyError : isInvalidError); + throw new NodeOperationError(this.ctx.getNode(), error, { + description: + 'Invalid parameter key, must be between 1 and 64 characters long and only contain letters, numbers, underscores, and hyphens', + }); + } + + if (keyMap.has(argument.key)) { + // If the key already exists in the Map + const existingArg = keyMap.get(argument.key)!; + + // Check if the existing argument has the same description and type + if ( + existingArg.description !== argument.description || + existingArg.type !== argument.type + ) { + // If not, throw an error for inconsistent duplicate keys + throw new NodeOperationError( + this.ctx.getNode(), + `Duplicate key '${argument.key}' found with different description or type`, + { + description: + 'Ensure all $fromAI() calls with the same key have consistent descriptions and types', + }, + ); + } + // If the duplicate key has consistent description and type, it's allowed (no action needed) + } else { + // If the key doesn't exist in the Map, add it + keyMap.set(argument.key, argument); + } + } + + // Remove duplicate keys, latest occurrence takes precedence + const uniqueArgsMap = collectedArguments.reduce((map, arg) => { + map.set(arg.key, arg); + return map; + }, new Map()); + + const uniqueArguments = Array.from(uniqueArgsMap.values()); + + // Generate Zod 
schema from unique arguments + const schemaObj = uniqueArguments.reduce((acc: Record, placeholder) => { + acc[placeholder.key] = this.generateZodSchema(placeholder); + return acc; + }, {}); + + const schema = z.object(schemaObj).required(); + const description = this.getDescription(node, nodeParameters); + const nodeName = this.ctx.getNode().name.replace(/ /g, '_'); + const name = nodeName || node.description.name; + + const tool = new DynamicStructuredTool({ + name, + description, + schema, + func: async (functionArgs: z.infer) => { + const { index } = this.ctx.addInputData(NodeConnectionType.AiTool, [ + [{ json: functionArgs }], + ]); + + try { + // Execute the node with the proxied context + const result = await node.execute?.bind(this.ctx)(); + + // Process and map the results + const mappedResults = result?.[0]?.flatMap((item) => item.json); + + // Add output data to the context + this.ctx.addOutputData(NodeConnectionType.AiTool, index, [ + [{ json: { response: mappedResults } }], + ]); + + // Return the stringified results + return JSON.stringify(mappedResults); + } catch (error) { + const nodeError = new NodeOperationError(this.ctx.getNode(), error as Error); + this.ctx.addOutputData(NodeConnectionType.AiTool, index, nodeError); + return 'Error during node execution: ' + nodeError.description; + } + }, + }); + + return tool; + } } /** - * Extracts the description from a placeholder string. - * @param {string} value - The placeholder string. - * @returns {string} The extracted description or a default message. - */ -function extractPlaceholderDescription(value: string): string { - const match = value.match(/{{ '__PLACEHOLDER:\s*(.+?)\s*' }}/); - return match ? match[1] : 'No description provided'; -} - -/** - * Creates a DynamicStructuredTool from an n8n node. - * @param {INodeType} node - The n8n node to convert. - * @param {IExecuteFunctions} ctx - The execution context. - * @param {INodeParameters} nodeParameters - The node parameters. 
- * @returns {DynamicStructuredTool} The created tool. + * Converts node into LangChain tool by analyzing node parameters, + * identifying placeholders using the $fromAI function, and generating a Zod schema. It then creates + * a DynamicStructuredTool that can be used in LangChain workflows. + * + * @param ctx The execution context. + * @param node The node type. + * @param nodeParameters The parameters of the node. + * @returns An object containing the DynamicStructuredTool instance. */ export function createNodeAsTool( - node: INodeType, - ctx: IExecuteFunctions, - nodeParameters: INodeParameters, -): DynamicStructuredTool { - // Find all placeholder values in the node parameters - const placeholderValues = traverseObject(nodeParameters); - - // Generate Zod schema from placeholder values - const schemaObj: { [key: string]: z.ZodString } = {}; - for (const [key, value] of placeholderValues.entries()) { - const description = extractPlaceholderDescription(value); - schemaObj[key] = z.string().describe(description); - } - const schema = z.object(schemaObj).required(); - - // Get the tool description from node parameters or use the default - const toolDescription = ctx.getNodeParameter( - 'toolDescription', - 0, - node.description.description, - ) as string; - type GetNodeParameterMethod = IExecuteFunctions['getNodeParameter']; - - const tool = new DynamicStructuredTool({ - name: node.description.name, - description: toolDescription ? 
toolDescription : node.description.description, - schema, - func: async (functionArgs: z.infer) => { - // Create a proxy for ctx to soft-override parameters with values from the LLM - const ctxProxy = new Proxy(ctx, { - get(target: IExecuteFunctions, prop: string | symbol, receiver: unknown) { - if (prop === 'getNodeParameter') { - // Override getNodeParameter method - // eslint-disable-next-line @typescript-eslint/unbound-method - return new Proxy(target.getNodeParameter, { - apply( - targetMethod: GetNodeParameterMethod, - thisArg: unknown, - argumentsList: Parameters, - ): ReturnType { - const [key] = argumentsList; - if (typeof key !== 'string') { - // If key is not a string, use the original method - return Reflect.apply(targetMethod, thisArg, argumentsList); - } - - const encodedKey = encodeDotNotation(key); - // Check if the full key or any more specific key is a placeholder - const matchingKeys = Array.from(placeholderValues.keys()).filter((k) => - k.startsWith(encodedKey), - ); - - if (matchingKeys.length > 0) { - // If there are matching keys, build the structure using args - const res = buildStructureFromMatches(encodedKey, matchingKeys, functionArgs); - // Return either the specific value or the entire built structure - return res?.[decodeDotNotation(key)] ?? 
res; - } - - // If no placeholder is found, use the original function - return Reflect.apply(targetMethod, thisArg, argumentsList); - }, - }); - } - // eslint-disable-next-line @typescript-eslint/no-unsafe-return - return Reflect.get(target, prop, receiver); - }, - }); - - // Add input data to the context - ctxProxy.addInputData(NodeConnectionType.AiTool, [[{ json: functionArgs }]]); - - // Execute the node with the proxied context - const result = await node.execute?.bind(ctxProxy)(); - - // Process and map the results - const mappedResults = result?.[0]?.flatMap((item) => item.json); - - // Add output data to the context - ctxProxy.addOutputData(NodeConnectionType.AiTool, 0, [ - [{ json: { response: mappedResults } }], - ]); - - // Return the stringified results - return JSON.stringify(mappedResults); - }, - }); - - return tool; -} - -/** - * Asynchronously creates a DynamicStructuredTool from an n8n node. - * @param {IExecuteFunctions} ctx - The execution context. - * @param {INodeType} node - The n8n node to convert. - * @param {INodeParameters} nodeParameters - The node parameters. - * @returns {Promise<{response: DynamicStructuredTool}>} A promise that resolves to an object containing the created tool. 
- */ -export function getNodeAsTool( ctx: IExecuteFunctions, node: INodeType, nodeParameters: INodeParameters, ) { + const parser = new AIParametersParser(ctx); + return { - response: createNodeAsTool(node, ctx, nodeParameters), + response: parser.createTool(node, nodeParameters), }; } diff --git a/packages/core/src/Credentials.ts b/packages/core/src/Credentials.ts index 3210703a27..da6deb742c 100644 --- a/packages/core/src/Credentials.ts +++ b/packages/core/src/Credentials.ts @@ -1,6 +1,7 @@ -import { Container } from 'typedi'; import type { ICredentialDataDecryptedObject, ICredentialsEncrypted } from 'n8n-workflow'; import { ApplicationError, ICredentials, jsonParse } from 'n8n-workflow'; +import { Container } from 'typedi'; + import { Cipher } from './Cipher'; export class Credentials< diff --git a/packages/core/src/DirectoryLoader.ts b/packages/core/src/DirectoryLoader.ts index cef3db4068..a1401a8fb5 100644 --- a/packages/core/src/DirectoryLoader.ts +++ b/packages/core/src/DirectoryLoader.ts @@ -1,6 +1,4 @@ import glob from 'fast-glob'; -import { readFileSync } from 'node:fs'; -import { readFile } from 'node:fs/promises'; import type { CodexData, DocumentationLink, @@ -21,7 +19,10 @@ import { getVersionedNodeTypeAll, jsonParse, } from 'n8n-workflow'; +import { readFileSync } from 'node:fs'; +import { readFile } from 'node:fs/promises'; import * as path from 'path'; + import { loadClassInIsolation } from './ClassLoader'; import { CUSTOM_NODES_CATEGORY } from './Constants'; import type { n8n } from './Interfaces'; diff --git a/packages/core/src/ExecutionMetadata.ts b/packages/core/src/ExecutionMetadata.ts index cc2743a56f..8466933e05 100644 --- a/packages/core/src/ExecutionMetadata.ts +++ b/packages/core/src/ExecutionMetadata.ts @@ -1,5 +1,6 @@ import type { IRunExecutionData } from 'n8n-workflow'; import { LoggerProxy as Logger } from 'n8n-workflow'; + import { InvalidExecutionMetadataError } from './errors/invalid-execution-metadata.error'; export const 
KV_LIMIT = 10; diff --git a/packages/core/src/InstanceSettings.ts b/packages/core/src/InstanceSettings.ts index 44f4b0c336..17ccf15def 100644 --- a/packages/core/src/InstanceSettings.ts +++ b/packages/core/src/InstanceSettings.ts @@ -1,8 +1,8 @@ -import path from 'path'; -import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'fs'; import { createHash, randomBytes } from 'crypto'; -import { Service } from 'typedi'; +import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'fs'; import { ApplicationError, jsonParse } from 'n8n-workflow'; +import path from 'path'; +import { Service } from 'typedi'; interface ReadOnlySettings { encryptionKey: string; diff --git a/packages/core/src/NodeExecuteFunctions.ts b/packages/core/src/NodeExecuteFunctions.ts index ec9895c483..d2669b3ca9 100644 --- a/packages/core/src/NodeExecuteFunctions.ts +++ b/packages/core/src/NodeExecuteFunctions.ts @@ -32,6 +32,7 @@ import { IncomingMessage, type IncomingHttpHeaders } from 'http'; import { Agent, type AgentOptions } from 'https'; import get from 'lodash/get'; import isEmpty from 'lodash/isEmpty'; +import merge from 'lodash/merge'; import pick from 'lodash/pick'; import { DateTime } from 'luxon'; import { extension, lookup } from 'mime-types'; @@ -129,9 +130,13 @@ import clientOAuth1 from 'oauth-1.0a'; import path from 'path'; import { stringify } from 'qs'; import { Readable } from 'stream'; +import Container from 'typedi'; import url, { URL, URLSearchParams } from 'url'; +import { createAgentStartJob } from './Agent'; import { BinaryDataService } from './BinaryData/BinaryData.service'; +import type { BinaryData } from './BinaryData/types'; +import { binaryToBuffer } from './BinaryData/utils'; import { BINARY_DATA_STORAGE_PATH, BLOCK_FILE_ACCESS_TO_N8N_FILES, @@ -144,23 +149,19 @@ import { UM_EMAIL_TEMPLATES_INVITE, UM_EMAIL_TEMPLATES_PWRESET, } from './Constants'; -import { extractValue } from './ExtractValue'; -import type { ExtendedValidationResult, IResponseError 
} from './Interfaces'; +import { createNodeAsTool } from './CreateNodeAsTool'; import { getAllWorkflowExecutionMetadata, getWorkflowExecutionMetadata, setAllWorkflowExecutionMetadata, setWorkflowExecutionMetadata, } from './ExecutionMetadata'; -import { getSecretsProxy } from './Secrets'; -import Container from 'typedi'; -import type { BinaryData } from './BinaryData/types'; -import merge from 'lodash/merge'; +import { extractValue } from './ExtractValue'; import { InstanceSettings } from './InstanceSettings'; +import type { ExtendedValidationResult, IResponseError } from './Interfaces'; import { ScheduledTaskManager } from './ScheduledTaskManager'; +import { getSecretsProxy } from './Secrets'; import { SSHClientsManager } from './SSHClientsManager'; -import { binaryToBuffer } from './BinaryData/utils'; -import { getNodeAsTool } from './CreateNodeAsTool'; axios.defaults.timeout = 300000; // Prevent axios from adding x-form-www-urlencoded headers by default @@ -2853,7 +2854,7 @@ async function getInputConnectionData( if (!nodeType.supplyData) { if (nodeType.description.outputs.includes(NodeConnectionType.AiTool)) { nodeType.supplyData = async function (this: IExecuteFunctions) { - return getNodeAsTool(this, nodeType, this.getNode().parameters); + return createNodeAsTool(this, nodeType, this.getNode().parameters); }; } else { throw new ApplicationError('Node does not have a `supplyData` method defined', { @@ -3792,6 +3793,17 @@ export function getExecuteFunctions( additionalData.setExecutionStatus('waiting'); } }, + logNodeOutput(...args: unknown[]): void { + if (mode === 'manual') { + // @ts-expect-error `args` is spreadable + this.sendMessageToUI(...args); + return; + } + + if (process.env.CODE_ENABLE_STDOUT === 'true') { + console.log(`[Workflow "${this.getWorkflow().id}"][Node "${node.name}"]`, ...args); + } + }, sendMessageToUI(...args: any[]): void { if (mode !== 'manual') { return; @@ -3909,6 +3921,19 @@ export function getExecuteFunctions( }); }, 
getParentCallbackManager: () => additionalData.parentCallbackManager, + startJob: createAgentStartJob( + additionalData, + inputData, + node, + workflow, + runExecutionData, + runIndex, + node.name, + connectionInputData, + {}, + mode, + executeData, + ), }; })(workflow, runExecutionData, connectionInputData, inputData, node) as IExecuteFunctions; } diff --git a/packages/core/src/ObjectStore/ObjectStore.service.ee.ts b/packages/core/src/ObjectStore/ObjectStore.service.ee.ts index ddd21db799..0e4d8463df 100644 --- a/packages/core/src/ObjectStore/ObjectStore.service.ee.ts +++ b/packages/core/src/ObjectStore/ObjectStore.service.ee.ts @@ -1,12 +1,12 @@ -import { createHash } from 'node:crypto'; -import axios from 'axios'; -import { Service } from 'typedi'; import { sign } from 'aws4'; -import { isStream, parseXml, writeBlockedMessage } from './utils'; -import { ApplicationError, LoggerProxy as Logger } from 'n8n-workflow'; - -import type { AxiosRequestConfig, AxiosResponse, InternalAxiosRequestConfig, Method } from 'axios'; import type { Request as Aws4Options, Credentials as Aws4Credentials } from 'aws4'; +import axios from 'axios'; +import type { AxiosRequestConfig, AxiosResponse, InternalAxiosRequestConfig, Method } from 'axios'; +import { ApplicationError, LoggerProxy as Logger } from 'n8n-workflow'; +import { createHash } from 'node:crypto'; +import type { Readable } from 'stream'; +import { Service } from 'typedi'; + import type { Bucket, ConfigSchemaCredentials, @@ -15,7 +15,7 @@ import type { RawListPage, RequestOptions, } from './types'; -import type { Readable } from 'stream'; +import { isStream, parseXml, writeBlockedMessage } from './utils'; import type { BinaryData } from '../BinaryData/types'; @Service() diff --git a/packages/core/src/ObjectStore/types.ts b/packages/core/src/ObjectStore/types.ts index 639ae2e6a2..d0b7ab0713 100644 --- a/packages/core/src/ObjectStore/types.ts +++ b/packages/core/src/ObjectStore/types.ts @@ -1,4 +1,5 @@ import type { 
AxiosResponseHeaders, ResponseType } from 'axios'; + import type { BinaryData } from '../BinaryData/types'; export type RawListPage = { diff --git a/packages/core/src/PartialExecutionUtils/DirectedGraph.ts b/packages/core/src/PartialExecutionUtils/DirectedGraph.ts index bd6cf81a2f..33cc114698 100644 --- a/packages/core/src/PartialExecutionUtils/DirectedGraph.ts +++ b/packages/core/src/PartialExecutionUtils/DirectedGraph.ts @@ -67,6 +67,63 @@ export class DirectedGraph { return this; } + /** + * Removes a node from the graph. + * + * By default it will also remove all connections that use that node and + * return nothing. + * + * If you pass `{ reconnectConnections: true }` it will rewire all + * connections making sure all parent nodes are connected to all child nodes + * and return the new connections. + */ + removeNode(node: INode, options?: { reconnectConnections: true }): GraphConnection[]; + removeNode(node: INode, options?: { reconnectConnections: false }): undefined; + removeNode(node: INode, { reconnectConnections = false } = {}): undefined | GraphConnection[] { + if (reconnectConnections) { + const incomingConnections = this.getDirectParents(node); + const outgoingConnections = this.getDirectChildren(node); + + const newConnections: GraphConnection[] = []; + + for (const incomingConnection of incomingConnections) { + for (const outgoingConnection of outgoingConnections) { + const newConnection = { + ...incomingConnection, + to: outgoingConnection.to, + inputIndex: outgoingConnection.inputIndex, + }; + + newConnections.push(newConnection); + } + } + + for (const [key, connection] of this.connections.entries()) { + if (connection.to === node || connection.from === node) { + this.connections.delete(key); + } + } + + for (const newConnection of newConnections) { + this.connections.set(this.makeKey(newConnection), newConnection); + } + + this.nodes.delete(node.name); + + return newConnections; + } else { + for (const [key, connection] of 
this.connections.entries()) { + if (connection.to === node || connection.from === node) { + this.connections.delete(key); + } + } + + this.nodes.delete(node.name); + + return; + } + } + addConnection(connectionInput: { from: INode; to: INode; diff --git a/packages/core/src/PartialExecutionUtils/__tests__/DirectedGraph.test.ts b/packages/core/src/PartialExecutionUtils/__tests__/DirectedGraph.test.ts index 93df23de32..426a5405c7 100644 --- a/packages/core/src/PartialExecutionUtils/__tests__/DirectedGraph.test.ts +++ b/packages/core/src/PartialExecutionUtils/__tests__/DirectedGraph.test.ts @@ -9,8 +9,10 @@ // XX denotes that the node is disabled // PD denotes that the node has pinned data -import { DirectedGraph } from '../DirectedGraph'; +import { NodeConnectionType } from 'n8n-workflow'; + import { createNodeData, defaultWorkflowParameter } from './helpers'; +import { DirectedGraph } from '../DirectedGraph'; describe('DirectedGraph', () => { // ┌─────┐ ┌─────┐ ┌─────┐ @@ -86,4 +88,202 @@ describe('DirectedGraph', () => { expect(children).toEqual(new Set([node1, node2, node3])); }); }); + + describe('removeNode', () => { + // XX + // ┌─────┐ ┌─────┐ ┌─────┐ + // │node0├───►│node1├──►│node2│ + // └─────┘ └─────┘ └─────┘ + // turns into + // ┌─────┐ ┌─────┐ + // │node0│ │node2│ + // └─────┘ └─────┘ + test('remove node and all connections', () => { + // ARRANGE + const node0 = createNodeData({ name: 'node0' }); + const node1 = createNodeData({ name: 'node1' }); + const node2 = createNodeData({ name: 'node2' }); + const graph = new DirectedGraph() + .addNodes(node0, node1, node2) + .addConnections({ from: node0, to: node1 }, { from: node0, to: node2 }); + + // ACT + graph.removeNode(node1); + + // ASSERT + expect(graph).toEqual( + new DirectedGraph().addNodes(node0, node2).addConnections({ from: node0, to: node2 }), + ); + }); + + // XX + // ┌─────┐ ┌─────┐ ┌─────┐ + // │node0├───►│node1├──►│node2│ + // └─────┘ └─────┘ └─────┘ + // turns into + // ┌─────┐ ┌─────┐ + // 
│node0├──►│node2│ + // └─────┘ └─────┘ + test('remove node, but reconnect connections', () => { + // ARRANGE + const node0 = createNodeData({ name: 'node0' }); + const node1 = createNodeData({ name: 'node1' }); + const node2 = createNodeData({ name: 'node2' }); + const graph = new DirectedGraph() + .addNodes(node0, node1, node2) + .addConnections({ from: node0, to: node1 }, { from: node1, to: node2 }); + + // ACT + const newConnections = graph.removeNode(node1, { reconnectConnections: true }); + + // ASSERT + expect(newConnections).toHaveLength(1); + expect(newConnections[0]).toEqual({ + from: node0, + outputIndex: 0, + type: NodeConnectionType.Main, + inputIndex: 0, + to: node2, + }); + expect(graph).toEqual( + new DirectedGraph().addNodes(node0, node2).addConnections({ from: node0, to: node2 }), + ); + }); + + // XX + // ┌─────┐ ┌─────┐ ┌─────┐ + // │ │o o│ │o o│ │ + // │ │o─┐ o│ │o o│ │ + // │node0│o └►o│node1│o o│node2│ + // │ │o o│ │o─┐ o│ │ + // │ │o o│ │o └►o│ │ + // └─────┘ └─────┘ └─────┘ + // turns into + // ┌─────┐ ┌─────┐ + // │ │o o│ │ + // │ │o───────┐ o│ │ + // │node0│o │ o│node2│ + // │ │o │ o│ │ + // │ │o └──────►o│ │ + // └─────┘ └─────┘ + test('remove node, reconnect connections and retaining the input indexes', () => { + // ARRANGE + const node0 = createNodeData({ name: 'node0' }); + const node1 = createNodeData({ name: 'node1' }); + const node2 = createNodeData({ name: 'node2' }); + const graph = new DirectedGraph() + .addNodes(node0, node1, node2) + .addConnections( + { from: node0, outputIndex: 1, inputIndex: 2, to: node1 }, + { from: node1, outputIndex: 3, inputIndex: 4, to: node2 }, + ); + + // ACT + const newConnections = graph.removeNode(node1, { reconnectConnections: true }); + + // ASSERT + expect(newConnections).toHaveLength(1); + expect(newConnections[0]).toEqual({ + from: node0, + outputIndex: 1, + type: NodeConnectionType.Main, + inputIndex: 4, + to: node2, + }); + expect(graph).toEqual( + new DirectedGraph() + .addNodes(node0, 
node2) + .addConnections({ from: node0, outputIndex: 1, inputIndex: 4, to: node2 }), + ); + }); + + // XX + // ┌─────┐ ┌─────┐ ┌─────┐ + // │ │o o│ │o │ │ + // │ │o─┐ o│ │o │ │ + // │node0│ └►o│node1│o ┌►o│node2│ + // │ │ │ │o─┘ │ │ + // │ │ │ │ │ │ + // └─────┘ └─────┘ └─────┘ + // turns into + // ┌─────┐ ┌─────┐ + // │ │o │ │ + // │ │o───────┐ │ │ + // │node0│ └──────►o│node2│ + // │ │ │ │ + // │ │ │ │ + // └─────┘ └─────┘ + test('remove node, reconnect connections and retaining the input indexes, even if the child has less inputs than the than the removed node had', () => { + // ARRANGE + const node0 = createNodeData({ name: 'node0' }); + const node1 = createNodeData({ name: 'node1' }); + const node2 = createNodeData({ name: 'node2' }); + const graph = new DirectedGraph() + .addNodes(node0, node1, node2) + .addConnections( + { from: node0, outputIndex: 1, inputIndex: 2, to: node1 }, + { from: node1, outputIndex: 3, inputIndex: 0, to: node2 }, + ); + + // ACT + const newConnections = graph.removeNode(node1, { reconnectConnections: true }); + + // ASSERT + const expectedGraph = new DirectedGraph() + .addNodes(node0, node2) + .addConnections({ from: node0, outputIndex: 1, inputIndex: 0, to: node2 }); + expect(newConnections).toHaveLength(1); + expect(newConnections).toEqual(expectedGraph.getConnections()); + expect(graph).toEqual(expectedGraph); + }); + + // ┌─────┐ ┌──────┐ + // │left0├─┐ XX ┌►│right0│ + // └─────┘ │ ┌──────┐ │ └──────┘ + // ├─►│center├──┤ + // ┌─────┐ │ └──────┘ │ ┌──────┐ + // │left1├─┘ └►│right1│ + // └─────┘ └──────┘ + // turns into + // + // ┌─────┐ ┌──────┐ + // │left0├─┐ ┌─►│right0│ + // └─────┘ │ │ └──────┘ + // ├───────────┤ + // ┌─────┐ │ │ ┌──────┐ + // │left1├─┘ └─►│right1│ + // └─────┘ └──────┘ + test('remove node, reconnect connections and multiplexes them', () => { + // ARRANGE + const left0 = createNodeData({ name: 'left0' }); + const left1 = createNodeData({ name: 'left1' }); + const center = createNodeData({ name: 'center' }); + 
const right0 = createNodeData({ name: 'right0' }); + const right1 = createNodeData({ name: 'right1' }); + const graph = new DirectedGraph() + .addNodes(left0, left1, center, right0, right1) + .addConnections( + { from: left0, to: center }, + { from: left1, to: center }, + { from: center, to: right0 }, + { from: center, to: right1 }, + ); + + // ACT + const newConnections = graph.removeNode(center, { reconnectConnections: true }); + + // ASSERT + const expectedGraph = new DirectedGraph() + .addNodes(left0, left1, right0, right1) + .addConnections( + { from: left0, to: right0 }, + { from: left0, to: right1 }, + { from: left1, to: right0 }, + { from: left1, to: right1 }, + ); + expect(newConnections).toHaveLength(4); + expect(newConnections).toEqual(expectedGraph.getConnections()); + expect(graph).toEqual(expectedGraph); + }); + }); }); diff --git a/packages/core/src/PartialExecutionUtils/__tests__/cleanRunData.test.ts b/packages/core/src/PartialExecutionUtils/__tests__/cleanRunData.test.ts index fabfae0ee3..bf37ec7636 100644 --- a/packages/core/src/PartialExecutionUtils/__tests__/cleanRunData.test.ts +++ b/packages/core/src/PartialExecutionUtils/__tests__/cleanRunData.test.ts @@ -1,7 +1,8 @@ import type { IRunData } from 'n8n-workflow'; + +import { createNodeData, toITaskData } from './helpers'; import { cleanRunData } from '../cleanRunData'; import { DirectedGraph } from '../DirectedGraph'; -import { createNodeData, toITaskData } from './helpers'; describe('cleanRunData', () => { // ┌─────┐ ┌─────┐ ┌─────┐ diff --git a/packages/core/src/PartialExecutionUtils/__tests__/findStartNodes.test.ts b/packages/core/src/PartialExecutionUtils/__tests__/findStartNodes.test.ts index c830833d8d..0ea2e4f611 100644 --- a/packages/core/src/PartialExecutionUtils/__tests__/findStartNodes.test.ts +++ b/packages/core/src/PartialExecutionUtils/__tests__/findStartNodes.test.ts @@ -10,9 +10,10 @@ // PD denotes that the node has pinned data import { type IPinData, type IRunData } from 
'n8n-workflow'; + import { createNodeData, toITaskData } from './helpers'; -import { findStartNodes, isDirty } from '../findStartNodes'; import { DirectedGraph } from '../DirectedGraph'; +import { findStartNodes, isDirty } from '../findStartNodes'; describe('isDirty', () => { test("if the node has pinned data it's not dirty", () => { diff --git a/packages/core/src/PartialExecutionUtils/__tests__/findSubgraph.test.ts b/packages/core/src/PartialExecutionUtils/__tests__/findSubgraph.test.ts index d82f73e9e3..a5187eacf4 100644 --- a/packages/core/src/PartialExecutionUtils/__tests__/findSubgraph.test.ts +++ b/packages/core/src/PartialExecutionUtils/__tests__/findSubgraph.test.ts @@ -9,11 +9,11 @@ // XX denotes that the node is disabled // PD denotes that the node has pinned data +import { createNodeData } from './helpers'; import { DirectedGraph } from '../DirectedGraph'; import { findSubgraph } from '../findSubgraph'; -import { createNodeData } from './helpers'; -describe('findSubgraph2', () => { +describe('findSubgraph', () => { // ►► // ┌───────┐ ┌───────────┐ // │trigger├────►│destination│ @@ -83,6 +83,12 @@ describe('findSubgraph2', () => { // │trigger│ │disabled├─────►│destination│ // │ ├────────►│ │ └───────────┘ // └───────┘ └────────┘ + // turns into + // ┌───────┐ ►► + // │ │ ┌───────────┐ + // │trigger├─────►│destination│ + // │ │ └───────────┘ + // └───────┘ test('skip disabled nodes', () => { const trigger = createNodeData({ name: 'trigger' }); const disabled = createNodeData({ name: 'disabled', disabled: true }); @@ -101,6 +107,40 @@ describe('findSubgraph2', () => { ); }); + // XX XX + // ┌───────┐ ┌─────┐ ┌─────┐ ┌───────────┐ + // │trigger├────►│node1├────►│node2├────►│destination│ + // └───────┘ └─────┘ └─────┘ └───────────┘ + // turns into + // ┌───────┐ ┌───────────┐ + // │trigger├────►│destination│ + // └───────┘ └───────────┘ + test('skip multiple disabled nodes', () => { + // ARRANGE + const trigger = createNodeData({ name: 'trigger' }); + const 
disabledNode1 = createNodeData({ name: 'disabledNode1', disabled: true }); + const disabledNode2 = createNodeData({ name: 'disabledNode2', disabled: true }); + const destination = createNodeData({ name: 'destination' }); + + const graph = new DirectedGraph() + .addNodes(trigger, disabledNode1, disabledNode2, destination) + .addConnections( + { from: trigger, to: disabledNode1 }, + { from: disabledNode1, to: disabledNode2 }, + { from: disabledNode2, to: destination }, + ); + + // ACT + const subgraph = findSubgraph(graph, destination, trigger); + + // ASSERT + expect(subgraph).toEqual( + new DirectedGraph() + .addNodes(trigger, destination) + .addConnections({ from: trigger, to: destination }), + ); + }); + // ►► // ┌───────┐ ┌─────┐ ┌─────┐ // │Trigger├───┬──►│Node1├───┬─►│Node2│ diff --git a/packages/core/src/PartialExecutionUtils/__tests__/getSourceDataGroups.test.ts b/packages/core/src/PartialExecutionUtils/__tests__/getSourceDataGroups.test.ts index 737d0a2754..dffbe310d1 100644 --- a/packages/core/src/PartialExecutionUtils/__tests__/getSourceDataGroups.test.ts +++ b/packages/core/src/PartialExecutionUtils/__tests__/getSourceDataGroups.test.ts @@ -9,8 +9,9 @@ import type { IPinData } from 'n8n-workflow'; import { NodeConnectionType, type IRunData } from 'n8n-workflow'; -import { DirectedGraph } from '../DirectedGraph'; + import { createNodeData, toITaskData } from './helpers'; +import { DirectedGraph } from '../DirectedGraph'; import { getSourceDataGroups } from '../getSourceDataGroups'; describe('getSourceDataGroups', () => { diff --git a/packages/core/src/PartialExecutionUtils/__tests__/recreateNodeExecutionStack.test.ts b/packages/core/src/PartialExecutionUtils/__tests__/recreateNodeExecutionStack.test.ts index 42cbe1f5ff..d0cc934b13 100644 --- a/packages/core/src/PartialExecutionUtils/__tests__/recreateNodeExecutionStack.test.ts +++ b/packages/core/src/PartialExecutionUtils/__tests__/recreateNodeExecutionStack.test.ts @@ -9,12 +9,14 @@ // XX denotes that 
the node is disabled // PD denotes that the node has pinned data -import { recreateNodeExecutionStack } from '@/PartialExecutionUtils/recreateNodeExecutionStack'; -import { type IPinData, type IRunData } from 'n8n-workflow'; import { AssertionError } from 'assert'; +import { type IPinData, type IRunData } from 'n8n-workflow'; + +import { recreateNodeExecutionStack } from '@/PartialExecutionUtils/recreateNodeExecutionStack'; + +import { createNodeData, toITaskData } from './helpers'; import { DirectedGraph } from '../DirectedGraph'; import { findSubgraph } from '../findSubgraph'; -import { createNodeData, toITaskData } from './helpers'; describe('recreateNodeExecutionStack', () => { // ►► diff --git a/packages/core/src/PartialExecutionUtils/__tests__/toIConnections.test.ts b/packages/core/src/PartialExecutionUtils/__tests__/toIConnections.test.ts index a2524bf3ce..e5ea0e658a 100644 --- a/packages/core/src/PartialExecutionUtils/__tests__/toIConnections.test.ts +++ b/packages/core/src/PartialExecutionUtils/__tests__/toIConnections.test.ts @@ -1,4 +1,5 @@ import { NodeConnectionType } from 'n8n-workflow'; + import { createNodeData, toIConnections } from './helpers'; test('toIConnections', () => { diff --git a/packages/core/src/PartialExecutionUtils/__tests__/toITaskData.test.ts b/packages/core/src/PartialExecutionUtils/__tests__/toITaskData.test.ts index e255836339..fe9c3f132a 100644 --- a/packages/core/src/PartialExecutionUtils/__tests__/toITaskData.test.ts +++ b/packages/core/src/PartialExecutionUtils/__tests__/toITaskData.test.ts @@ -1,4 +1,5 @@ import { NodeConnectionType } from 'n8n-workflow'; + import { toITaskData } from './helpers'; test('toITaskData', function () { diff --git a/packages/core/src/PartialExecutionUtils/cleanRunData.ts b/packages/core/src/PartialExecutionUtils/cleanRunData.ts index 945dca1451..5d74a3575a 100644 --- a/packages/core/src/PartialExecutionUtils/cleanRunData.ts +++ b/packages/core/src/PartialExecutionUtils/cleanRunData.ts @@ -1,4 +1,5 
@@ import type { INode, IRunData } from 'n8n-workflow'; + import type { DirectedGraph } from './DirectedGraph'; /** diff --git a/packages/core/src/PartialExecutionUtils/findStartNodes.ts b/packages/core/src/PartialExecutionUtils/findStartNodes.ts index 910045d709..12a9688c1c 100644 --- a/packages/core/src/PartialExecutionUtils/findStartNodes.ts +++ b/packages/core/src/PartialExecutionUtils/findStartNodes.ts @@ -1,4 +1,5 @@ import type { INode, IPinData, IRunData } from 'n8n-workflow'; + import type { DirectedGraph } from './DirectedGraph'; import { getIncomingData } from './getIncomingData'; diff --git a/packages/core/src/PartialExecutionUtils/findSubgraph.ts b/packages/core/src/PartialExecutionUtils/findSubgraph.ts index 2b1ceb2998..ea1df91840 100644 --- a/packages/core/src/PartialExecutionUtils/findSubgraph.ts +++ b/packages/core/src/PartialExecutionUtils/findSubgraph.ts @@ -1,4 +1,5 @@ import type { INode } from 'n8n-workflow'; + import type { GraphConnection } from './DirectedGraph'; import { DirectedGraph } from './DirectedGraph'; @@ -50,27 +51,14 @@ function findSubgraphRecursive( // Take every incoming connection and connect it to every node that is // connected to the current node’s first output if (current.disabled) { - const incomingConnections = graph.getDirectParents(current); - const outgoingConnections = graph - .getDirectChildren(current) - // NOTE: When a node is disabled only the first output gets data - .filter((connection) => connection.outputIndex === 0); + // The last segment on the current branch is still pointing to the removed + // node, so let's remove it. 
+ currentBranch.pop(); - parentConnections = []; - - for (const incomingConnection of incomingConnections) { - for (const outgoingConnection of outgoingConnections) { - const newConnection = { - ...incomingConnection, - to: outgoingConnection.to, - inputIndex: outgoingConnection.inputIndex, - }; - - parentConnections.push(newConnection); - currentBranch.pop(); - currentBranch.push(newConnection); - } - } + // The node is replaced by a set of new connections, connecting the parents + // and children of it directly. In the recursive call below we'll follow + // them further. + parentConnections = graph.removeNode(current, { reconnectConnections: true }); } // Recurse on each parent. diff --git a/packages/core/src/PartialExecutionUtils/findTriggerForPartialExecution.ts b/packages/core/src/PartialExecutionUtils/findTriggerForPartialExecution.ts index baae6e7304..977e99c107 100644 --- a/packages/core/src/PartialExecutionUtils/findTriggerForPartialExecution.ts +++ b/packages/core/src/PartialExecutionUtils/findTriggerForPartialExecution.ts @@ -1,5 +1,5 @@ -import type { INode, Workflow } from 'n8n-workflow'; import * as assert from 'assert/strict'; +import type { INode, Workflow } from 'n8n-workflow'; function findAllParentTriggers(workflow: Workflow, destinationNodeName: string) { const parentNodes = workflow diff --git a/packages/core/src/PartialExecutionUtils/recreateNodeExecutionStack.ts b/packages/core/src/PartialExecutionUtils/recreateNodeExecutionStack.ts index b1e3334440..f2f1f4af68 100644 --- a/packages/core/src/PartialExecutionUtils/recreateNodeExecutionStack.ts +++ b/packages/core/src/PartialExecutionUtils/recreateNodeExecutionStack.ts @@ -1,3 +1,4 @@ +import * as a from 'assert/strict'; import { NodeConnectionType, type IExecuteData, @@ -11,7 +12,6 @@ import { type IWaitingForExecutionSource, } from 'n8n-workflow'; -import * as a from 'assert/strict'; import type { DirectedGraph } from './DirectedGraph'; import { getIncomingData } from './getIncomingData'; 
import { getSourceDataGroups } from './getSourceDataGroups'; @@ -44,12 +44,12 @@ export function recreateNodeExecutionStack( // Validate invariants. // The graph needs to be free of disabled nodes. If it's not it hasn't been - // passed through findSubgraph2. + // passed through findSubgraph. for (const node of graph.getNodes().values()) { a.notEqual( node.disabled, true, - `Graph contains disabled nodes. This is not supported. Make sure to pass the graph through "findSubgraph2" before calling "recreateNodeExecutionStack". The node in question is "${node.name}"`, + `Graph contains disabled nodes. This is not supported. Make sure to pass the graph through "findSubgraph" before calling "recreateNodeExecutionStack". The node in question is "${node.name}"`, ); } diff --git a/packages/core/src/SSHClientsManager.ts b/packages/core/src/SSHClientsManager.ts index 78126f96e8..17046a26eb 100644 --- a/packages/core/src/SSHClientsManager.ts +++ b/packages/core/src/SSHClientsManager.ts @@ -1,7 +1,7 @@ -import { Service } from 'typedi'; -import { Client, type ConnectConfig } from 'ssh2'; -import { createHash } from 'node:crypto'; import type { SSHCredentials } from 'n8n-workflow'; +import { createHash } from 'node:crypto'; +import { Client, type ConnectConfig } from 'ssh2'; +import { Service } from 'typedi'; @Service() export class SSHClientsManager { diff --git a/packages/core/src/ScheduledTaskManager.ts b/packages/core/src/ScheduledTaskManager.ts index eb519a60a7..fd2bb525a9 100644 --- a/packages/core/src/ScheduledTaskManager.ts +++ b/packages/core/src/ScheduledTaskManager.ts @@ -1,6 +1,7 @@ -import { Service } from 'typedi'; import { CronJob } from 'cron'; import type { CronExpression, Workflow } from 'n8n-workflow'; +import { Service } from 'typedi'; + import { InstanceSettings } from './InstanceSettings'; @Service() diff --git a/packages/core/src/WorkflowExecute.ts b/packages/core/src/WorkflowExecute.ts index a10d8c530c..46de2472fc 100644 --- 
a/packages/core/src/WorkflowExecute.ts +++ b/packages/core/src/WorkflowExecute.ts @@ -2,9 +2,9 @@ /* eslint-disable @typescript-eslint/no-unsafe-member-access */ /* eslint-disable @typescript-eslint/no-unsafe-assignment */ /* eslint-disable @typescript-eslint/prefer-nullish-coalescing */ +import * as assert from 'assert/strict'; import { setMaxListeners } from 'events'; -import PCancelable from 'p-cancelable'; - +import get from 'lodash/get'; import type { ExecutionBaseError, ExecutionStatus, @@ -46,11 +46,9 @@ import { sleep, ErrorReporterProxy, } from 'n8n-workflow'; -import get from 'lodash/get'; -import * as NodeExecuteFunctions from './NodeExecuteFunctions'; +import PCancelable from 'p-cancelable'; -import * as assert from 'assert/strict'; -import { recreateNodeExecutionStack } from './PartialExecutionUtils/recreateNodeExecutionStack'; +import * as NodeExecuteFunctions from './NodeExecuteFunctions'; import { DirectedGraph, findCycles, @@ -59,6 +57,7 @@ import { findTriggerForPartialExecution, } from './PartialExecutionUtils'; import { cleanRunData } from './PartialExecutionUtils/cleanRunData'; +import { recreateNodeExecutionStack } from './PartialExecutionUtils/recreateNodeExecutionStack'; export class WorkflowExecute { private status: ExecutionStatus = 'new'; diff --git a/packages/core/src/errors/invalid-mode.error.ts b/packages/core/src/errors/invalid-mode.error.ts index 348fbb410d..179582911a 100644 --- a/packages/core/src/errors/invalid-mode.error.ts +++ b/packages/core/src/errors/invalid-mode.error.ts @@ -1,4 +1,5 @@ import { ApplicationError } from 'n8n-workflow'; + import { CONFIG_MODES } from '../BinaryData/utils'; export class InvalidModeError extends ApplicationError { diff --git a/packages/core/test/BinaryData/utils.test.ts b/packages/core/test/BinaryData/utils.test.ts index 95a138c00d..50a7f165df 100644 --- a/packages/core/test/BinaryData/utils.test.ts +++ b/packages/core/test/BinaryData/utils.test.ts @@ -1,5 +1,6 @@ import { Readable } from 
'node:stream'; import { createGunzip } from 'node:zlib'; + import { binaryToBuffer } from '@/BinaryData/utils'; describe('BinaryData/utils', () => { diff --git a/packages/core/test/Cipher.test.ts b/packages/core/test/Cipher.test.ts index 1b7c0de944..e3dfa609fa 100644 --- a/packages/core/test/Cipher.test.ts +++ b/packages/core/test/Cipher.test.ts @@ -1,6 +1,8 @@ import Container from 'typedi'; -import { InstanceSettings } from '@/InstanceSettings'; + import { Cipher } from '@/Cipher'; +import { InstanceSettings } from '@/InstanceSettings'; + import { mockInstance } from './utils'; describe('Cipher', () => { diff --git a/packages/core/test/CreateNodeAsTool.test.ts b/packages/core/test/CreateNodeAsTool.test.ts index c4509e08be..5c485b9837 100644 --- a/packages/core/test/CreateNodeAsTool.test.ts +++ b/packages/core/test/CreateNodeAsTool.test.ts @@ -1,8 +1,9 @@ -import { createNodeAsTool } from '@/CreateNodeAsTool'; import type { IExecuteFunctions, INodeParameters, INodeType } from 'n8n-workflow'; -import { NodeConnectionType } from 'n8n-workflow'; +import { NodeConnectionType, NodeOperationError } from 'n8n-workflow'; import { z } from 'zod'; +import { createNodeAsTool } from '@/CreateNodeAsTool'; + jest.mock('@langchain/core/tools', () => ({ DynamicStructuredTool: jest.fn().mockImplementation((config) => ({ name: config.name, @@ -18,10 +19,12 @@ describe('createNodeAsTool', () => { let mockNodeParameters: INodeParameters; beforeEach(() => { + // Setup mock objects mockCtx = { getNodeParameter: jest.fn(), - addInputData: jest.fn(), + addInputData: jest.fn().mockReturnValue({ index: 0 }), addOutputData: jest.fn(), + getNode: jest.fn().mockReturnValue({ name: 'Test_Node' }), } as unknown as IExecuteFunctions; mockNode = { @@ -33,60 +36,456 @@ describe('createNodeAsTool', () => { } as unknown as INodeType; mockNodeParameters = { - param1: "{{ '__PLACEHOLDER: Test parameter' }}", + param1: "={{$fromAI('param1', 'Test parameter', 'string') }}", param2: 'static value', 
nestedParam: { - subParam: "{{ '__PLACEHOLDER: Nested parameter' }}", + subParam: "={{ $fromAI('subparam', 'Nested parameter', 'string') }}", }, + descriptionType: 'auto', + resource: 'testResource', + operation: 'testOperation', }; + jest.clearAllMocks(); }); - it('should create a DynamicStructuredTool with correct properties', () => { - const tool = createNodeAsTool(mockNode, mockCtx, mockNodeParameters); + describe('Tool Creation and Basic Properties', () => { + it('should create a DynamicStructuredTool with correct properties', () => { + const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; - expect(tool).toBeDefined(); - expect(tool.name).toBe('TestNode'); - expect(tool.description).toBe('Test node description'); - expect(tool.schema).toBeDefined(); + expect(tool).toBeDefined(); + expect(tool.name).toBe('Test_Node'); + expect(tool.description).toBe( + 'Test node description\n Resource: testResource\n Operation: testOperation', + ); + expect(tool.schema).toBeDefined(); + }); + + it('should use toolDescription if provided', () => { + mockNodeParameters.descriptionType = 'manual'; + mockNodeParameters.toolDescription = 'Custom tool description'; + + const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + + expect(tool.description).toBe('Custom tool description'); + }); }); - it('should use toolDescription if provided', () => { - const customDescription = 'Custom tool description'; - (mockCtx.getNodeParameter as jest.Mock).mockReturnValue(customDescription); + describe('Schema Creation and Parameter Handling', () => { + it('should create a schema based on fromAI arguments in nodeParameters', () => { + const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; - const tool = createNodeAsTool(mockNode, mockCtx, mockNodeParameters); + expect(tool.schema).toBeDefined(); + expect(tool.schema.shape).toHaveProperty('param1'); + expect(tool.schema.shape).toHaveProperty('subparam'); + 
expect(tool.schema.shape).not.toHaveProperty('param2'); + }); - expect(tool.description).toBe(customDescription); + it('should handle fromAI arguments correctly', () => { + const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + + expect(tool.schema.shape.param1).toBeInstanceOf(z.ZodString); + expect(tool.schema.shape.subparam).toBeInstanceOf(z.ZodString); + }); + + it('should handle default values correctly', () => { + mockNodeParameters = { + paramWithDefault: + "={{ $fromAI('paramWithDefault', 'Parameter with default', 'string', 'default value') }}", + numberWithDefault: + "={{ $fromAI('numberWithDefault', 'Number with default', 'number', 42) }}", + booleanWithDefault: + "={{ $fromAI('booleanWithDefault', 'Boolean with default', 'boolean', true) }}", + }; + + const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + + expect(tool.schema.shape.paramWithDefault.description).toBe('Parameter with default'); + expect(tool.schema.shape.numberWithDefault.description).toBe('Number with default'); + expect(tool.schema.shape.booleanWithDefault.description).toBe('Boolean with default'); + }); + + it('should handle nested parameters correctly', () => { + mockNodeParameters = { + topLevel: "={{ $fromAI('topLevel', 'Top level parameter', 'string') }}", + nested: { + level1: "={{ $fromAI('level1', 'Nested level 1', 'string') }}", + deeperNested: { + level2: "={{ $fromAI('level2', 'Nested level 2', 'number') }}", + }, + }, + }; + + const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + + expect(tool.schema.shape.topLevel).toBeInstanceOf(z.ZodString); + expect(tool.schema.shape.level1).toBeInstanceOf(z.ZodString); + expect(tool.schema.shape.level2).toBeInstanceOf(z.ZodNumber); + }); + + it('should handle array parameters correctly', () => { + mockNodeParameters = { + arrayParam: [ + "={{ $fromAI('item1', 'First item', 'string') }}", + "={{ $fromAI('item2', 'Second item', 'number') }}", + ], + }; + + const 
tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + + expect(tool.schema.shape.item1).toBeInstanceOf(z.ZodString); + expect(tool.schema.shape.item2).toBeInstanceOf(z.ZodNumber); + }); }); - it('should create a schema based on placeholder values in nodeParameters', () => { - const tool = createNodeAsTool(mockNode, mockCtx, mockNodeParameters); + describe('Error Handling and Edge Cases', () => { + it('should handle error during node execution', async () => { + mockNode.execute = jest.fn().mockRejectedValue(new Error('Execution failed')); + const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; - expect(tool.schema).toBeDefined(); - expect(tool.schema.shape).toHaveProperty('param1'); - expect(tool.schema.shape).toHaveProperty('nestedParam__subParam'); - expect(tool.schema.shape).not.toHaveProperty('param2'); + const result = await tool.func({ param1: 'test value' }); + + expect(result).toContain('Error during node execution:'); + expect(mockCtx.addOutputData).toHaveBeenCalledWith( + NodeConnectionType.AiTool, + 0, + expect.any(NodeOperationError), + ); + }); + + it('should throw an error for invalid parameter names', () => { + mockNodeParameters.invalidParam = "$fromAI('invalid param', 'Invalid parameter', 'string')"; + + expect(() => createNodeAsTool(mockCtx, mockNode, mockNodeParameters)).toThrow( + 'Parameter key `invalid param` is invalid', + ); + }); + + it('should throw an error for $fromAI calls with unsupported types', () => { + mockNodeParameters = { + invalidTypeParam: + "={{ $fromAI('invalidType', 'Param with unsupported type', 'unsupportedType') }}", + }; + + expect(() => createNodeAsTool(mockCtx, mockNode, mockNodeParameters)).toThrow( + 'Invalid type: unsupportedType', + ); + }); + + it('should handle empty parameters and parameters with no fromAI calls', () => { + mockNodeParameters = { + param1: 'static value 1', + param2: 'static value 2', + }; + + const tool = createNodeAsTool(mockCtx, mockNode, 
mockNodeParameters).response; + + expect(tool.schema.shape).toEqual({}); + }); }); - it('should handle nested parameters correctly', () => { - const tool = createNodeAsTool(mockNode, mockCtx, mockNodeParameters); + describe('Parameter Name and Description Handling', () => { + it('should accept parameter names with underscores and hyphens', () => { + mockNodeParameters = { + validName1: + "={{ $fromAI('param_name-1', 'Valid name with underscore and hyphen', 'string') }}", + validName2: "={{ $fromAI('param_name_2', 'Another valid name', 'number') }}", + }; - expect(tool.schema.shape.nestedParam__subParam).toBeInstanceOf(z.ZodString); + const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + + expect(tool.schema.shape['param_name-1']).toBeInstanceOf(z.ZodString); + expect(tool.schema.shape['param_name-1'].description).toBe( + 'Valid name with underscore and hyphen', + ); + + expect(tool.schema.shape.param_name_2).toBeInstanceOf(z.ZodNumber); + expect(tool.schema.shape.param_name_2.description).toBe('Another valid name'); + }); + + it('should throw an error for parameter names with invalid special characters', () => { + mockNodeParameters = { + invalidNameParam: + "={{ $fromAI('param@name!', 'Invalid name with special characters', 'string') }}", + }; + + expect(() => createNodeAsTool(mockCtx, mockNode, mockNodeParameters)).toThrow( + 'Parameter key `param@name!` is invalid', + ); + }); + + it('should throw an error for empty parameter name', () => { + mockNodeParameters = { + invalidNameParam: "={{ $fromAI('', 'Invalid name with special characters', 'string') }}", + }; + + expect(() => createNodeAsTool(mockCtx, mockNode, mockNodeParameters)).toThrow( + 'You must specify a key when using $fromAI()', + ); + }); + + it('should handle parameter names with exact and exceeding character limits', () => { + const longName = 'a'.repeat(64); + const tooLongName = 'a'.repeat(65); + mockNodeParameters = { + longNameParam: `={{ $fromAI('${longName}', 'Param 
with 64 character name', 'string') }}`, + }; + + const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + + expect(tool.schema.shape[longName]).toBeInstanceOf(z.ZodString); + expect(tool.schema.shape[longName].description).toBe('Param with 64 character name'); + + expect(() => + createNodeAsTool(mockCtx, mockNode, { + tooLongNameParam: `={{ $fromAI('${tooLongName}', 'Param with 65 character name', 'string') }}`, + }), + ).toThrow(`Parameter key \`${tooLongName}\` is invalid`); + }); + + it('should handle $fromAI calls with empty description', () => { + mockNodeParameters = { + emptyDescriptionParam: "={{ $fromAI('emptyDescription', '', 'number') }}", + }; + + const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + + expect(tool.schema.shape.emptyDescription).toBeInstanceOf(z.ZodNumber); + expect(tool.schema.shape.emptyDescription.description).toBeUndefined(); + }); + + it('should throw an error for calls with the same parameter but different descriptions', () => { + mockNodeParameters = { + duplicateParam1: "={{ $fromAI('duplicate', 'First duplicate', 'string') }}", + duplicateParam2: "={{ $fromAI('duplicate', 'Second duplicate', 'number') }}", + }; + + expect(() => createNodeAsTool(mockCtx, mockNode, mockNodeParameters)).toThrow( + "Duplicate key 'duplicate' found with different description or type", + ); + }); + it('should throw an error for calls with the same parameter but different types', () => { + mockNodeParameters = { + duplicateParam1: "={{ $fromAI('duplicate', 'First duplicate', 'string') }}", + duplicateParam2: "={{ $fromAI('duplicate', 'First duplicate', 'number') }}", + }; + + expect(() => createNodeAsTool(mockCtx, mockNode, mockNodeParameters)).toThrow( + "Duplicate key 'duplicate' found with different description or type", + ); + }); }); - it('should create a function that wraps the node execution', async () => { - const tool = createNodeAsTool(mockNode, mockCtx, mockNodeParameters); + 
describe('Complex Parsing Scenarios', () => { + it('should correctly parse $fromAI calls with varying spaces, capitalization, and within template literals', () => { + mockNodeParameters = { + varyingSpacing1: "={{$fromAI('param1','Description1','string')}}", + varyingSpacing2: "={{ $fromAI ( 'param2' , 'Description2' , 'number' ) }}", + varyingSpacing3: "={{ $FROMai('param3', 'Description3', 'boolean') }}", + wrongCapitalization: "={{$fromai('param4','Description4','number')}}", + templateLiteralParam: + // eslint-disable-next-line n8n-local-rules/no-interpolation-in-regular-string + "={{ `Value is: ${$fromAI('templatedParam', 'Templated param description', 'string')}` }}", + }; - const result = await tool.func({ param1: 'test value', nestedParam__subParam: 'nested value' }); + const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; - expect(mockCtx.addInputData).toHaveBeenCalledWith(NodeConnectionType.AiTool, [ - [{ json: { param1: 'test value', nestedParam__subParam: 'nested value' } }], - ]); - expect(mockNode.execute).toHaveBeenCalled(); - expect(mockCtx.addOutputData).toHaveBeenCalledWith(NodeConnectionType.AiTool, 0, [ - [{ json: { response: [{ result: 'test' }] } }], - ]); - expect(result).toBe(JSON.stringify([{ result: 'test' }])); + expect(tool.schema.shape.param1).toBeInstanceOf(z.ZodString); + expect(tool.schema.shape.param1.description).toBe('Description1'); + + expect(tool.schema.shape.param2).toBeInstanceOf(z.ZodNumber); + expect(tool.schema.shape.param2.description).toBe('Description2'); + + expect(tool.schema.shape.param3).toBeInstanceOf(z.ZodBoolean); + expect(tool.schema.shape.param3.description).toBe('Description3'); + + expect(tool.schema.shape.param4).toBeInstanceOf(z.ZodNumber); + expect(tool.schema.shape.param4.description).toBe('Description4'); + + expect(tool.schema.shape.templatedParam).toBeInstanceOf(z.ZodString); + expect(tool.schema.shape.templatedParam.description).toBe('Templated param description'); + }); + + 
it('should correctly parse multiple $fromAI calls interleaved with regular text', () => { + mockNodeParameters = { + interleavedParams: + "={{ 'Start ' + $fromAI('param1', 'First param', 'string') + ' Middle ' + $fromAI('param2', 'Second param', 'number') + ' End' }}", + }; + + const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + + expect(tool.schema.shape.param1).toBeInstanceOf(z.ZodString); + expect(tool.schema.shape.param1.description).toBe('First param'); + + expect(tool.schema.shape.param2).toBeInstanceOf(z.ZodNumber); + expect(tool.schema.shape.param2.description).toBe('Second param'); + }); + + it('should correctly parse $fromAI calls with complex JSON default values', () => { + mockNodeParameters = { + complexJsonDefault: + '={{ $fromAI(\'complexJson\', \'Param with complex JSON default\', \'json\', \'{"nested": {"key": "value"}, "array": [1, 2, 3]}\') }}', + }; + + const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + + expect(tool.schema.shape.complexJson._def.innerType).toBeInstanceOf(z.ZodRecord); + expect(tool.schema.shape.complexJson.description).toBe('Param with complex JSON default'); + expect(tool.schema.shape.complexJson._def.defaultValue()).toEqual({ + nested: { key: 'value' }, + array: [1, 2, 3], + }); + }); + + it('should ignore $fromAI calls embedded in non-string node parameters', () => { + mockNodeParameters = { + numberParam: 42, + booleanParam: false, + objectParam: { + innerString: "={{ $fromAI('innerParam', 'Inner param', 'string') }}", + innerNumber: 100, + innerObject: { + deepParam: "={{ $fromAI('deepParam', 'Deep param', 'number') }}", + }, + }, + arrayParam: [ + "={{ $fromAI('arrayParam1', 'First array param', 'string') }}", + 200, + "={{ $fromAI('nestedArrayParam', 'Nested array param', 'boolean') }}", + ], + }; + + const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + + expect(tool.schema.shape.innerParam).toBeInstanceOf(z.ZodString); + 
expect(tool.schema.shape.innerParam.description).toBe('Inner param'); + + expect(tool.schema.shape.deepParam).toBeInstanceOf(z.ZodNumber); + expect(tool.schema.shape.deepParam.description).toBe('Deep param'); + + expect(tool.schema.shape.arrayParam1).toBeInstanceOf(z.ZodString); + expect(tool.schema.shape.arrayParam1.description).toBe('First array param'); + + expect(tool.schema.shape.nestedArrayParam).toBeInstanceOf(z.ZodBoolean); + expect(tool.schema.shape.nestedArrayParam.description).toBe('Nested array param'); + }); + }); + + describe('Escaping and Special Characters', () => { + it('should handle escaped single quotes in parameter names and descriptions', () => { + mockNodeParameters = { + escapedQuotesParam: + "={{ $fromAI('paramName', 'Description with \\'escaped\\' quotes', 'string') }}", + }; + + const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + + expect(tool.schema.shape.paramName).toBeInstanceOf(z.ZodString); + expect(tool.schema.shape.paramName.description).toBe("Description with 'escaped' quotes"); + }); + + it('should handle escaped double quotes in parameter names and descriptions', () => { + mockNodeParameters = { + escapedQuotesParam: + '={{ $fromAI("paramName", "Description with \\"escaped\\" quotes", "string") }}', + }; + + const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + + expect(tool.schema.shape.paramName).toBeInstanceOf(z.ZodString); + expect(tool.schema.shape.paramName.description).toBe('Description with "escaped" quotes'); + }); + + it('should handle escaped backslashes in parameter names and descriptions', () => { + mockNodeParameters = { + escapedBackslashesParam: + "={{ $fromAI('paramName', 'Description with \\\\ backslashes', 'string') }}", + }; + + const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + + expect(tool.schema.shape.paramName).toBeInstanceOf(z.ZodString); + expect(tool.schema.shape.paramName.description).toBe('Description with \\ 
backslashes'); + }); + + it('should handle mixed escaped characters in parameter names and descriptions', () => { + mockNodeParameters = { + mixedEscapesParam: + '={{ $fromAI(`paramName`, \'Description with \\\'mixed" characters\', "number") }}', + }; + + const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + + expect(tool.schema.shape.paramName).toBeInstanceOf(z.ZodNumber); + expect(tool.schema.shape.paramName.description).toBe('Description with \'mixed" characters'); + }); + }); + + describe('Edge Cases and Limitations', () => { + it('should ignore excess arguments in $fromAI calls beyond the fourth argument', () => { + mockNodeParameters = { + excessArgsParam: + "={{ $fromAI('excessArgs', 'Param with excess arguments', 'string', 'default', 'extraArg1', 'extraArg2') }}", + }; + + const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + + expect(tool.schema.shape.excessArgs._def.innerType).toBeInstanceOf(z.ZodString); + expect(tool.schema.shape.excessArgs.description).toBe('Param with excess arguments'); + expect(tool.schema.shape.excessArgs._def.defaultValue()).toBe('default'); + }); + + it('should correctly parse $fromAI calls with nested parentheses', () => { + mockNodeParameters = { + nestedParenthesesParam: + "={{ $fromAI('paramWithNested', 'Description with ((nested)) parentheses', 'string') }}", + }; + + const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + + expect(tool.schema.shape.paramWithNested).toBeInstanceOf(z.ZodString); + expect(tool.schema.shape.paramWithNested.description).toBe( + 'Description with ((nested)) parentheses', + ); + }); + + it('should handle $fromAI calls with very long descriptions', () => { + const longDescription = 'A'.repeat(1000); + mockNodeParameters = { + longParam: `={{ $fromAI('longParam', '${longDescription}', 'string') }}`, + }; + + const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + + 
expect(tool.schema.shape.longParam).toBeInstanceOf(z.ZodString); + expect(tool.schema.shape.longParam.description).toBe(longDescription); + }); + + it('should handle $fromAI calls with only some parameters', () => { + mockNodeParameters = { + partialParam1: "={{ $fromAI('partial1') }}", + partialParam2: "={{ $fromAI('partial2', 'Description only') }}", + partialParam3: "={{ $fromAI('partial3', '', 'number') }}", + }; + + const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + + expect(tool.schema.shape.partial1).toBeInstanceOf(z.ZodString); + expect(tool.schema.shape.partial2).toBeInstanceOf(z.ZodString); + expect(tool.schema.shape.partial3).toBeInstanceOf(z.ZodNumber); + }); + }); + + describe('Unicode and Internationalization', () => { + it('should handle $fromAI calls with unicode characters', () => { + mockNodeParameters = { + unicodeParam: "={{ $fromAI('unicodeParam', '🌈 Unicode parameter 你好', 'string') }}", + }; + + const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + + expect(tool.schema.shape.unicodeParam).toBeInstanceOf(z.ZodString); + expect(tool.schema.shape.unicodeParam.description).toBe('🌈 Unicode parameter 你好'); + }); }); }); diff --git a/packages/core/test/Credentials.test.ts b/packages/core/test/Credentials.test.ts index ada86a07b0..fa7be59267 100644 --- a/packages/core/test/Credentials.test.ts +++ b/packages/core/test/Credentials.test.ts @@ -1,6 +1,7 @@ -import { Container } from 'typedi'; import { mock } from 'jest-mock-extended'; import type { CredentialInformation } from 'n8n-workflow'; +import { Container } from 'typedi'; + import { Cipher } from '@/Cipher'; import { Credentials } from '@/Credentials'; import type { InstanceSettings } from '@/InstanceSettings'; diff --git a/packages/core/test/FileSystem.manager.test.ts b/packages/core/test/FileSystem.manager.test.ts index 7087242726..581974c0e9 100644 --- a/packages/core/test/FileSystem.manager.test.ts +++ 
b/packages/core/test/FileSystem.manager.test.ts @@ -1,9 +1,11 @@ -import path from 'node:path'; import fs from 'node:fs'; import fsp from 'node:fs/promises'; import { tmpdir } from 'node:os'; +import path from 'node:path'; + import { FileSystemManager } from '@/BinaryData/FileSystem.manager'; import { isStream } from '@/ObjectStore/utils'; + import { toFileId, toStream } from './utils'; jest.mock('fs'); diff --git a/packages/core/test/InstanceSettings.test.ts b/packages/core/test/InstanceSettings.test.ts index 414f875274..64b6840f2f 100644 --- a/packages/core/test/InstanceSettings.test.ts +++ b/packages/core/test/InstanceSettings.test.ts @@ -1,4 +1,5 @@ import fs from 'fs'; + import { InstanceSettings } from '@/InstanceSettings'; describe('InstanceSettings', () => { diff --git a/packages/core/test/NodeExecuteFunctions.test.ts b/packages/core/test/NodeExecuteFunctions.test.ts index 3af9c752f6..421b7cd247 100644 --- a/packages/core/test/NodeExecuteFunctions.test.ts +++ b/packages/core/test/NodeExecuteFunctions.test.ts @@ -1,20 +1,9 @@ -import type { SecureContextOptions } from 'tls'; -import { - cleanupParameterData, - copyInputItems, - ensureType, - getBinaryDataBuffer, - isFilePathBlocked, - parseIncomingMessage, - parseRequestObject, - proxyRequestToAxios, - removeEmptyBody, - setBinaryDataBuffer, -} from '@/NodeExecuteFunctions'; -import { DateTime } from 'luxon'; import { mkdtempSync, readFileSync } from 'fs'; import type { IncomingMessage } from 'http'; +import type { Agent } from 'https'; import { mock } from 'jest-mock-extended'; +import toPlainObject from 'lodash/toPlainObject'; +import { DateTime } from 'luxon'; import type { IBinaryData, IHttpRequestMethods, @@ -28,14 +17,26 @@ import type { WorkflowHooks, } from 'n8n-workflow'; import { ExpressionError } from 'n8n-workflow'; -import { BinaryDataService } from '@/BinaryData/BinaryData.service'; import nock from 'nock'; import { tmpdir } from 'os'; import { join } from 'path'; +import type { 
SecureContextOptions } from 'tls'; import Container from 'typedi'; -import type { Agent } from 'https'; -import toPlainObject from 'lodash/toPlainObject'; + +import { BinaryDataService } from '@/BinaryData/BinaryData.service'; import { InstanceSettings } from '@/InstanceSettings'; +import { + cleanupParameterData, + copyInputItems, + ensureType, + getBinaryDataBuffer, + isFilePathBlocked, + parseIncomingMessage, + parseRequestObject, + proxyRequestToAxios, + removeEmptyBody, + setBinaryDataBuffer, +} from '@/NodeExecuteFunctions'; const temporaryDir = mkdtempSync(join(tmpdir(), 'n8n')); diff --git a/packages/core/test/ObjectStore.manager.test.ts b/packages/core/test/ObjectStore.manager.test.ts index abc1f24c3a..f01e170213 100644 --- a/packages/core/test/ObjectStore.manager.test.ts +++ b/packages/core/test/ObjectStore.manager.test.ts @@ -1,9 +1,11 @@ -import fs from 'node:fs/promises'; import { mock } from 'jest-mock-extended'; +import fs from 'node:fs/promises'; + import { ObjectStoreManager } from '@/BinaryData/ObjectStore.manager'; import { ObjectStoreService } from '@/ObjectStore/ObjectStore.service.ee'; -import { isStream } from '@/ObjectStore/utils'; import type { MetadataResponseHeaders } from '@/ObjectStore/types'; +import { isStream } from '@/ObjectStore/utils'; + import { mockInstance, toFileId, toStream } from './utils'; jest.mock('fs/promises'); diff --git a/packages/core/test/ObjectStore.service.test.ts b/packages/core/test/ObjectStore.service.test.ts index d39f08e1e2..77936c20f0 100644 --- a/packages/core/test/ObjectStore.service.test.ts +++ b/packages/core/test/ObjectStore.service.test.ts @@ -1,6 +1,7 @@ import axios from 'axios'; -import { ObjectStoreService } from '@/ObjectStore/ObjectStore.service.ee'; import { Readable } from 'stream'; + +import { ObjectStoreService } from '@/ObjectStore/ObjectStore.service.ee'; import { writeBlockedMessage } from '@/ObjectStore/utils'; jest.mock('axios'); diff --git a/packages/core/test/SSHClientsManager.test.ts 
b/packages/core/test/SSHClientsManager.test.ts index a7ceabe9f9..132a54baef 100644 --- a/packages/core/test/SSHClientsManager.test.ts +++ b/packages/core/test/SSHClientsManager.test.ts @@ -1,5 +1,6 @@ -import { Client } from 'ssh2'; import type { SSHCredentials } from 'n8n-workflow'; +import { Client } from 'ssh2'; + import { SSHClientsManager } from '@/SSHClientsManager'; describe('SSHClientsManager', () => { diff --git a/packages/core/test/ScheduledTaskManager.test.ts b/packages/core/test/ScheduledTaskManager.test.ts index 15d5f7d487..3ff8837ca9 100644 --- a/packages/core/test/ScheduledTaskManager.test.ts +++ b/packages/core/test/ScheduledTaskManager.test.ts @@ -1,5 +1,5 @@ -import type { Workflow } from 'n8n-workflow'; import { mock } from 'jest-mock-extended'; +import type { Workflow } from 'n8n-workflow'; import type { InstanceSettings } from '@/InstanceSettings'; import { ScheduledTaskManager } from '@/ScheduledTaskManager'; diff --git a/packages/core/test/Validation.test.ts b/packages/core/test/Validation.test.ts index a19422a090..04ad3c134e 100644 --- a/packages/core/test/Validation.test.ts +++ b/packages/core/test/Validation.test.ts @@ -1,4 +1,5 @@ import type { IDataObject, INode, INodeType } from 'n8n-workflow'; + import { validateValueAgainstSchema } from '@/NodeExecuteFunctions'; describe('Validation', () => { diff --git a/packages/core/test/WorkflowExecute.test.ts b/packages/core/test/WorkflowExecute.test.ts index d14a4e3fd1..6d1927fb88 100644 --- a/packages/core/test/WorkflowExecute.test.ts +++ b/packages/core/test/WorkflowExecute.test.ts @@ -5,6 +5,7 @@ import { NodeExecutionOutput, Workflow, } from 'n8n-workflow'; + import { WorkflowExecute } from '@/WorkflowExecute'; import * as Helpers from './helpers'; diff --git a/packages/core/test/WorkflowExecutionMetadata.test.ts b/packages/core/test/WorkflowExecutionMetadata.test.ts index cdb50c9737..63d0892e6a 100644 --- a/packages/core/test/WorkflowExecutionMetadata.test.ts +++ 
b/packages/core/test/WorkflowExecutionMetadata.test.ts @@ -1,3 +1,6 @@ +import type { IRunExecutionData } from 'n8n-workflow'; + +import { InvalidExecutionMetadataError } from '@/errors/invalid-execution-metadata.error'; import { setWorkflowExecutionMetadata, setAllWorkflowExecutionMetadata, @@ -5,8 +8,6 @@ import { getWorkflowExecutionMetadata, getAllWorkflowExecutionMetadata, } from '@/ExecutionMetadata'; -import { InvalidExecutionMetadataError } from '@/errors/invalid-execution-metadata.error'; -import type { IRunExecutionData } from 'n8n-workflow'; describe('Execution Metadata functions', () => { test('setWorkflowExecutionMetadata will set a value', () => { diff --git a/packages/core/test/helpers/constants.ts b/packages/core/test/helpers/constants.ts index 70819478ad..f3de1c667c 100644 --- a/packages/core/test/helpers/constants.ts +++ b/packages/core/test/helpers/constants.ts @@ -5,6 +5,7 @@ import type { WorkflowTestData, } from 'n8n-workflow'; import { NodeConnectionType } from 'n8n-workflow'; + import { If } from '../../../nodes-base/dist/nodes/If/If.node'; import { Merge } from '../../../nodes-base/dist/nodes/Merge/Merge.node'; import { NoOp } from '../../../nodes-base/dist/nodes/NoOp/NoOp.node'; diff --git a/packages/core/test/helpers/index.ts b/packages/core/test/helpers/index.ts index 5f935ef850..5f0858ea41 100644 --- a/packages/core/test/helpers/index.ts +++ b/packages/core/test/helpers/index.ts @@ -1,8 +1,5 @@ -import path from 'path'; import { readdirSync, readFileSync } from 'fs'; - -const BASE_DIR = path.resolve(__dirname, '../../..'); - +import { mock } from 'jest-mock-extended'; import type { IDataObject, IDeferredPromise, @@ -17,11 +14,12 @@ import type { WorkflowTestData, INodeTypeData, } from 'n8n-workflow'; - import { ApplicationError, NodeHelpers, WorkflowHooks } from 'n8n-workflow'; +import path from 'path'; import { predefinedNodesTypes } from './constants'; -import { mock } from 'jest-mock-extended'; + +const BASE_DIR = 
path.resolve(__dirname, '../../..'); class NodeTypesClass implements INodeTypes { constructor(private nodeTypes: INodeTypeData = predefinedNodesTypes) {} diff --git a/packages/core/test/utils.ts b/packages/core/test/utils.ts index 8895875240..7f4862cabd 100644 --- a/packages/core/test/utils.ts +++ b/packages/core/test/utils.ts @@ -1,8 +1,8 @@ -import { Container } from 'typedi'; import { mock } from 'jest-mock-extended'; import { Duplex } from 'stream'; - import type { DeepPartial } from 'ts-essentials'; +import { Container } from 'typedi'; + import type { Class } from '@/Interfaces'; export const mockInstance = ( diff --git a/packages/design-system/package.json b/packages/design-system/package.json index 526a9e43c4..42f2f75111 100644 --- a/packages/design-system/package.json +++ b/packages/design-system/package.json @@ -1,6 +1,6 @@ { "name": "n8n-design-system", - "version": "1.51.0", + "version": "1.52.1", "main": "src/main.ts", "import": "src/main.ts", "scripts": { diff --git a/packages/design-system/src/__tests__/render.ts b/packages/design-system/src/__tests__/render.ts new file mode 100644 index 0000000000..afe27e6855 --- /dev/null +++ b/packages/design-system/src/__tests__/render.ts @@ -0,0 +1,20 @@ +import { render } from '@testing-library/vue'; + +import { N8nPlugin } from 'n8n-design-system/plugin'; + +type Component = Parameters[0]; +type RenderOptions = Parameters[1]; + +export const createComponentRenderer = (component: Component) => (options: RenderOptions) => { + const mergedOptions: RenderOptions = { + ...options, + global: { + ...(options?.global ?? {}), + stubs: { + ...(options?.global?.stubs ?? {}), + }, + plugins: [N8nPlugin, ...(options?.global?.plugins ?? 
[])], + }, + }; + return render(component, mergedOptions); +}; diff --git a/packages/design-system/src/__tests__/setup.ts b/packages/design-system/src/__tests__/setup.ts index 3ad85f014a..6eb1c426fc 100644 --- a/packages/design-system/src/__tests__/setup.ts +++ b/packages/design-system/src/__tests__/setup.ts @@ -4,3 +4,11 @@ import { config } from '@vue/test-utils'; import { N8nPlugin } from 'n8n-design-system/plugin'; config.global.plugins = [N8nPlugin]; + +window.ResizeObserver = + window.ResizeObserver || + vi.fn().mockImplementation(() => ({ + disconnect: vi.fn(), + observe: vi.fn(), + unobserve: vi.fn(), + })); diff --git a/packages/design-system/src/components/N8nFormBox/__tests__/FormBox.test.ts b/packages/design-system/src/components/N8nFormBox/__tests__/FormBox.test.ts new file mode 100644 index 0000000000..a309e1aa40 --- /dev/null +++ b/packages/design-system/src/components/N8nFormBox/__tests__/FormBox.test.ts @@ -0,0 +1,57 @@ +import { createComponentRenderer } from '../../../__tests__/render'; +import FormBox from '../FormBox.vue'; + +const render = createComponentRenderer(FormBox); + +describe('FormBox', () => { + it('should render the component', () => { + const { container } = render({ + props: { + title: 'Title', + inputs: [ + { + name: 'name', + properties: { + label: 'Name', + type: 'text', + required: true, + showRequiredAsterisk: true, + validateOnBlur: false, + autocomplete: 'email', + capitalize: true, + labelSize: 'small', + tagSize: 'small', + }, + }, + { + name: 'email', + properties: { + label: 'Email', + type: 'email', + required: true, + showRequiredAsterisk: true, + validateOnBlur: false, + autocomplete: 'email', + capitalize: true, + labelSize: 'medium', + tagSize: 'medium', + }, + }, + { + name: 'password', + properties: { + label: 'Password', + type: 'password', + required: true, + showRequiredAsterisk: true, + validateOnBlur: false, + autocomplete: 'current-password', + capitalize: true, + }, + }, + ], + }, + }); + 
expect(container).toMatchSnapshot(); + }); +}); diff --git a/packages/design-system/src/components/N8nFormBox/__tests__/__snapshots__/FormBox.test.ts.snap b/packages/design-system/src/components/N8nFormBox/__tests__/__snapshots__/FormBox.test.ts.snap new file mode 100644 index 0000000000..8138b44b8c --- /dev/null +++ b/packages/design-system/src/components/N8nFormBox/__tests__/__snapshots__/FormBox.test.ts.snap @@ -0,0 +1,259 @@ +// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html + +exports[`FormBox > should render the component 1`] = ` +
+
+
+ + + Title + + +
+
+
+ +
+ +
+
+ + +
+
+ + + + +
+ + + + + +
+ + + +
+
+ + +
+
+
+
+ + +
+
+ + + + +
+ + + + + +
+ + + +
+
+ + +
+
+
+
+ + +
+
+ + + + +
+ + + + + +
+ + + +
+
+ + +
+
+ +
+ +
+
+ +
+ +
+ + +
+
+`; diff --git a/packages/design-system/src/components/N8nFormInput/FormInput.vue b/packages/design-system/src/components/N8nFormInput/FormInput.vue index b1fbd4b6e3..b50868a303 100644 --- a/packages/design-system/src/components/N8nFormInput/FormInput.vue +++ b/packages/design-system/src/components/N8nFormInput/FormInput.vue @@ -49,7 +49,7 @@ export interface Props { inactiveLabel?: string; inactiveColor?: string; teleported?: boolean; - tagSize?: 'small' | 'medium'; + tagSize?: 'small' | 'medium' | 'large'; } const props = withDefaults(defineProps(), { @@ -59,7 +59,7 @@ const props = withDefaults(defineProps(), { showRequiredAsterisk: true, validateOnBlur: true, teleported: true, - tagSize: 'small', + tagSize: 'large', }); const emit = defineEmits<{ diff --git a/packages/design-system/src/components/N8nFormInputs/FormInputs.vue b/packages/design-system/src/components/N8nFormInputs/FormInputs.vue index 8a27b28137..261663de32 100644 --- a/packages/design-system/src/components/N8nFormInputs/FormInputs.vue +++ b/packages/design-system/src/components/N8nFormInputs/FormInputs.vue @@ -13,7 +13,6 @@ export type FormInputsProps = { columnView?: boolean; verticalSpacing?: '' | 'xs' | 's' | 'm' | 'l' | 'xl'; teleported?: boolean; - tagSize?: 'small' | 'medium'; }; type Value = string | number | boolean | null | undefined; @@ -24,7 +23,6 @@ const props = withDefaults(defineProps(), { columnView: false, verticalSpacing: '', teleported: true, - tagSize: 'small', }); const emit = defineEmits<{ @@ -129,7 +127,6 @@ onMounted(() => { :data-test-id="input.name" :show-validation-warnings="showValidationWarnings" :teleported="teleported" - :tag-size="tagSize" @update:model-value="(value: Value) => onUpdateModelValue(input.name, value)" @validate="(value: boolean) => onValidate(input.name, value)" @enter="onSubmit" diff --git a/packages/design-system/src/components/N8nInput/Input.vue b/packages/design-system/src/components/N8nInput/Input.vue index 39cda6ce30..776b0bf7c8 100644 --- 
a/packages/design-system/src/components/N8nInput/Input.vue +++ b/packages/design-system/src/components/N8nInput/Input.vue @@ -39,7 +39,7 @@ const props = withDefaults(defineProps(), { }); const resolvedSize = computed( - () => (props.size === 'xlarge' ? undefined : props.size) as ElementPlusSizePropType, + () => (props.size === 'medium' ? 'default' : props.size) as ElementPlusSizePropType, ); const classes = computed(() => { diff --git a/packages/design-system/src/components/N8nNavigationDropdown/NavigationDropdown.stories.ts b/packages/design-system/src/components/N8nNavigationDropdown/NavigationDropdown.stories.ts new file mode 100644 index 0000000000..6951cfc365 --- /dev/null +++ b/packages/design-system/src/components/N8nNavigationDropdown/NavigationDropdown.stories.ts @@ -0,0 +1,63 @@ +import { action } from '@storybook/addon-actions'; +import type { StoryFn } from '@storybook/vue3'; + +import NavigationDropdown from './NavigationDropdown.vue'; + +export default { + title: 'Atoms/NavigationDropdown', + component: NavigationDropdown, + argTypes: {}, +}; + +const methods = { + onSelect: action('select'), +}; + +const template: StoryFn = (args, { argTypes }) => ({ + setup: () => ({ args }), + props: Object.keys(argTypes), + components: { + NavigationDropdown, + }, + template: ` +
+ + + +
+ `, + methods, +}); + +const menuItems = [ + { + id: 'credentials', + title: 'Credentials', + submenu: [ + { + id: 'credentials-0', + title: 'Create', + disabled: true, + }, + { + id: 'credentials-1', + title: 'Credentials - 1', + icon: 'user', + }, + { + id: 'credentials-2', + title: 'Credentials - 2', + icon: 'user', + }, + ], + }, + { + id: 'variables', + title: 'Variables', + }, +]; + +export const primary = template.bind({}); +primary.args = { + menu: menuItems, +}; diff --git a/packages/design-system/src/components/N8nNavigationDropdown/NavigationDropdown.vue b/packages/design-system/src/components/N8nNavigationDropdown/NavigationDropdown.vue new file mode 100644 index 0000000000..7657d73211 --- /dev/null +++ b/packages/design-system/src/components/N8nNavigationDropdown/NavigationDropdown.vue @@ -0,0 +1,127 @@ + + + + + diff --git a/packages/design-system/src/components/N8nNavigationDropdown/__tests__/NavigationDropdown.spec.ts b/packages/design-system/src/components/N8nNavigationDropdown/__tests__/NavigationDropdown.spec.ts new file mode 100644 index 0000000000..465f1f0c2b --- /dev/null +++ b/packages/design-system/src/components/N8nNavigationDropdown/__tests__/NavigationDropdown.spec.ts @@ -0,0 +1,120 @@ +import userEvent from '@testing-library/user-event'; +import { configure, render, waitFor } from '@testing-library/vue'; +import { h } from 'vue'; +import { createRouter, createWebHistory } from 'vue-router'; + +import NavigationDropdown from '../NavigationDropdown.vue'; + +configure({ testIdAttribute: 'data-test-id' }); + +const router = createRouter({ + history: createWebHistory(), + routes: [ + { + path: '/', + name: 'home', + redirect: '/home', + }, + { + path: '/projects', + name: 'projects', + component: { template: '

projects

' }, + }, + ], +}); + +describe('N8nNavigationDropdown', () => { + beforeAll(async () => { + await router.push('/'); + + await router.isReady(); + }); + it('default slot should trigger first level', async () => { + const { getByTestId, queryByTestId } = render(NavigationDropdown, { + slots: { default: h('button', { ['data-test-id']: 'test-trigger' }) }, + props: { menu: [{ id: 'aaa', title: 'aaa', route: { name: 'projects' } }] }, + global: { + plugins: [router], + }, + }); + expect(getByTestId('test-trigger')).toBeVisible(); + expect(queryByTestId('navigation-menu-item')).not.toBeVisible(); + + await userEvent.hover(getByTestId('test-trigger')); + await waitFor(() => expect(queryByTestId('navigation-menu-item')).toBeVisible()); + }); + + it('redirect to route', async () => { + const { getByTestId, queryByTestId } = render(NavigationDropdown, { + slots: { default: h('button', { ['data-test-id']: 'test-trigger' }) }, + props: { + menu: [ + { + id: 'aaa', + title: 'aaa', + submenu: [{ id: 'bbb', title: 'bbb', route: { name: 'projects' } }], + }, + ], + }, + global: { + plugins: [router], + }, + }); + + expect(getByTestId('test-trigger')).toBeVisible(); + expect(queryByTestId('navigation-submenu')).not.toBeVisible(); + + await userEvent.hover(getByTestId('test-trigger')); + + await waitFor(() => expect(getByTestId('navigation-submenu')).toBeVisible()); + + await userEvent.click(getByTestId('navigation-submenu-item')); + + expect(router.currentRoute.value.name).toBe('projects'); + }); + + it('should render icons in submenu when provided', () => { + const { getByTestId } = render(NavigationDropdown, { + slots: { default: h('button', { ['data-test-id']: 'test-trigger' }) }, + props: { + menu: [ + { + id: 'aaa', + title: 'aaa', + submenu: [{ id: 'bbb', title: 'bbb', route: { name: 'projects' }, icon: 'user' }], + }, + ], + }, + global: { + plugins: [router], + }, + }); + + expect(getByTestId('navigation-submenu-item').querySelector('.submenu__icon')).toBeTruthy(); + }); + 
+ it('should propagate events', async () => { + const { getByTestId, emitted } = render(NavigationDropdown, { + slots: { default: h('button', { ['data-test-id']: 'test-trigger' }) }, + props: { + menu: [ + { + id: 'aaa', + title: 'aaa', + submenu: [{ id: 'bbb', title: 'bbb', route: { name: 'projects' }, icon: 'user' }], + }, + ], + }, + global: { + plugins: [router], + }, + }); + + await userEvent.click(getByTestId('navigation-submenu-item')); + + expect(emitted('itemClick')).toStrictEqual([ + [{ active: true, index: 'bbb', indexPath: ['-1', 'aaa', 'bbb'] }], + ]); + expect(emitted('select')).toStrictEqual([['bbb']]); + }); +}); diff --git a/packages/design-system/src/components/N8nNavigationDropdown/index.ts b/packages/design-system/src/components/N8nNavigationDropdown/index.ts new file mode 100644 index 0000000000..210f5d57df --- /dev/null +++ b/packages/design-system/src/components/N8nNavigationDropdown/index.ts @@ -0,0 +1,3 @@ +import N8nNavigationDropdown from './NavigationDropdown.vue'; + +export default N8nNavigationDropdown; diff --git a/packages/design-system/src/components/index.ts b/packages/design-system/src/components/index.ts index d4f44016f7..2d06e5b8c6 100644 --- a/packages/design-system/src/components/index.ts +++ b/packages/design-system/src/components/index.ts @@ -28,6 +28,7 @@ export { default as N8nLoading } from './N8nLoading'; export { default as N8nMarkdown } from './N8nMarkdown'; export { default as N8nMenu } from './N8nMenu'; export { default as N8nMenuItem } from './N8nMenuItem'; +export { default as N8nNavigationDropdown } from './N8nNavigationDropdown'; export { default as N8nNodeCreatorNode } from './N8nNodeCreatorNode'; export { default as N8nNodeIcon } from './N8nNodeIcon'; export { default as N8nNotice } from './N8nNotice'; diff --git a/packages/design-system/src/css/_tokens.dark.scss b/packages/design-system/src/css/_tokens.dark.scss index 8377f524c6..72963efcf5 100644 --- a/packages/design-system/src/css/_tokens.dark.scss +++ 
b/packages/design-system/src/css/_tokens.dark.scss @@ -212,6 +212,7 @@ --execution-selector-background: var(--prim-gray-740); --execution-selector-text: var(--color-text-base); --execution-select-all-text: var(--color-text-base); + --execution-card-text-waiting: var(--prim-color-secondary-tint-100); // NDV --color-run-data-background: var(--prim-gray-800); diff --git a/packages/design-system/src/css/_tokens.scss b/packages/design-system/src/css/_tokens.scss index 1690926232..56d5142c87 100644 --- a/packages/design-system/src/css/_tokens.scss +++ b/packages/design-system/src/css/_tokens.scss @@ -273,6 +273,7 @@ --execution-card-border-running: var(--prim-color-alt-b-tint-250); --execution-card-border-unknown: var(--prim-gray-120); --execution-card-background-hover: var(--color-foreground-light); + --execution-card-text-waiting: var(--color-secondary); --execution-selector-background: var(--color-background-dark); --execution-selector-text: var(--color-text-xlight); --execution-select-all-text: var(--color-danger); diff --git a/packages/design-system/src/directives/n8n-truncate.test.ts b/packages/design-system/src/directives/n8n-truncate.test.ts index 89cd771283..309cab4563 100644 --- a/packages/design-system/src/directives/n8n-truncate.test.ts +++ b/packages/design-system/src/directives/n8n-truncate.test.ts @@ -24,7 +24,7 @@ describe('Directive n8n-truncate', () => { }, }, ); - expect(html()).toBe('
This is a very long text that...
'); + expect(html()).toBe('
This is a very long text that ...
'); }); it('should truncate text to 30 chars in case of wrong argument', async () => { @@ -48,7 +48,7 @@ describe('Directive n8n-truncate', () => { }, }, ); - expect(html()).toBe('
This is a very long text that...
'); + expect(html()).toBe('
This is a very long text that ...
'); }); it('should truncate text to given length', async () => { @@ -72,6 +72,6 @@ describe('Directive n8n-truncate', () => { }, }, ); - expect(html()).toBe('
This is a very long text...
'); + expect(html()).toBe('
This is a very long text ...
'); }); }); diff --git a/packages/design-system/src/types/form.ts b/packages/design-system/src/types/form.ts index 45a054a012..bffaf67c81 100644 --- a/packages/design-system/src/types/form.ts +++ b/packages/design-system/src/types/form.ts @@ -56,6 +56,7 @@ export type IFormInput = { focusInitially?: boolean; disabled?: boolean; labelSize?: 'small' | 'medium' | 'large'; + tagSize?: 'small' | 'medium' | 'large'; labelAlignment?: 'left' | 'right' | 'center'; tooltipText?: string; }; diff --git a/packages/design-system/src/utils/index.ts b/packages/design-system/src/utils/index.ts index be6ddc6375..d029baaa26 100644 --- a/packages/design-system/src/utils/index.ts +++ b/packages/design-system/src/utils/index.ts @@ -5,3 +5,4 @@ export * from './typeguards'; export * from './uid'; export * from './valueByPath'; export * from './testUtils'; +export * from './string'; diff --git a/packages/design-system/src/utils/labelUtil.spec.ts b/packages/design-system/src/utils/labelUtil.spec.ts index 51289d1736..99f961d485 100644 --- a/packages/design-system/src/utils/labelUtil.spec.ts +++ b/packages/design-system/src/utils/labelUtil.spec.ts @@ -1,3 +1,5 @@ +import type { MockInstance } from 'vitest'; + import { getInitials } from './labelUtil'; describe('labelUtil.getInitials', () => { @@ -26,4 +28,29 @@ describe('labelUtil.getInitials', () => { ])('turns "%s" into "%s"', (input, output) => { expect(getInitials(input)).toBe(output); }); + + describe('when Intl.Segmenter is not supported', () => { + let intlSpy: MockInstance; + + beforeEach(() => { + // No Intl.Segmenter support + intlSpy = vi.spyOn(globalThis, 'Intl', 'get'); + intlSpy.mockImplementation(() => ({})); + }); + + it.each([ + ['', ''], + + // simple words + ['Hello', 'He'], + ['Hello World', 'HW'], + ['H', 'H'], + + // multiple spaces + ['Double Space', 'DS'], + [' ', ''], + ])('turns "%s" into "%s"', (input, output) => { + expect(getInitials(input)).toBe(output); + }); + }); }); diff --git 
a/packages/design-system/src/utils/labelUtil.ts b/packages/design-system/src/utils/labelUtil.ts index ee6f8903f6..073b06179c 100644 --- a/packages/design-system/src/utils/labelUtil.ts +++ b/packages/design-system/src/utils/labelUtil.ts @@ -1,25 +1,19 @@ export const getInitials = (label: string): string => { - const words = label - .split(' ') - .filter((word) => word !== '') - .map((word) => [...new Intl.Segmenter().segment(word)]); + const isSegmenterSupported = typeof Intl !== 'undefined' && 'Segmenter' in Intl; - if (words.length === 0) { - return ''; - } else if (words.length === 1) { - // first two segments of the first word - return ( - words - .at(0) - ?.slice(0, 2) - .map((grapheme) => grapheme.segment) - .join('') ?? '' - ); - } else { - // first segment ok the first two words - return words - .slice(0, 2) - .map((word) => word.at(0)?.segment ?? '') - .join(''); - } + const segmentWord = (word: string): string[] => { + if (isSegmenterSupported) { + return [...new Intl.Segmenter().segment(word)].map((s) => s.segment); + } + return word.split(''); + }; + + const getFirstSegment = (word: string[]): string => word[0] || ''; + const getFirstTwoSegments = (word: string[]): string => word.slice(0, 2).join(''); + + const words = label.split(' ').filter(Boolean).map(segmentWord); + + if (words.length === 0) return ''; + if (words.length === 1) return getFirstTwoSegments(words[0]); + return words.slice(0, 2).map(getFirstSegment).join(''); }; diff --git a/packages/design-system/src/utils/string.test.ts b/packages/design-system/src/utils/string.test.ts index 6f65775f9a..0517d260d4 100644 --- a/packages/design-system/src/utils/string.test.ts +++ b/packages/design-system/src/utils/string.test.ts @@ -4,13 +4,13 @@ describe('Utils string', () => { describe('truncate', () => { it('should truncate text to 30 chars by default', () => { expect(truncate('This is a very long text that should be truncated')).toBe( - 'This is a very long text that...', + 'This is a very long 
text that ...', ); }); it('should truncate text to given length', () => { expect(truncate('This is a very long text that should be truncated', 25)).toBe( - 'This is a very long text...', + 'This is a very long text ...', ); }); }); diff --git a/packages/design-system/src/utils/string.ts b/packages/design-system/src/utils/string.ts index 9170b57c00..1c2b2aecfd 100644 --- a/packages/design-system/src/utils/string.ts +++ b/packages/design-system/src/utils/string.ts @@ -1,2 +1,2 @@ export const truncate = (text: string, length = 30): string => - text.length > length ? text.slice(0, length).trim() + '...' : text; + text.length > length ? text.slice(0, length) + '...' : text; diff --git a/packages/editor-ui/package.json b/packages/editor-ui/package.json index 2bd69ffb30..04fed80e31 100644 --- a/packages/editor-ui/package.json +++ b/packages/editor-ui/package.json @@ -1,6 +1,6 @@ { "name": "n8n-editor-ui", - "version": "1.61.0", + "version": "1.62.1", "description": "Workflow Editor UI for n8n", "main": "index.js", "scripts": { @@ -40,14 +40,15 @@ "@n8n/permissions": "workspace:*", "@sentry/vue": "^8.31.0", "@vue-flow/background": "^1.3.0", - "@vue-flow/controls": "^1.1.1", - "@vue-flow/core": "^1.33.5", - "@vue-flow/minimap": "^1.4.0", + "@vue-flow/controls": "^1.1.2", + "@vue-flow/core": "^1.41.2", + "@vue-flow/minimap": "^1.5.0", "@vue-flow/node-resizer": "^1.4.0", "@vueuse/components": "^10.11.0", "@vueuse/core": "^10.11.0", "axios": "catalog:", "bowser": "2.11.0", + "change-case": "^5.4.4", "chart.js": "^4.4.0", "codemirror-lang-html-n8n": "^1.0.0", "dateformat": "^3.0.3", diff --git a/packages/editor-ui/src/Interface.ts b/packages/editor-ui/src/Interface.ts index 972593bceb..21849861ef 100644 --- a/packages/editor-ui/src/Interface.ts +++ b/packages/editor-ui/src/Interface.ts @@ -874,6 +874,7 @@ export interface RootState { endpointFormWaiting: string; endpointWebhook: string; endpointWebhookTest: string; + endpointWebhookWaiting: string; pushConnectionActive: 
boolean; timezone: string; executionTimeout: number; @@ -905,6 +906,7 @@ export interface IRootState { endpointFormWaiting: string; endpointWebhook: string; endpointWebhookTest: string; + endpointWebhookWaiting: string; executionId: string | null; executingNode: string[]; executionWaitingForWebhook: boolean; diff --git a/packages/editor-ui/src/__tests__/data/canvas.ts b/packages/editor-ui/src/__tests__/data/canvas.ts index e0e748b89c..10f36dc9ba 100644 --- a/packages/editor-ui/src/__tests__/data/canvas.ts +++ b/packages/editor-ui/src/__tests__/data/canvas.ts @@ -1,11 +1,13 @@ -import { CanvasNodeHandleKey, CanvasNodeKey } from '@/constants'; -import { ref } from 'vue'; +import { CanvasKey, CanvasNodeHandleKey, CanvasNodeKey } from '@/constants'; +import { computed, ref } from 'vue'; import type { + CanvasInjectionData, CanvasNode, CanvasNodeData, CanvasNodeEventBusEvents, CanvasNodeHandleInjectionData, CanvasNodeInjectionData, + ConnectStartEvent, ExecutionOutputMapData, } from '@/types'; import { CanvasConnectionMode, CanvasNodeRenderType } from '@/types'; @@ -88,6 +90,21 @@ export function createCanvasNodeProps({ }; } +export function createCanvasProvide({ + isExecuting = false, + connectingHandle = undefined, +}: { + isExecuting?: boolean; + connectingHandle?: ConnectStartEvent; +} = {}) { + return { + [String(CanvasKey)]: { + isExecuting: ref(isExecuting), + connectingHandle: ref(connectingHandle), + } satisfies CanvasInjectionData, + }; +} + export function createCanvasNodeProvide({ id = 'node', label = 'Test Node', @@ -125,6 +142,7 @@ export function createCanvasHandleProvide({ isConnected = false, isConnecting = false, isReadOnly = false, + isRequired = false, }: { label?: string; mode?: CanvasConnectionMode; @@ -134,6 +152,7 @@ export function createCanvasHandleProvide({ isConnected?: boolean; isConnecting?: boolean; isReadOnly?: boolean; + isRequired?: boolean; } = {}) { return { [String(CanvasNodeHandleKey)]: { @@ -141,10 +160,11 @@ export function 
createCanvasHandleProvide({ mode: ref(mode), type: ref(type), index: ref(index), - isConnected: ref(isConnected), + isConnected: computed(() => isConnected), isConnecting: ref(isConnecting), - runData: ref(runData), isReadOnly: ref(isReadOnly), + isRequired: ref(isRequired), + runData: ref(runData), } satisfies CanvasNodeHandleInjectionData, }; } diff --git a/packages/editor-ui/src/__tests__/defaults.ts b/packages/editor-ui/src/__tests__/defaults.ts index a8f6f6ca05..6ea31053b3 100644 --- a/packages/editor-ui/src/__tests__/defaults.ts +++ b/packages/editor-ui/src/__tests__/defaults.ts @@ -16,6 +16,7 @@ export const defaultSettings: FrontendSettings = { endpointFormWaiting: '', endpointWebhook: '', endpointWebhookTest: '', + endpointWebhookWaiting: '', enterprise: { sharing: false, ldap: false, diff --git a/packages/editor-ui/src/api/api-keys.ts b/packages/editor-ui/src/api/api-keys.ts index 7fdd201b4a..b4b44c8e13 100644 --- a/packages/editor-ui/src/api/api-keys.ts +++ b/packages/editor-ui/src/api/api-keys.ts @@ -2,16 +2,16 @@ import type { ApiKey, IRestApiContext } from '@/Interface'; import { makeRestApiRequest } from '@/utils/apiUtils'; export async function getApiKeys(context: IRestApiContext): Promise { - return await makeRestApiRequest(context, 'GET', '/me/api-keys'); + return await makeRestApiRequest(context, 'GET', '/api-keys'); } export async function createApiKey(context: IRestApiContext): Promise { - return await makeRestApiRequest(context, 'POST', '/me/api-keys'); + return await makeRestApiRequest(context, 'POST', '/api-keys'); } export async function deleteApiKey( context: IRestApiContext, id: string, ): Promise<{ success: boolean }> { - return await makeRestApiRequest(context, 'DELETE', `/me/api-keys/${id}`); + return await makeRestApiRequest(context, 'DELETE', `/api-keys/${id}`); } diff --git a/packages/editor-ui/src/components/CredentialEdit/CredentialConfig.vue b/packages/editor-ui/src/components/CredentialEdit/CredentialConfig.vue index 
dd45647672..bac1079d75 100644 --- a/packages/editor-ui/src/components/CredentialEdit/CredentialConfig.vue +++ b/packages/editor-ui/src/components/CredentialEdit/CredentialConfig.vue @@ -83,10 +83,10 @@ const i18n = useI18n(); const telemetry = useTelemetry(); onBeforeMount(async () => { - if (rootStore.defaultLocale === 'en') return; - uiStore.activeCredentialType = props.credentialType.name; + if (rootStore.defaultLocale === 'en') return; + const key = `n8n-nodes-base.credentials.${props.credentialType.name}`; if (i18n.exists(key)) return; diff --git a/packages/editor-ui/src/components/CredentialEdit/CredentialEdit.vue b/packages/editor-ui/src/components/CredentialEdit/CredentialEdit.vue index ba3eb19ff0..efbde8d1fb 100644 --- a/packages/editor-ui/src/components/CredentialEdit/CredentialEdit.vue +++ b/packages/editor-ui/src/components/CredentialEdit/CredentialEdit.vue @@ -420,6 +420,7 @@ async function beforeClose() { } if (!keepEditing) { + uiStore.activeCredentialType = null; return true; } else if (!requiredPropertiesFilled.value) { showValidationWarning.value = true; @@ -986,6 +987,7 @@ async function onAuthTypeChanged(type: string): Promise { const credentialsForType = getNodeCredentialForSelectedAuthType(activeNodeType.value, type); if (credentialsForType) { selectedCredential.value = credentialsForType.name; + uiStore.activeCredentialType = credentialsForType.name; resetCredentialData(); // Update current node auth type so credentials dropdown can be displayed properly updateNodeAuthType(ndvStore.activeNode, type); diff --git a/packages/editor-ui/src/components/Error/NodeErrorView.vue b/packages/editor-ui/src/components/Error/NodeErrorView.vue index aa0eac4c69..749cc203e4 100644 --- a/packages/editor-ui/src/components/Error/NodeErrorView.vue +++ b/packages/editor-ui/src/components/Error/NodeErrorView.vue @@ -23,6 +23,7 @@ import type { ChatRequest } from '@/types/assistant.types'; import InlineAskAssistantButton from 
'n8n-design-system/components/InlineAskAssistantButton/InlineAskAssistantButton.vue'; import { useUIStore } from '@/stores/ui.store'; import { isCommunityPackageName } from '@/utils/nodeTypesUtils'; +import { useAIAssistantHelpers } from '@/composables/useAIAssistantHelpers'; type Props = { // TODO: .node can be undefined @@ -34,6 +35,7 @@ const props = defineProps(); const clipboard = useClipboard(); const toast = useToast(); const i18n = useI18n(); +const assistantHelpers = useAIAssistantHelpers(); const nodeTypesStore = useNodeTypesStore(); const ndvStore = useNDVStore(); @@ -124,33 +126,11 @@ const isAskAssistantAvailable = computed(() => { const assistantAlreadyAsked = computed(() => { return assistantStore.isNodeErrorActive({ - error: simplifyErrorForAssistant(props.error), + error: assistantHelpers.simplifyErrorForAssistant(props.error), node: props.error.node || ndvStore.activeNode, }); }); -function simplifyErrorForAssistant( - error: NodeError | NodeApiError | NodeOperationError, -): ChatRequest.ErrorContext['error'] { - const simple: ChatRequest.ErrorContext['error'] = { - name: error.name, - message: error.message, - }; - if ('type' in error) { - simple.type = error.type; - } - if ('description' in error && error.description) { - simple.description = error.description; - } - if (error.stack) { - simple.stack = error.stack; - } - if ('lineNumber' in error) { - simple.lineNumber = error.lineNumber; - } - return simple; -} - function nodeVersionTag(nodeType: NodeError['node']): string { if (!nodeType || ('hidden' in nodeType && nodeType.hidden)) { return i18n.baseText('nodeSettings.deprecated'); diff --git a/packages/editor-ui/src/components/InlineExpressionEditor/InlineExpressionTip.vue b/packages/editor-ui/src/components/InlineExpressionEditor/InlineExpressionTip.vue index 1ff90df190..897d8304b6 100644 --- a/packages/editor-ui/src/components/InlineExpressionEditor/InlineExpressionTip.vue +++ 
b/packages/editor-ui/src/components/InlineExpressionEditor/InlineExpressionTip.vue @@ -145,6 +145,13 @@ watchDebounced( color: var(--color-text-base); font-size: var(--font-size-2xs); padding: var(--spacing-2xs); + + code { + font-size: var(--font-size-3xs); + background: var(--color-background-base); + padding: var(--spacing-5xs); + border-radius: var(--border-radius-base); + } } .content { @@ -165,13 +172,6 @@ watchDebounced( display: inline; } -code { - font-size: var(--font-size-3xs); - background: var(--color-background-base); - padding: var(--spacing-5xs); - border-radius: var(--border-radius-base); -} - .pill { display: inline-flex; align-items: center; diff --git a/packages/editor-ui/src/components/InputNodeSelect.vue b/packages/editor-ui/src/components/InputNodeSelect.vue index c7550a76c6..d0ec011e07 100644 --- a/packages/editor-ui/src/components/InputNodeSelect.vue +++ b/packages/editor-ui/src/components/InputNodeSelect.vue @@ -7,6 +7,7 @@ import { isPresent } from '@/utils/typesUtils'; import type { IConnectedNode, Workflow } from 'n8n-workflow'; import { computed } from 'vue'; import NodeIcon from './NodeIcon.vue'; +import { truncate } from 'n8n-design-system'; type Props = { nodes: IConnectedNode[]; @@ -100,11 +101,7 @@ function getMultipleNodesText(nodeName: string): string { } function title(nodeName: string, length = 30) { - const truncated = nodeName.substring(0, length); - if (truncated.length < nodeName.length) { - return `${truncated}...`; - } - return truncated; + return truncate(nodeName, length); } function subtitle(nodeName: string, depth: number) { diff --git a/packages/editor-ui/src/components/InputPanel.vue b/packages/editor-ui/src/components/InputPanel.vue index 24b7e7f6b2..1cc4da099d 100644 --- a/packages/editor-ui/src/components/InputPanel.vue +++ b/packages/editor-ui/src/components/InputPanel.vue @@ -24,6 +24,7 @@ import InputNodeSelect from './InputNodeSelect.vue'; import NodeExecuteButton from './NodeExecuteButton.vue'; import 
RunData from './RunData.vue'; import WireMeUp from './WireMeUp.vue'; +import { waitingNodeTooltip } from '@/utils/executionUtils'; type MappingMode = 'debugging' | 'mapping'; @@ -237,6 +238,9 @@ export default defineComponent({ isMultiInputNode(): boolean { return this.activeNodeType !== null && this.activeNodeType.inputs.length > 1; }, + waitingMessage(): string { + return waitingNodeTooltip(); + }, }, watch: { inputMode: { @@ -448,6 +452,11 @@ export default defineComponent({ + + - - -