diff --git a/.github/workflows/e2e-reusable.yml b/.github/workflows/e2e-reusable.yml index ab88930a3a..b55d6728d2 100644 --- a/.github/workflows/e2e-reusable.yml +++ b/.github/workflows/e2e-reusable.yml @@ -41,6 +41,11 @@ on: description: 'PR number to run tests for.' required: false type: number + node_view_version: + description: 'Node View version to run tests with.' + required: false + default: '1' + type: string secrets: CYPRESS_RECORD_KEY: description: 'Cypress record key.' @@ -160,6 +165,7 @@ jobs: spec: '${{ inputs.spec }}' env: NODE_OPTIONS: --dns-result-order=ipv4first + CYPRESS_NODE_VIEW_VERSION: ${{ inputs.node_view_version }} CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} E2E_TESTS: true diff --git a/.github/workflows/e2e-tests.yml b/.github/workflows/e2e-tests.yml index e7400adecb..2f63f61bc6 100644 --- a/.github/workflows/e2e-tests.yml +++ b/.github/workflows/e2e-tests.yml @@ -27,6 +27,11 @@ on: description: 'URL to call after workflow is done.' required: false default: '' + node_view_version: + description: 'Node View version to run tests with.' + required: false + default: '1' + type: string jobs: calls-start-url: @@ -46,6 +51,7 @@ jobs: branch: ${{ github.event.inputs.branch || 'master' }} user: ${{ github.event.inputs.user || 'PR User' }} spec: ${{ github.event.inputs.spec || 'e2e/*' }} + node_view_version: ${{ github.event.inputs.node_view_version || '1' }} secrets: CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }} diff --git a/CHANGELOG.md b/CHANGELOG.md index ab14dc462e..b1558420d0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,42 @@ +# [1.64.0](https://github.com/n8n-io/n8n/compare/n8n@1.63.0...n8n@1.64.0) (2024-10-16) + + +### Bug Fixes + +* Adjust arrow button colors in dark mode ([#11248](https://github.com/n8n-io/n8n/issues/11248)) ([439132c](https://github.com/n8n-io/n8n/commit/439132c291a812d57702c94eaa12878394ac4c69)) +* **core:** Ensure error reporter does not promote `info` to `error` messages ([#11245](https://github.com/n8n-io/n8n/issues/11245)) ([a7fc7fc](https://github.com/n8n-io/n8n/commit/a7fc7fc22997acec86dc94386c95349fd018f4ae)) +* **core:** Override executions mode if `regular` during worker startup ([#11250](https://github.com/n8n-io/n8n/issues/11250)) ([c0aa28c](https://github.com/n8n-io/n8n/commit/c0aa28c6cf3f77b04e04663217c9df8e3803ed3f)) +* **core:** Wrap nodes for tool use at a suitable time ([#11238](https://github.com/n8n-io/n8n/issues/11238)) ([c2fb881](https://github.com/n8n-io/n8n/commit/c2fb881d61291209802438d95892d052f5c82d43)) +* Don't show pinned data tooltip for pinned nodes ([#11249](https://github.com/n8n-io/n8n/issues/11249)) ([c2ad156](https://github.com/n8n-io/n8n/commit/c2ad15646d326a8f71e314d54efe202a5bcdd296)) +* **editor:** Bring back the "Forgot password" link on SigninView ([#11216](https://github.com/n8n-io/n8n/issues/11216)) ([4e78c46](https://github.com/n8n-io/n8n/commit/4e78c46a7450c7fc0694369944d4fb446cef2348)) +* **editor:** Fix chat crashing when rendering output-parsed content ([#11210](https://github.com/n8n-io/n8n/issues/11210)) ([4aaebfd](https://github.com/n8n-io/n8n/commit/4aaebfd4358f590e98c453ad4e65cc2c9d0f76f8)) +* **editor:** Make submit in ChangePasswordView work again ([#11227](https://github.com/n8n-io/n8n/issues/11227)) ([4f27b39](https://github.com/n8n-io/n8n/commit/4f27b39b45b58779d363980241e6e5e11b58f5da)) +* Expressions display actual result of evaluating expression inside string ([#11257](https://github.com/n8n-io/n8n/issues/11257)) 
([7f5f0a9](https://github.com/n8n-io/n8n/commit/7f5f0a9df3b3fae6e2f9787443ac1cf9415d5932)) +* **Google Ads Node:** Update to use v17 api ([#11243](https://github.com/n8n-io/n8n/issues/11243)) ([3d97f02](https://github.com/n8n-io/n8n/commit/3d97f02a8d2b6e5bc7c97c5271bed97417ecacd2)) +* **Google Calendar Node:** Fix issue with conference data types not loading ([#11185](https://github.com/n8n-io/n8n/issues/11185)) ([4012758](https://github.com/n8n-io/n8n/commit/401275884e5db0287e4eeffb3c7497dd5e024880)) +* **Google Calendar Node:** Mode to add or replace attendees in event update ([#11132](https://github.com/n8n-io/n8n/issues/11132)) ([6c6a8ef](https://github.com/n8n-io/n8n/commit/6c6a8efdea83cf7194304ce089d7b72d8f6c1a9d)) +* **HTTP Request Tool Node:** Respond with an error when receive binary response ([#11219](https://github.com/n8n-io/n8n/issues/11219)) ([0d23a7f](https://github.com/n8n-io/n8n/commit/0d23a7fb5ba41545f70c4848d30b90af91b1e7e6)) +* **MySQL Node:** Fix "Maximum call stack size exceeded" error when handling a large number of rows ([#11242](https://github.com/n8n-io/n8n/issues/11242)) ([b7ee0c4](https://github.com/n8n-io/n8n/commit/b7ee0c4087eae346bc7e5360130d6c812dbe99db)) +* **n8n Trigger Node:** Merge with Workflow Trigger node ([#11174](https://github.com/n8n-io/n8n/issues/11174)) ([6ec6b51](https://github.com/n8n-io/n8n/commit/6ec6b5197ae97eb86496effd458fcc0b9b223ef3)) +* **OpenAI Node:** Fix tool parameter parsing issue ([#11201](https://github.com/n8n-io/n8n/issues/11201)) ([5a1d81a](https://github.com/n8n-io/n8n/commit/5a1d81ad917fde5cd6a387fe2d4ec6aab6b71349)) +* **Set Node:** Fix issue with UI properties not being hidden ([#11263](https://github.com/n8n-io/n8n/issues/11263)) ([1affc27](https://github.com/n8n-io/n8n/commit/1affc27b6bf9a559061a06f92bebe8167d938665)) +* **Strava Trigger Node:** Fix issue with webhook not being deleted ([#11226](https://github.com/n8n-io/n8n/issues/11226)) ([566529c](https://github.com/n8n-io/n8n/commit/566529ca1149988a54a58b3c34bbe4d9f1add6db)) + + +### Features + +* Add tracking for node errors and update node graph ([#11060](https://github.com/n8n-io/n8n/issues/11060)) ([d3b05f1](https://github.com/n8n-io/n8n/commit/d3b05f1c54e62440666297d8e484ccd22168da48)) +* **core:** Dedupe ([#10101](https://github.com/n8n-io/n8n/issues/10101)) ([52dd2c7](https://github.com/n8n-io/n8n/commit/52dd2c76196c6895b47145c2b85a6895ce2874d4)) +* **editor:** Send workflow context to assistant store ([#11135](https://github.com/n8n-io/n8n/issues/11135)) ([fade9e4](https://github.com/n8n-io/n8n/commit/fade9e43c84a0ae1fbc80f3ee546a418970e2380)) +* **Gong Node:** New node ([#10777](https://github.com/n8n-io/n8n/issues/10777)) ([785b47f](https://github.com/n8n-io/n8n/commit/785b47feb3b83cf36aaed57123f8baca2bbab307)) + + +### Performance Improvements + +* **Google Sheets Node:** Don't load whole spreadsheet dataset to determine columns when appending data ([#11235](https://github.com/n8n-io/n8n/issues/11235)) ([26ad091](https://github.com/n8n-io/n8n/commit/26ad091f473bca4e5d3bdc257e0818be02e52db5)) + + + # [1.63.0](https://github.com/n8n-io/n8n/compare/n8n@1.62.1...n8n@1.63.0) (2024-10-09) diff --git a/cypress/e2e/10-undo-redo.cy.ts b/cypress/e2e/10-undo-redo.cy.ts index 7e3b5ef8ad..f54c2de9fa 100644 --- a/cypress/e2e/10-undo-redo.cy.ts +++ b/cypress/e2e/10-undo-redo.cy.ts @@ -20,6 +20,7 @@ describe('Undo/Redo', () => { WorkflowPage.actions.visit(); }); + // FIXME: Canvas V2: Fix redo connections it('should undo/redo adding node in the middle', () => { 
WorkflowPage.actions.addNodeToCanvas(SCHEDULE_TRIGGER_NODE_NAME); WorkflowPage.actions.addNodeToCanvas(CODE_NODE_NAME); @@ -114,6 +115,7 @@ describe('Undo/Redo', () => { WorkflowPage.getters.nodeConnections().should('have.length', 0); }); + // FIXME: Canvas V2: Fix moving of nodes via e2e tests it('should undo/redo moving nodes', () => { WorkflowPage.actions.addNodeToCanvas(SCHEDULE_TRIGGER_NODE_NAME); WorkflowPage.actions.addNodeToCanvas(CODE_NODE_NAME); @@ -146,18 +148,14 @@ describe('Undo/Redo', () => { it('should undo/redo deleting a connection using context menu', () => { WorkflowPage.actions.addNodeToCanvas(SCHEDULE_TRIGGER_NODE_NAME); WorkflowPage.actions.addNodeToCanvas(CODE_NODE_NAME); - WorkflowPage.getters.nodeConnections().realHover(); - cy.get('.connection-actions .delete') - .filter(':visible') - .should('be.visible') - .click({ force: true }); + WorkflowPage.actions.deleteNodeBetweenNodes(SCHEDULE_TRIGGER_NODE_NAME, CODE_NODE_NAME); WorkflowPage.getters.nodeConnections().should('have.length', 0); WorkflowPage.actions.hitUndo(); WorkflowPage.getters.nodeConnections().should('have.length', 1); WorkflowPage.actions.hitRedo(); WorkflowPage.getters.nodeConnections().should('have.length', 0); }); - + // FIXME: Canvas V2: Fix disconnecting by moving it('should undo/redo deleting a connection by moving it away', () => { WorkflowPage.actions.addNodeToCanvas(SCHEDULE_TRIGGER_NODE_NAME); WorkflowPage.actions.addNodeToCanvas(CODE_NODE_NAME); @@ -206,6 +204,7 @@ describe('Undo/Redo', () => { WorkflowPage.getters.disabledNodes().should('have.length', 2); }); + // FIXME: Canvas V2: Fix undo renaming node it('should undo/redo renaming node using keyboard shortcut', () => { WorkflowPage.actions.addNodeToCanvas(SCHEDULE_TRIGGER_NODE_NAME); WorkflowPage.actions.addNodeToCanvas(CODE_NODE_NAME); @@ -244,6 +243,7 @@ describe('Undo/Redo', () => { }); }); + // FIXME: Canvas V2: Figure out why moving doesn't work from e2e it('should undo/redo multiple steps', () => { WorkflowPage.actions.addNodeToCanvas(SCHEDULE_TRIGGER_NODE_NAME); WorkflowPage.actions.addNodeToCanvas(CODE_NODE_NAME); diff --git a/cypress/e2e/12-canvas-actions.cy.ts b/cypress/e2e/12-canvas-actions.cy.ts index 8a42521d84..e9244a1d12 100644 --- a/cypress/e2e/12-canvas-actions.cy.ts +++ b/cypress/e2e/12-canvas-actions.cy.ts @@ -16,6 +16,7 @@ describe('Canvas Actions', () => { WorkflowPage.actions.visit(); }); + // FIXME: Canvas V2: Missing execute button if no nodes it('should render canvas', () => { WorkflowPage.getters.nodeViewRoot().should('be.visible'); WorkflowPage.getters.canvasPlusButton().should('be.visible'); @@ -25,10 +26,11 @@ describe('Canvas Actions', () => { WorkflowPage.getters.executeWorkflowButton().should('be.visible'); }); + // FIXME: Canvas V2: Fix changing of connection it('should connect and disconnect a simple node', () => { WorkflowPage.actions.addNodeToCanvas(EDIT_FIELDS_SET_NODE_NAME); WorkflowPage.getters.nodeViewBackground().click(600, 200, { force: true }); - cy.get('.jtk-connector').should('have.length', 1); + WorkflowPage.getters.nodeConnections().should('have.length', 1); WorkflowPage.getters.nodeViewBackground().click(600, 400, { force: true }); WorkflowPage.actions.addNodeToCanvas(EDIT_FIELDS_SET_NODE_NAME); @@ -40,16 +42,16 @@ describe('Canvas Actions', () => { ); WorkflowPage.getters - .canvasNodeInputEndpointByName(`${EDIT_FIELDS_SET_NODE_NAME}1`) - .should('have.class', 'jtk-endpoint-connected'); + .getConnectionBetweenNodes(MANUAL_TRIGGER_NODE_DISPLAY_NAME, `${EDIT_FIELDS_SET_NODE_NAME}1`) + 
.should('be.visible'); - cy.get('.jtk-connector').should('have.length', 1); + WorkflowPage.getters.nodeConnections().should('have.length', 1); // Disconnect Set1 cy.drag( WorkflowPage.getters.getEndpointSelector('input', `${EDIT_FIELDS_SET_NODE_NAME}1`), [-200, 100], ); - cy.get('.jtk-connector').should('have.length', 0); + WorkflowPage.getters.nodeConnections().should('have.length', 0); }); it('should add first step', () => { @@ -74,7 +76,7 @@ describe('Canvas Actions', () => { it('should add a connected node using plus endpoint', () => { WorkflowPage.actions.addNodeToCanvas(MANUAL_TRIGGER_NODE_NAME); - cy.get('.plus-endpoint').should('be.visible').click(); + WorkflowPage.getters.canvasNodePlusEndpointByName(MANUAL_TRIGGER_NODE_DISPLAY_NAME).click(); WorkflowPage.getters.nodeCreatorSearchBar().should('be.visible'); WorkflowPage.getters.nodeCreatorSearchBar().type(CODE_NODE_NAME); WorkflowPage.getters.nodeCreatorSearchBar().type('{enter}'); @@ -85,7 +87,7 @@ describe('Canvas Actions', () => { it('should add a connected node dragging from node creator', () => { WorkflowPage.actions.addNodeToCanvas(MANUAL_TRIGGER_NODE_NAME); - cy.get('.plus-endpoint').should('be.visible').click(); + WorkflowPage.getters.canvasNodePlusEndpointByName(MANUAL_TRIGGER_NODE_DISPLAY_NAME).click(); WorkflowPage.getters.nodeCreatorSearchBar().should('be.visible'); WorkflowPage.getters.nodeCreatorSearchBar().type(CODE_NODE_NAME); cy.drag(WorkflowPage.getters.nodeCreatorNodeItems().first(), [100, 100], { @@ -99,7 +101,7 @@ describe('Canvas Actions', () => { it('should open a category when trying to drag and drop it on the canvas', () => { WorkflowPage.actions.addNodeToCanvas(MANUAL_TRIGGER_NODE_NAME); - cy.get('.plus-endpoint').should('be.visible').click(); + WorkflowPage.getters.canvasNodePlusEndpointByName(MANUAL_TRIGGER_NODE_DISPLAY_NAME).click(); WorkflowPage.getters.nodeCreatorSearchBar().should('be.visible'); WorkflowPage.getters.nodeCreatorSearchBar().type(CODE_NODE_NAME); cy.drag(WorkflowPage.getters.nodeCreatorActionItems().first(), [100, 100], { @@ -114,7 +116,7 @@ describe('Canvas Actions', () => { it('should add disconnected node if nothing is selected', () => { WorkflowPage.actions.addNodeToCanvas(MANUAL_TRIGGER_NODE_NAME); // Deselect nodes - WorkflowPage.getters.nodeViewBackground().click({ force: true }); + WorkflowPage.getters.nodeView().click({ force: true }); WorkflowPage.actions.addNodeToCanvas(CODE_NODE_NAME); WorkflowPage.getters.canvasNodes().should('have.length', 2); WorkflowPage.getters.nodeConnections().should('have.length', 0); @@ -136,10 +138,10 @@ describe('Canvas Actions', () => { WorkflowPage.getters.nodeConnections().should('have.length', 3); WorkflowPage.getters.canvasNodeByName(EDIT_FIELDS_SET_NODE_NAME).then(($editFieldsNode) => { - const editFieldsNodeLeft = parseFloat($editFieldsNode.css('left')); + const editFieldsNodeLeft = WorkflowPage.getters.getNodeLeftPosition($editFieldsNode); WorkflowPage.getters.canvasNodeByName(HTTP_REQUEST_NODE_NAME).then(($httpNode) => { - const httpNodeLeft = parseFloat($httpNode.css('left')); + const httpNodeLeft = WorkflowPage.getters.getNodeLeftPosition($httpNode); expect(httpNodeLeft).to.be.lessThan(editFieldsNodeLeft); }); }); @@ -159,10 +161,12 @@ describe('Canvas Actions', () => { WorkflowPage.getters.canvasNodeByName(MANUAL_TRIGGER_NODE_DISPLAY_NAME).click(); WorkflowPage.actions.addNodeToCanvas(CODE_NODE_NAME); WorkflowPage.getters.nodeConnections().first().realHover(); - cy.get('.connection-actions .delete').first().click({ force: true }); + 
WorkflowPage.actions.deleteNodeBetweenNodes(MANUAL_TRIGGER_NODE_DISPLAY_NAME, CODE_NODE_NAME); + WorkflowPage.getters.nodeConnections().should('have.length', 0); }); + // FIXME: Canvas V2: Fix disconnecting of connection by dragging it it('should delete a connection by moving it away from endpoint', () => { WorkflowPage.actions.addNodeToCanvas(MANUAL_TRIGGER_NODE_NAME); WorkflowPage.getters.canvasNodeByName(MANUAL_TRIGGER_NODE_DISPLAY_NAME).click(); @@ -216,10 +220,10 @@ describe('Canvas Actions', () => { WorkflowPage.actions.hitSelectAll(); WorkflowPage.actions.hitCopy(); - successToast().should('contain', 'Copied!'); + successToast().should('contain', 'Copied to clipboard'); WorkflowPage.actions.copyNode(CODE_NODE_NAME); - successToast().should('contain', 'Copied!'); + successToast().should('contain', 'Copied to clipboard'); }); it('should select/deselect all nodes', () => { @@ -231,17 +235,31 @@ describe('Canvas Actions', () => { WorkflowPage.getters.selectedNodes().should('have.length', 0); }); + // FIXME: Canvas V2: Selection via arrow keys is broken it('should select nodes using arrow keys', () => { WorkflowPage.actions.addNodeToCanvas(MANUAL_TRIGGER_NODE_NAME); WorkflowPage.getters.canvasNodeByName(MANUAL_TRIGGER_NODE_DISPLAY_NAME).click(); WorkflowPage.actions.addNodeToCanvas(CODE_NODE_NAME); cy.wait(500); cy.get('body').type('{leftArrow}'); - WorkflowPage.getters.canvasNodes().first().should('have.class', 'jtk-drag-selected'); + const selectedCanvasNodes = () => + cy.ifCanvasVersion( + () => WorkflowPage.getters.canvasNodes(), + () => WorkflowPage.getters.canvasNodes().parent(), + ); + + cy.ifCanvasVersion( + () => selectedCanvasNodes().first().should('have.class', 'jtk-drag-selected'), + () => selectedCanvasNodes().first().should('have.class', 'selected'), + ); cy.get('body').type('{rightArrow}'); - WorkflowPage.getters.canvasNodes().last().should('have.class', 'jtk-drag-selected'); + cy.ifCanvasVersion( + () => selectedCanvasNodes().last().should('have.class', 'jtk-drag-selected'), + () => selectedCanvasNodes().last().should('have.class', 'selected'), + ); }); + // FIXME: Canvas V2: Selection via shift and arrow keys is broken it('should select nodes using shift and arrow keys', () => { WorkflowPage.actions.addNodeToCanvas(MANUAL_TRIGGER_NODE_NAME); WorkflowPage.getters.canvasNodeByName(MANUAL_TRIGGER_NODE_DISPLAY_NAME).click(); @@ -251,6 +269,7 @@ describe('Canvas Actions', () => { WorkflowPage.getters.selectedNodes().should('have.length', 2); }); + // FIXME: Canvas V2: Fix select & deselect it('should not break lasso selection when dragging node action buttons', () => { WorkflowPage.actions.addNodeToCanvas(MANUAL_TRIGGER_NODE_NAME); WorkflowPage.getters @@ -262,6 +281,7 @@ describe('Canvas Actions', () => { WorkflowPage.actions.testLassoSelection([100, 100], [200, 200]); }); + // FIXME: Canvas V2: Fix select & deselect it('should not break lasso selection with multiple clicks on node action buttons', () => { WorkflowPage.actions.addNodeToCanvas(MANUAL_TRIGGER_NODE_NAME); WorkflowPage.actions.testLassoSelection([100, 100], [200, 200]); diff --git a/cypress/e2e/12-canvas.cy.ts b/cypress/e2e/12-canvas.cy.ts index 4c7cccaafe..ecfb325de2 100644 --- a/cypress/e2e/12-canvas.cy.ts +++ b/cypress/e2e/12-canvas.cy.ts @@ -9,6 +9,7 @@ import { } from './../constants'; import { NDV, WorkflowExecutionsTab } from '../pages'; import { WorkflowPage as WorkflowPageClass } from '../pages/workflow'; +import { isCanvasV2 } from '../utils/workflowUtils'; const WorkflowPage = new WorkflowPageClass(); 
const ExecutionsTab = new WorkflowExecutionsTab(); @@ -52,15 +53,15 @@ describe('Canvas Node Manipulation and Navigation', () => { cy.reload(); cy.waitForLoad(); // Make sure outputless switch was connected correctly - cy.get( - `[data-target-node="${SWITCH_NODE_NAME}1"][data-source-node="${EDIT_FIELDS_SET_NODE_NAME}3"]`, - ).should('be.visible'); + WorkflowPage.getters + .getConnectionBetweenNodes(`${EDIT_FIELDS_SET_NODE_NAME}3`, `${SWITCH_NODE_NAME}1`) + .should('exist'); // Make sure all connections are there after reload for (let i = 0; i < desiredOutputs; i++) { const setName = `${EDIT_FIELDS_SET_NODE_NAME}${i > 0 ? i : ''}`; WorkflowPage.getters - .canvasNodeInputEndpointByName(setName) - .should('have.class', 'jtk-endpoint-connected'); + .getConnectionBetweenNodes(`${SWITCH_NODE_NAME}`, setName) + .should('exist'); } }); @@ -69,9 +70,7 @@ describe('Canvas Node Manipulation and Navigation', () => { WorkflowPage.getters.canvasNodeByName(MANUAL_TRIGGER_NODE_DISPLAY_NAME).click(); for (let i = 0; i < 2; i++) { WorkflowPage.actions.addNodeToCanvas(EDIT_FIELDS_SET_NODE_NAME, true); - WorkflowPage.getters - .nodeViewBackground() - .click((i + 1) * 200, (i + 1) * 200, { force: true }); + WorkflowPage.getters.nodeView().click((i + 1) * 200, (i + 1) * 200, { force: true }); } WorkflowPage.actions.zoomToFit(); @@ -84,8 +83,6 @@ describe('Canvas Node Manipulation and Navigation', () => { WorkflowPage.getters.getEndpointSelector('input', `${EDIT_FIELDS_SET_NODE_NAME}1`), ); - cy.get('.rect-input-endpoint.jtk-endpoint-connected').should('have.length', 2); - // Connect Set1 and Set2 to merge cy.draganddrop( WorkflowPage.getters.getEndpointSelector('plus', EDIT_FIELDS_SET_NODE_NAME), @@ -95,20 +92,36 @@ describe('Canvas Node Manipulation and Navigation', () => { WorkflowPage.getters.getEndpointSelector('plus', `${EDIT_FIELDS_SET_NODE_NAME}1`), WorkflowPage.getters.getEndpointSelector('input', MERGE_NODE_NAME, 1), ); - - cy.get('.rect-input-endpoint.jtk-endpoint-connected').should('have.length', 4); + const checkConnections = () => { + WorkflowPage.getters + .getConnectionBetweenNodes( + MANUAL_TRIGGER_NODE_DISPLAY_NAME, + `${EDIT_FIELDS_SET_NODE_NAME}1`, + ) + .should('exist'); + WorkflowPage.getters + .getConnectionBetweenNodes(EDIT_FIELDS_SET_NODE_NAME, MERGE_NODE_NAME) + .should('exist'); + WorkflowPage.getters + .getConnectionBetweenNodes(`${EDIT_FIELDS_SET_NODE_NAME}1`, MERGE_NODE_NAME) + .should('exist'); + }; + checkConnections(); // Make sure all connections are there after save & reload WorkflowPage.actions.saveWorkflowOnButtonClick(); cy.reload(); cy.waitForLoad(); - - cy.get('.rect-input-endpoint.jtk-endpoint-connected').should('have.length', 4); + checkConnections(); + // cy.get('.rect-input-endpoint.jtk-endpoint-connected').should('have.length', 4); WorkflowPage.actions.executeWorkflow(); WorkflowPage.getters.stopExecutionButton().should('not.exist'); // If the merged set nodes are connected and executed correctly, there should be 2 items in the output of merge node - cy.get('[data-label="2 items"]').should('be.visible'); + cy.ifCanvasVersion( + () => cy.get('[data-label="2 items"]').should('be.visible'), + () => cy.getByTestId('canvas-node-output-handle').contains('2 items').should('be.visible'), + ); }); it('should add nodes and check execution success', () => { @@ -120,16 +133,42 @@ describe('Canvas Node Manipulation and Navigation', () => { WorkflowPage.actions.zoomToFit(); WorkflowPage.actions.executeWorkflow(); - cy.get('.jtk-connector.success').should('have.length', 3); - 
cy.get('.data-count').should('have.length', 4); - cy.get('.plus-draggable-endpoint').should('have.class', 'ep-success'); + cy.ifCanvasVersion( + () => cy.get('.jtk-connector.success').should('have.length', 3), + () => cy.get('[data-edge-status=success]').should('have.length', 3), + ); + cy.ifCanvasVersion( + () => cy.get('.data-count').should('have.length', 4), + () => cy.getByTestId('canvas-node-status-success').should('have.length', 4), + ); + + cy.ifCanvasVersion( + () => cy.get('.plus-draggable-endpoint').should('have.class', 'ep-success'), + () => cy.getByTestId('canvas-handle-plus').should('have.attr', 'data-plus-type', 'success'), + ); WorkflowPage.actions.addNodeToCanvas(CODE_NODE_NAME); WorkflowPage.actions.zoomToFit(); - cy.get('.plus-draggable-endpoint').filter(':visible').should('not.have.class', 'ep-success'); - cy.get('.jtk-connector.success').should('have.length', 3); - cy.get('.jtk-connector').should('have.length', 4); + cy.ifCanvasVersion( + () => + cy + .get('.plus-draggable-endpoint') + .filter(':visible') + .should('not.have.class', 'ep-success'), + () => + cy.getByTestId('canvas-handle-plus').should('not.have.attr', 'data-plus-type', 'success'), + ); + + cy.ifCanvasVersion( + () => cy.get('.jtk-connector.success').should('have.length', 3), + // The new version of the canvas correctly shows executed data being passed to the input of the next node + () => cy.get('[data-edge-status=success]').should('have.length', 4), + ); + cy.ifCanvasVersion( + () => cy.get('.data-count').should('have.length', 4), + () => cy.getByTestId('canvas-node-status-success').should('have.length', 4), + ); }); it('should delete node using context menu', () => { @@ -194,19 +233,29 @@ describe('Canvas Node Manipulation and Navigation', () => { WorkflowPage.getters.canvasNodes().should('have.length', 0); }); + // FIXME: Canvas V2: Figure out how to test moving of the node it('should move node', () => { WorkflowPage.actions.addNodeToCanvas(MANUAL_TRIGGER_NODE_NAME); WorkflowPage.getters.canvasNodeByName(MANUAL_TRIGGER_NODE_DISPLAY_NAME).click(); WorkflowPage.actions.addNodeToCanvas(CODE_NODE_NAME); + WorkflowPage.actions.zoomToFit(); WorkflowPage.getters .canvasNodes() .last() .then(($node) => { const { left, top } = $node.position(); - cy.drag('[data-test-id="canvas-node"].jtk-drag-selected', [50, 150], { - clickToFinish: true, - }); + + if (isCanvasV2()) { + cy.drag('.vue-flow__node', [300, 300], { + realMouse: true, + }); + } else { + cy.drag('[data-test-id="canvas-node"].jtk-drag-selected', [50, 150], { + clickToFinish: true, + }); + } + WorkflowPage.getters .canvasNodes() .last() @@ -218,91 +267,80 @@ describe('Canvas Node Manipulation and Navigation', () => { }); }); - it('should zoom in', () => { - WorkflowPage.getters.zoomInButton().should('be.visible').click(); - WorkflowPage.getters - .nodeView() - .should( - 'have.css', - 'transform', - `matrix(${ZOOM_IN_X1_FACTOR}, 0, 0, ${ZOOM_IN_X1_FACTOR}, 0, 0)`, + describe('Canvas Zoom Functionality', () => { + const getContainer = () => + cy.ifCanvasVersion( + () => WorkflowPage.getters.nodeView(), + () => WorkflowPage.getters.canvasViewport(), ); - WorkflowPage.getters.zoomInButton().click(); - WorkflowPage.getters - .nodeView() - .should( - 'have.css', - 'transform', - `matrix(${ZOOM_IN_X2_FACTOR}, 0, 0, ${ZOOM_IN_X2_FACTOR}, 0, 0)`, - ); - }); + const checkZoomLevel = (expectedFactor: number) => { + return getContainer().should(($nodeView) => { + const newTransform = $nodeView.css('transform'); + const newScale = 
parseFloat(newTransform.split(',')[0].slice(7)); - it('should zoom out', () => { - WorkflowPage.getters.zoomOutButton().should('be.visible').click(); - WorkflowPage.getters - .nodeView() - .should( - 'have.css', - 'transform', - `matrix(${ZOOM_OUT_X1_FACTOR}, 0, 0, ${ZOOM_OUT_X1_FACTOR}, 0, 0)`, - ); - WorkflowPage.getters.zoomOutButton().click(); - WorkflowPage.getters - .nodeView() - .should( - 'have.css', - 'transform', - `matrix(${ZOOM_OUT_X2_FACTOR}, 0, 0, ${ZOOM_OUT_X2_FACTOR}, 0, 0)`, - ); - }); + expect(newScale).to.be.closeTo(expectedFactor, 0.2); + }); + }; - it('should zoom using scroll or pinch gesture', () => { - WorkflowPage.actions.pinchToZoom(1, 'zoomIn'); - WorkflowPage.getters - .nodeView() - .should( - 'have.css', - 'transform', - `matrix(${PINCH_ZOOM_IN_FACTOR}, 0, 0, ${PINCH_ZOOM_IN_FACTOR}, 0, 0)`, + const zoomAndCheck = (action: 'zoomIn' | 'zoomOut', expectedFactor: number) => { + WorkflowPage.getters[`${action}Button`]().click(); + checkZoomLevel(expectedFactor); + }; + + it('should zoom in', () => { + WorkflowPage.getters.zoomInButton().should('be.visible'); + getContainer().then(($nodeView) => { + const initialTransform = $nodeView.css('transform'); + const initialScale = + initialTransform === 'none' ? 1 : parseFloat(initialTransform.split(',')[0].slice(7)); + + zoomAndCheck('zoomIn', initialScale * ZOOM_IN_X1_FACTOR); + zoomAndCheck('zoomIn', initialScale * ZOOM_IN_X2_FACTOR); + }); + }); + + it('should zoom out', () => { + zoomAndCheck('zoomOut', ZOOM_OUT_X1_FACTOR); + zoomAndCheck('zoomOut', ZOOM_OUT_X2_FACTOR); + }); + + it('should zoom using scroll or pinch gesture', () => { + WorkflowPage.actions.pinchToZoom(1, 'zoomIn'); + + // V2 Canvas is using the same zoom factor for both pinch and scroll + cy.ifCanvasVersion( + () => checkZoomLevel(PINCH_ZOOM_IN_FACTOR), + () => checkZoomLevel(ZOOM_IN_X1_FACTOR), ); - WorkflowPage.actions.pinchToZoom(1, 'zoomOut'); - // Zoom in 1x + Zoom out 1x should reset to default (=1) - WorkflowPage.getters.nodeView().should('have.css', 'transform', 'matrix(1, 0, 0, 1, 0, 0)'); + WorkflowPage.actions.pinchToZoom(1, 'zoomOut'); + checkZoomLevel(1); // Zoom in 1x + Zoom out 1x should reset to default (=1) - WorkflowPage.actions.pinchToZoom(1, 'zoomOut'); - WorkflowPage.getters - .nodeView() - .should( - 'have.css', - 'transform', - `matrix(${PINCH_ZOOM_OUT_FACTOR}, 0, 0, ${PINCH_ZOOM_OUT_FACTOR}, 0, 0)`, + WorkflowPage.actions.pinchToZoom(1, 'zoomOut'); + + cy.ifCanvasVersion( + () => checkZoomLevel(PINCH_ZOOM_OUT_FACTOR), + () => checkZoomLevel(ZOOM_OUT_X1_FACTOR), ); - }); + }); - it('should reset zoom', () => { - // Reset zoom should not appear until zoom level changed - WorkflowPage.getters.resetZoomButton().should('not.exist'); - WorkflowPage.getters.zoomInButton().click(); - WorkflowPage.getters.resetZoomButton().should('be.visible').click(); - WorkflowPage.getters - .nodeView() - .should( - 'have.css', - 'transform', - `matrix(${DEFAULT_ZOOM_FACTOR}, 0, 0, ${DEFAULT_ZOOM_FACTOR}, 0, 0)`, - ); - }); + it('should reset zoom', () => { + WorkflowPage.getters.resetZoomButton().should('not.exist'); + WorkflowPage.getters.zoomInButton().click(); + WorkflowPage.getters.resetZoomButton().should('be.visible').click(); + checkZoomLevel(DEFAULT_ZOOM_FACTOR); + }); - it('should zoom to fit', () => { - WorkflowPage.actions.addNodeToCanvas(MANUAL_TRIGGER_NODE_NAME); - WorkflowPage.actions.addNodeToCanvas(CODE_NODE_NAME); - WorkflowPage.actions.addNodeToCanvas(CODE_NODE_NAME); - WorkflowPage.actions.addNodeToCanvas(CODE_NODE_NAME); - // At 
this point last added node should be off-screen - WorkflowPage.getters.canvasNodes().last().should('not.be.visible'); - WorkflowPage.getters.zoomToFitButton().click(); - WorkflowPage.getters.canvasNodes().last().should('be.visible'); + it('should zoom to fit', () => { + WorkflowPage.actions.addNodeToCanvas(MANUAL_TRIGGER_NODE_NAME); + WorkflowPage.actions.addNodeToCanvas(CODE_NODE_NAME); + WorkflowPage.actions.addNodeToCanvas(CODE_NODE_NAME); + WorkflowPage.actions.addNodeToCanvas(CODE_NODE_NAME); + // At this point last added node should be off-screen + WorkflowPage.getters.canvasNodes().last().should('not.be.visible'); + WorkflowPage.getters.zoomToFitButton().click(); + WorkflowPage.getters.canvasNodes().last().should('be.visible'); + }); }); it('should disable node (context menu or shortcut)', () => { @@ -426,9 +464,9 @@ describe('Canvas Node Manipulation and Navigation', () => { cy.reload(); cy.waitForLoad(); WorkflowPage.getters.canvasNodes().should('have.length', 2); - cy.get('.rect-input-endpoint.jtk-endpoint-connected').should('have.length', 1); + WorkflowPage.getters.nodeConnections().should('have.length', 1); }); - + // FIXME: Canvas V2: Credentials should show issue on the first open it('should remove unknown credentials on pasting workflow', () => { cy.fixture('workflow-with-unknown-credentials.json').then((data) => { cy.get('body').paste(JSON.stringify(data)); @@ -441,6 +479,7 @@ describe('Canvas Node Manipulation and Navigation', () => { }); }); + // FIXME: Canvas V2: Unknown nodes should still render connection endpoints it('should render connections correctly if unkown nodes are present', () => { const unknownNodeName = 'Unknown node'; cy.createFixtureWorkflow('workflow-with-unknown-nodes.json', 'Unknown nodes'); diff --git a/cypress/e2e/16-form-trigger-node.cy.ts b/cypress/e2e/16-form-trigger-node.cy.ts index 0162479f7c..60fbd7c419 100644 --- a/cypress/e2e/16-form-trigger-node.cy.ts +++ b/cypress/e2e/16-form-trigger-node.cy.ts @@ -16,12 +16,14 @@ describe('n8n Form Trigger', () => { ndv.getters.parameterInput('formDescription').type('Test Form Description'); ndv.getters.parameterInput('fieldLabel').type('Test Field 1'); ndv.getters.backToCanvas().click(); - workflowPage.getters.nodeIssuesByName('n8n Form Trigger').should('not.exist'); + workflowPage.getters.nodeIssuesByName('On form submission').should('not.exist'); }); it('should fill up form fields', () => { - workflowPage.actions.addInitialNodeToCanvas('n8n Form Trigger'); - workflowPage.getters.canvasNodes().first().dblclick(); + workflowPage.actions.addInitialNodeToCanvas('n8n Form Trigger', { + isTrigger: true, + action: 'On new n8n Form event', + }); ndv.getters.parameterInput('formTitle').type('Test Form'); ndv.getters.parameterInput('formDescription').type('Test Form Description'); //fill up first field of type number @@ -96,6 +98,6 @@ describe('n8n Form Trigger', () => { .type('Your test form was successfully submitted'); ndv.getters.backToCanvas().click(); - workflowPage.getters.nodeIssuesByName('n8n Form Trigger').should('not.exist'); + workflowPage.getters.nodeIssuesByName('On form submission').should('not.exist'); }); }); diff --git a/cypress/e2e/19-execution.cy.ts b/cypress/e2e/19-execution.cy.ts index 81e11b1b63..5be2399253 100644 --- a/cypress/e2e/19-execution.cy.ts +++ b/cypress/e2e/19-execution.cy.ts @@ -1,6 +1,7 @@ import { SCHEDULE_TRIGGER_NODE_NAME, EDIT_FIELDS_SET_NODE_NAME } from '../constants'; import { NDV, WorkflowExecutionsTab, WorkflowPage as WorkflowPageClass } from '../pages'; import { 
clearNotifications, errorToast, successToast } from '../pages/notifications';
+import { isCanvasV2 } from '../utils/workflowUtils';
 
 const workflowPage = new WorkflowPageClass();
 const executionsTab = new WorkflowExecutionsTab();
@@ -117,15 +118,22 @@ describe('Execution', () => {
       .canvasNodeByName('Manual')
       .within(() => cy.get('.fa-check'))
       .should('exist');
-    workflowPage.getters
-      .canvasNodeByName('Wait')
-      .within(() => cy.get('.fa-sync-alt').should('not.visible'));
+
+    if (isCanvasV2()) {
+      workflowPage.getters
+        .canvasNodeByName('Wait')
+        .within(() => cy.get('.fa-sync-alt').should('not.exist'));
+    } else {
+      workflowPage.getters
+        .canvasNodeByName('Wait')
+        .within(() => cy.get('.fa-sync-alt').should('not.be.visible'));
+    }
+
     workflowPage.getters
       .canvasNodeByName('Set')
       .within(() => cy.get('.fa-check').should('not.exist'));
     successToast().should('be.visible');
-
     clearNotifications();
 
     // Clear execution data
     workflowPage.getters.clearExecutionDataButton().should('be.visible');
@@ -206,6 +214,7 @@ describe('Execution', () => {
     workflowPage.getters.clearExecutionDataButton().should('not.exist');
   });
 
+  // FIXME: Canvas V2: Webhook should show waiting state but it doesn't
   it('should test webhook workflow stop', () => {
     cy.createFixtureWorkflow('Webhook_wait_set.json');
 
@@ -267,9 +276,17 @@ describe('Execution', () => {
       .canvasNodeByName('Webhook')
       .within(() => cy.get('.fa-check'))
       .should('exist');
-    workflowPage.getters
-      .canvasNodeByName('Wait')
-      .within(() => cy.get('.fa-sync-alt').should('not.visible'));
+
+    if (isCanvasV2()) {
+      workflowPage.getters
+        .canvasNodeByName('Wait')
+        .within(() => cy.get('.fa-sync-alt').should('not.exist'));
+    } else {
+      workflowPage.getters
+        .canvasNodeByName('Wait')
+        .within(() => cy.get('.fa-sync-alt').should('not.be.visible'));
+    }
+
     workflowPage.getters
       .canvasNodeByName('Set')
       .within(() => cy.get('.fa-check').should('not.exist'));
@@ -295,6 +312,7 @@ describe('Execution', () => {
     });
   });
 
+  // FIXME: Canvas V2: Missing pinned states for `edge-label-wrapper`
   describe('connections should be colored differently for pinned data', () => {
     beforeEach(() => {
       cy.createFixtureWorkflow('Schedule_pinned.json');
diff --git a/cypress/e2e/2372-ado-prevent-clipping-params.cy.ts b/cypress/e2e/2372-ado-prevent-clipping-params.cy.ts
new file mode 100644
index 0000000000..260c6e48c9
--- /dev/null
+++ b/cypress/e2e/2372-ado-prevent-clipping-params.cy.ts
@@ -0,0 +1,86 @@
+import { NDV, WorkflowPage } from '../pages';
+
+const workflowPage = new WorkflowPage();
+const ndv = new NDV();
+
+describe('ADO-2362 ADO-2350 NDV Prevent clipping long parameters and scrolling to expression', () => {
+  it('should show last parameters and open at scroll top of parameters', () => {
+    workflowPage.actions.visit();
+    cy.createFixtureWorkflow('Test-workflow-with-long-parameters.json');
+    workflowPage.actions.openNode('Schedule Trigger');
+
+    ndv.getters.inlineExpressionEditorInput().should('be.visible');
+
+    ndv.actions.close();
+
+    workflowPage.actions.openNode('Edit Fields1');
+
+    // first parameter should be visible
+    ndv.getters.inputLabel().eq(0).should('include.text', 'Mode');
+    ndv.getters.inputLabel().eq(0).should('be.visible');
+
+    ndv.getters.inlineExpressionEditorInput().should('have.length', 2);
+
+    // last parameter in view should be visible
+    ndv.getters.inlineExpressionEditorInput().eq(0).should('have.text', 'should be visible!');
+    ndv.getters.inlineExpressionEditorInput().eq(0).should('be.visible');
+
+    // next parameter in view should not be visible
+    ndv.getters.inlineExpressionEditorInput().eq(1).should('have.text', 'not visible');
+    ndv.getters.inlineExpressionEditorInput().eq(1).should('not.be.visible');
+
+    ndv.actions.close();
+    workflowPage.actions.openNode('Schedule Trigger');
+
+    // first parameter (notice) should be visible
+    ndv.getters.nthParam(0).should('include.text', 'This workflow will run on the schedule ');
+    ndv.getters.inputLabel().eq(0).should('be.visible');
+
+    ndv.getters.inlineExpressionEditorInput().should('have.length', 2);
+
+    // last parameter in view should be visible
+    ndv.getters.inlineExpressionEditorInput().eq(0).should('have.text', 'should be visible');
+    ndv.getters.inlineExpressionEditorInput().eq(0).should('be.visible');
+
+    // next parameter in view should not be visible
+    ndv.getters.inlineExpressionEditorInput().eq(1).should('have.text', 'not visible');
+    ndv.getters.inlineExpressionEditorInput().eq(1).should('not.be.visible');
+
+    ndv.actions.close();
+    workflowPage.actions.openNode('Slack');
+
+    // first field (credentials) should be visible
+    ndv.getters.nodeCredentialsLabel().should('be.visible');
+
+    // last parameter in view should be visible
+    ndv.getters.inlineExpressionEditorInput().eq(0).should('have.text', 'should be visible');
+    ndv.getters.inlineExpressionEditorInput().eq(0).should('be.visible');
+
+    // next parameter in view should not be visible
+    ndv.getters.inlineExpressionEditorInput().eq(1).should('have.text', 'not visible');
+    ndv.getters.inlineExpressionEditorInput().eq(1).should('not.be.visible');
+  });
+
+  it('NODE-1272 ensure expressions scrolled to top, not middle', () => {
+    workflowPage.actions.visit();
+    cy.createFixtureWorkflow('Test-workflow-with-long-parameters.json');
+    workflowPage.actions.openNode('With long expression');
+
+    ndv.getters.inlineExpressionEditorInput().eq(0).should('be.visible');
+    // should be scrolled at top
+    ndv.getters
+      .inlineExpressionEditorInput()
+      .eq(0)
+      .find('.cm-line')
+      .eq(0)
+      .should('have.text', '1 visible!');
+    ndv.getters.inlineExpressionEditorInput().eq(0).find('.cm-line').eq(0).should('be.visible');
+    ndv.getters
+      .inlineExpressionEditorInput()
+      .eq(0)
+      .find('.cm-line')
+      .eq(6)
+      .should('have.text', '7 not visible!');
+    ndv.getters.inlineExpressionEditorInput().eq(0).find('.cm-line').eq(6).should('not.be.visible');
+  });
+});
diff --git a/cypress/e2e/28-debug.cy.ts b/cypress/e2e/28-debug.cy.ts
index bc1f03c162..b5159951a7 100644
--- a/cypress/e2e/28-debug.cy.ts
+++ b/cypress/e2e/28-debug.cy.ts
@@ -117,7 +117,8 @@ describe('Debug', () => {
     workflowPage.getters.canvasNodes().last().find('.node-info-icon').should('be.empty');
 
     workflowPage.getters.canvasNodes().first().dblclick();
-    ndv.getters.pinDataButton().click();
+    ndv.actions.unPinData();
+    ndv.actions.close();
 
     workflowPage.actions.saveWorkflowUsingKeyboardShortcut();
diff --git a/cypress/e2e/40-manual-partial-execution.cy.ts b/cypress/e2e/40-manual-partial-execution.cy.ts
index 5fe31b56ad..2eb129475f 100644
--- a/cypress/e2e/40-manual-partial-execution.cy.ts
+++ b/cypress/e2e/40-manual-partial-execution.cy.ts
@@ -23,6 +23,7 @@ describe('Manual partial execution', () => {
       canvas.actions.openNode('Webhook1');
 
       ndv.getters.nodeRunSuccessIndicator().should('exist');
+      ndv.getters.nodeRunTooltipIndicator().should('exist');
       ndv.getters.outputRunSelector().should('not.exist'); // single run
     });
   });
diff --git a/cypress/e2e/5-ndv.cy.ts b/cypress/e2e/5-ndv.cy.ts
index a591d62895..f2ccccb6ab 100644
--- a/cypress/e2e/5-ndv.cy.ts
+++ b/cypress/e2e/5-ndv.cy.ts
@@ -133,9 +133,10
@@ describe('NDV', () => { "An expression here won't work because it uses .item and n8n can't figure out the matching item.", ); ndv.getters.nodeRunErrorIndicator().should('be.visible'); + ndv.getters.nodeRunTooltipIndicator().should('be.visible'); // The error details should be hidden behind a tooltip - ndv.getters.nodeRunErrorIndicator().should('not.contain', 'Start Time'); - ndv.getters.nodeRunErrorIndicator().should('not.contain', 'Execution Time'); + ndv.getters.nodeRunTooltipIndicator().should('not.contain', 'Start Time'); + ndv.getters.nodeRunTooltipIndicator().should('not.contain', 'Execution Time'); }); it('should save workflow using keyboard shortcut from NDV', () => { @@ -617,8 +618,10 @@ describe('NDV', () => { // Should not show run info before execution ndv.getters.nodeRunSuccessIndicator().should('not.exist'); ndv.getters.nodeRunErrorIndicator().should('not.exist'); + ndv.getters.nodeRunTooltipIndicator().should('not.exist'); ndv.getters.nodeExecuteButton().click(); ndv.getters.nodeRunSuccessIndicator().should('exist'); + ndv.getters.nodeRunTooltipIndicator().should('exist'); }); it('should properly show node execution indicator for multiple nodes', () => { @@ -630,6 +633,7 @@ describe('NDV', () => { // Manual tigger node should show success indicator workflowPage.actions.openNode('When clicking ‘Test workflow’'); ndv.getters.nodeRunSuccessIndicator().should('exist'); + ndv.getters.nodeRunTooltipIndicator().should('exist'); // Code node should show error ndv.getters.backToCanvas().click(); workflowPage.actions.openNode('Code'); diff --git a/cypress/e2e/6-code-node.cy.ts b/cypress/e2e/6-code-node.cy.ts index 5a6182c25a..5bc7d05ee2 100644 --- a/cypress/e2e/6-code-node.cy.ts +++ b/cypress/e2e/6-code-node.cy.ts @@ -162,21 +162,21 @@ return [] cy.get('#tab-code').should('have.class', 'is-active'); }); - it('should show error based on status code', () => { - const prompt = nanoid(20); - cy.get('#tab-ask-ai').click(); - ndv.actions.executePrevious(); + const handledCodes = [ + { code: 400, message: 'Code generation failed due to an unknown reason' }, + { code: 413, message: 'Your workflow data is too large for AI to process' }, + { code: 429, message: "We've hit our rate limit with our AI partner" }, + { code: 500, message: 'Code generation failed due to an unknown reason' }, + ]; - cy.getByTestId('ask-ai-prompt-input').type(prompt); + handledCodes.forEach(({ code, message }) => { + it(`should show error based on status code ${code}`, () => { + const prompt = nanoid(20); + cy.get('#tab-ask-ai').click(); + ndv.actions.executePrevious(); - const handledCodes = [ - { code: 400, message: 'Code generation failed due to an unknown reason' }, - { code: 413, message: 'Your workflow data is too large for AI to process' }, - { code: 429, message: "We've hit our rate limit with our AI partner" }, - { code: 500, message: 'Code generation failed due to an unknown reason' }, - ]; + cy.getByTestId('ask-ai-prompt-input').type(prompt); - handledCodes.forEach(({ code, message }) => { cy.intercept('POST', '/rest/ai/ask-ai', { statusCode: code, status: code, diff --git a/cypress/fixtures/Test-workflow-with-long-parameters.json b/cypress/fixtures/Test-workflow-with-long-parameters.json new file mode 100644 index 0000000000..d4d052f6f0 --- /dev/null +++ b/cypress/fixtures/Test-workflow-with-long-parameters.json @@ -0,0 +1,150 @@ +{ + "meta": { + "instanceId": "777c68374367604fdf2a0bcfe9b1b574575ddea61aa8268e4bf034434bd7c894" + }, + "nodes": [ + { + "parameters": { + "assignments": { + "assignments": [ + 
{ + "id": "0effebfc-fa8c-4d41-8a37-6d5695dfc9ee", + "name": "test", + "value": "test", + "type": "string" + }, + { + "id": "beb8723f-6333-4186-ab88-41d4e2338866", + "name": "test", + "value": "test", + "type": "string" + }, + { + "id": "85095836-4e94-442f-9270-e1a89008c129", + "name": "test", + "value": "test", + "type": "string" + }, + { + "id": "b6163f8a-bca6-4364-8b38-182df37c55cd", + "name": "=should be visible!", + "value": "=not visible", + "type": "string" + } + ] + }, + "options": {} + }, + "id": "950fcdc1-9e92-410f-8377-d4240e9bf6ff", + "name": "Edit Fields1", + "type": "n8n-nodes-base.set", + "typeVersion": 3.4, + "position": [ + 680, + 460 + ] + }, + { + "parameters": { + "messageType": "block", + "blocksUi": "blocks", + "text": "=should be visible", + "otherOptions": { + "sendAsUser": "=not visible" + } + }, + "id": "dcf7410d-0f8e-4cdb-9819-ae275558bdaa", + "name": "Slack", + "type": "n8n-nodes-base.slack", + "typeVersion": 2.2, + "position": [ + 900, + 460 + ], + "webhookId": "002b502e-31e5-4fdb-ac43-a56cfde8f82a" + }, + { + "parameters": { + "rule": { + "interval": [ + {}, + { + "field": "=should be visible" + }, + { + "field": "=not visible" + } + ] + } + }, + "id": "4c948a3f-19d4-4b08-a8be-f7d2964a21f4", + "name": "Schedule Trigger", + "type": "n8n-nodes-base.scheduleTrigger", + "typeVersion": 1.2, + "position": [ + 460, + 460 + ] + }, + { + "parameters": { + "assignments": { + "assignments": [ + { + "id": "5dcaab37-1146-49c6-97a3-3b2f73483270", + "name": "object", + "value": "=1 visible!\n2 {\n3 \"str\": \"two\",\n4 \"str_date\": \"{{ $now }}\",\n5 \"str_int\": \"1\",\n6 \"str_float\": \"1.234\",\n7 not visible!\n \"str_bool\": \"true\",\n \"str_email\": \"david@thedavid.com\",\n \"str_with_email\":\"My email is david@n8n.io\",\n \"str_json_single\":\"{'one':'two'}\",\n \"str_json_double\":\"{\\\"one\\\":\\\"two\\\"}\",\n \"bool\": true,\n \"list\": [1, 2, 3],\n \"decimal\": 1.234,\n \"timestamp1\": 1708695471,\n \"timestamp2\": 1708695471000,\n \"timestamp3\": 1708695471000000,\n \"num_one\": 1\n}", + "type": "object" + } + ] + }, + "includeOtherFields": true, + "options": {} + }, + "id": "a41dfb0d-38aa-42d2-b3e2-1854090bd319", + "name": "With long expression", + "type": "n8n-nodes-base.set", + "typeVersion": 3.3, + "position": [ + 1100, + 460 + ] + } + ], + "connections": { + "Edit Fields1": { + "main": [ + [ + { + "node": "Slack", + "type": "main", + "index": 0 + } + ] + ] + }, + "Slack": { + "main": [ + [ + { + "node": "With long expression", + "type": "main", + "index": 0 + } + ] + ] + }, + "Schedule Trigger": { + "main": [ + [ + { + "node": "Edit Fields1", + "type": "main", + "index": 0 + } + ] + ] + } + }, + "pinData": {} +} diff --git a/cypress/pages/ndv.ts b/cypress/pages/ndv.ts index cae1fb47b0..4504552e26 100644 --- a/cypress/pages/ndv.ts +++ b/cypress/pages/ndv.ts @@ -20,7 +20,8 @@ export class NDV extends BasePage { outputDataContainer: () => this.getters.outputPanel().findChildByTestId('ndv-data-container'), outputDisplayMode: () => this.getters.outputPanel().findChildByTestId('ndv-run-data-display-mode').first(), - pinDataButton: () => cy.getByTestId('ndv-pin-data'), + pinDataButton: () => this.getters.outputPanel().findChildByTestId('ndv-pin-data'), + unpinDataLink: () => this.getters.outputPanel().findChildByTestId('ndv-unpin-data'), editPinnedDataButton: () => cy.getByTestId('ndv-edit-pinned-data'), pinnedDataEditor: () => this.getters.outputPanel().find('.cm-editor .cm-scroller .cm-content'), runDataPaneHeader: () => 
cy.getByTestId('run-data-pane-header'), @@ -63,6 +64,7 @@ export class NDV extends BasePage { nodeRenameInput: () => cy.getByTestId('node-rename-input'), executePrevious: () => cy.getByTestId('execute-previous-node'), httpRequestNotice: () => cy.getByTestId('node-parameters-http-notice'), + nodeCredentialsLabel: () => cy.getByTestId('credentials-label'), nthParam: (n: number) => cy.getByTestId('node-parameters').find('.parameter-item').eq(n), inputRunSelector: () => this.getters.inputPanel().findChildByTestId('run-selector'), inputLinkRun: () => this.getters.inputPanel().findChildByTestId('link-run'), @@ -130,8 +132,9 @@ export class NDV extends BasePage { codeEditorFullscreenButton: () => cy.getByTestId('code-editor-fullscreen-button'), codeEditorDialog: () => cy.getByTestId('code-editor-fullscreen'), codeEditorFullscreen: () => this.getters.codeEditorDialog().find('.cm-content'), - nodeRunSuccessIndicator: () => cy.getByTestId('node-run-info-success'), - nodeRunErrorIndicator: () => cy.getByTestId('node-run-info-danger'), + nodeRunTooltipIndicator: () => cy.getByTestId('node-run-info'), + nodeRunSuccessIndicator: () => cy.getByTestId('node-run-status-success'), + nodeRunErrorIndicator: () => cy.getByTestId('node-run-status-danger'), nodeRunErrorMessage: () => cy.getByTestId('node-error-message'), nodeRunErrorDescription: () => cy.getByTestId('node-error-description'), fixedCollectionParameter: (paramName: string) => @@ -146,6 +149,9 @@ export class NDV extends BasePage { pinData: () => { this.getters.pinDataButton().click({ force: true }); }, + unPinData: () => { + this.getters.unpinDataLink().click({ force: true }); + }, editPinnedData: () => { this.getters.editPinnedDataButton().click(); }, diff --git a/cypress/pages/workflow.ts b/cypress/pages/workflow.ts index 89186ee34e..cd1e7d9462 100644 --- a/cypress/pages/workflow.ts +++ b/cypress/pages/workflow.ts @@ -2,7 +2,7 @@ import { BasePage } from './base'; import { NodeCreator } from './features/node-creator'; import { META_KEY } from '../constants'; import { getVisibleSelect } from '../utils'; -import { getUniqueWorkflowName } from '../utils/workflowUtils'; +import { getUniqueWorkflowName, isCanvasV2 } from '../utils/workflowUtils'; const nodeCreator = new NodeCreator(); export class WorkflowPage extends BasePage { @@ -27,7 +27,11 @@ export class WorkflowPage extends BasePage { nodeCreatorSearchBar: () => cy.getByTestId('node-creator-search-bar'), nodeCreatorPlusButton: () => cy.getByTestId('node-creator-plus-button'), canvasPlusButton: () => cy.getByTestId('canvas-plus-button'), - canvasNodes: () => cy.getByTestId('canvas-node'), + canvasNodes: () => + cy.ifCanvasVersion( + () => cy.getByTestId('canvas-node'), + () => cy.getByTestId('canvas-node').not('[data-node-type="n8n-nodes-internal.addNodes"]'), + ), canvasNodeByName: (nodeName: string) => this.getters.canvasNodes().filter(`:contains(${nodeName})`), nodeIssuesByName: (nodeName: string) => @@ -37,6 +41,17 @@ export class WorkflowPage extends BasePage { .should('have.length.greaterThan', 0) .findChildByTestId('node-issues'), getEndpointSelector: (type: 'input' | 'output' | 'plus', nodeName: string, index = 0) => { + if (isCanvasV2()) { + if (type === 'input') { + return `[data-test-id="canvas-node-input-handle"][data-node-name="${nodeName}"][data-handle-index="${index}"]`; + } + if (type === 'output') { + return `[data-test-id="canvas-node-output-handle"][data-node-name="${nodeName}"][data-handle-index="${index}"]`; + } + if (type === 'plus') { + return 
`[data-test-id="canvas-node-output-handle"][data-node-name="${nodeName}"][data-handle-index="${index}"] [data-test-id="canvas-handle-plus"] .clickable`; + } + } return `[data-endpoint-name='${nodeName}'][data-endpoint-type='${type}'][data-input-index='${index}']`; }, canvasNodeInputEndpointByName: (nodeName: string, index = 0) => { @@ -46,7 +61,15 @@ export class WorkflowPage extends BasePage { return cy.get(this.getters.getEndpointSelector('output', nodeName, index)); }, canvasNodePlusEndpointByName: (nodeName: string, index = 0) => { - return cy.get(this.getters.getEndpointSelector('plus', nodeName, index)); + return cy.ifCanvasVersion( + () => cy.get(this.getters.getEndpointSelector('plus', nodeName, index)), + () => + cy + .get( + `[data-test-id="canvas-node-output-handle"][data-node-name="${nodeName}"] [data-test-id="canvas-handle-plus"] .clickable`, + ) + .eq(index), + ); }, activatorSwitch: () => cy.getByTestId('workflow-activate-switch'), workflowMenu: () => cy.getByTestId('workflow-menu'), @@ -56,13 +79,29 @@ export class WorkflowPage extends BasePage { expressionModalInput: () => cy.getByTestId('expression-modal-input').find('[role=textbox]'), expressionModalOutput: () => cy.getByTestId('expression-modal-output'), - nodeViewRoot: () => cy.getByTestId('node-view-root'), + nodeViewRoot: () => + cy.ifCanvasVersion( + () => cy.getByTestId('node-view-root'), + () => this.getters.nodeView(), + ), copyPasteInput: () => cy.getByTestId('hidden-copy-paste'), - nodeConnections: () => cy.get('.jtk-connector'), + nodeConnections: () => + cy.ifCanvasVersion( + () => cy.get('.jtk-connector'), + () => cy.getByTestId('edge-label-wrapper'), + ), zoomToFitButton: () => cy.getByTestId('zoom-to-fit'), nodeEndpoints: () => cy.get('.jtk-endpoint-connected'), - disabledNodes: () => cy.get('.node-box.disabled'), - selectedNodes: () => this.getters.canvasNodes().filter('.jtk-drag-selected'), + disabledNodes: () => + cy.ifCanvasVersion( + () => cy.get('.node-box.disabled'), + () => cy.get('[data-test-id="canvas-trigger-node"][class*="disabled"]'), + ), + selectedNodes: () => + cy.ifCanvasVersion( + () => this.getters.canvasNodes().filter('.jtk-drag-selected'), + () => this.getters.canvasNodes().parent().filter('.selected'), + ), // Workflow menu items workflowMenuItemDuplicate: () => cy.getByTestId('workflow-menu-item-duplicate'), workflowMenuItemDownload: () => cy.getByTestId('workflow-menu-item-download'), @@ -92,8 +131,21 @@ export class WorkflowPage extends BasePage { shareButton: () => cy.getByTestId('workflow-share-button'), duplicateWorkflowModal: () => cy.getByTestId('duplicate-modal'), - nodeViewBackground: () => cy.getByTestId('node-view-background'), - nodeView: () => cy.getByTestId('node-view'), + nodeViewBackground: () => + cy.ifCanvasVersion( + () => cy.getByTestId('node-view-background'), + () => cy.getByTestId('canvas'), + ), + nodeView: () => + cy.ifCanvasVersion( + () => cy.getByTestId('node-view'), + () => cy.get('[data-test-id="canvas-wrapper"]'), + ), + canvasViewport: () => + cy.ifCanvasVersion( + () => cy.getByTestId('node-view'), + () => cy.get('.vue-flow__transformationpane.vue-flow__container'), + ), inlineExpressionEditorInput: () => cy.getByTestId('inline-expression-editor-input').find('[role=textbox]'), inlineExpressionEditorOutput: () => cy.getByTestId('inline-expression-editor-output'), @@ -115,12 +167,26 @@ export class WorkflowPage extends BasePage { ndvParameters: () => cy.getByTestId('parameter-item'), nodeCredentialsLabel: () => cy.getByTestId('credentials-label'), 
getConnectionBetweenNodes: (sourceNodeName: string, targetNodeName: string) => - cy.get( - `.jtk-connector[data-source-node="${sourceNodeName}"][data-target-node="${targetNodeName}"]`, + cy.ifCanvasVersion( + () => + cy.get( + `.jtk-connector[data-source-node="${sourceNodeName}"][data-target-node="${targetNodeName}"]`, + ), + () => + cy.get( + `[data-test-id="edge-label-wrapper"][data-source-node-name="${sourceNodeName}"][data-target-node-name="${targetNodeName}"]`, + ), ), getConnectionActionsBetweenNodes: (sourceNodeName: string, targetNodeName: string) => - cy.get( - `.connection-actions[data-source-node="${sourceNodeName}"][data-target-node="${targetNodeName}"]`, + cy.ifCanvasVersion( + () => + cy.get( + `.connection-actions[data-source-node="${sourceNodeName}"][data-target-node="${targetNodeName}"]`, + ), + () => + cy.get( + `[data-test-id="edge-label-wrapper"][data-source-node-name="${sourceNodeName}"][data-target-node-name="${targetNodeName}"] [data-test-id="canvas-edge-toolbar"]`, + ), ), addStickyButton: () => cy.getByTestId('add-sticky-button'), stickies: () => cy.getByTestId('sticky'), @@ -128,6 +194,18 @@ export class WorkflowPage extends BasePage { workflowHistoryButton: () => cy.getByTestId('workflow-history-button'), colors: () => cy.getByTestId('color'), contextMenuAction: (action: string) => cy.getByTestId(`context-menu-item-${action}`), + getNodeLeftPosition: (element: JQuery) => { + if (isCanvasV2()) { + return parseFloat(element.parent().css('transform').split(',')[4]); + } + return parseFloat(element.css('left')); + }, + getNodeTopPosition: (element: JQuery) => { + if (isCanvasV2()) { + return parseFloat(element.parent().css('transform').split(',')[5]); + } + return parseFloat(element.css('top')); + }, }; actions = { @@ -332,7 +410,7 @@ export class WorkflowPage extends BasePage { pinchToZoom: (steps: number, mode: 'zoomIn' | 'zoomOut' = 'zoomIn') => { cy.window().then((win) => { // Pinch-to-zoom simulates a 'wheel' event with ctrlKey: true (same as zooming by scrolling) - this.getters.nodeViewBackground().trigger('wheel', { + this.getters.nodeView().trigger('wheel', { force: true, bubbles: true, ctrlKey: true, @@ -391,9 +469,12 @@ export class WorkflowPage extends BasePage { action?: string, ) => { this.getters.getConnectionBetweenNodes(sourceNodeName, targetNodeName).first().realHover(); - this.getters - .getConnectionActionsBetweenNodes(sourceNodeName, targetNodeName) - .find('.add') + const connectionsBetweenNodes = () => + this.getters.getConnectionActionsBetweenNodes(sourceNodeName, targetNodeName); + cy.ifCanvasVersion( + () => connectionsBetweenNodes().find('.add'), + () => connectionsBetweenNodes().get('[data-test-id="add-connection-button"]'), + ) .first() .click({ force: true }); @@ -401,9 +482,12 @@ export class WorkflowPage extends BasePage { }, deleteNodeBetweenNodes: (sourceNodeName: string, targetNodeName: string) => { this.getters.getConnectionBetweenNodes(sourceNodeName, targetNodeName).first().realHover(); - this.getters - .getConnectionActionsBetweenNodes(sourceNodeName, targetNodeName) - .find('.delete') + const connectionsBetweenNodes = () => + this.getters.getConnectionActionsBetweenNodes(sourceNodeName, targetNodeName); + cy.ifCanvasVersion( + () => connectionsBetweenNodes().find('.delete'), + () => connectionsBetweenNodes().get('[data-test-id="delete-connection-button"]'), + ) .first() .click({ force: true }); }, diff --git a/cypress/support/commands.ts b/cypress/support/commands.ts index 35f100fded..6cad68b34f 100644 --- 
a/cypress/support/commands.ts
+++ b/cypress/support/commands.ts
@@ -10,7 +10,7 @@ import {
   N8N_AUTH_COOKIE,
 } from '../constants';
 import { WorkflowPage } from '../pages';
-import { getUniqueWorkflowName } from '../utils/workflowUtils';
+import { getUniqueWorkflowName, isCanvasV2 } from '../utils/workflowUtils';
 
 Cypress.Commands.add('setAppDate', (targetDate: number | Date) => {
   cy.window().then((win) => {
@@ -26,6 +26,10 @@ Cypress.Commands.add('getByTestId', (selector, ...args) => {
   return cy.get(`[data-test-id="${selector}"]`, ...args);
 });
 
+Cypress.Commands.add('ifCanvasVersion', (getterV1, getterV2) => {
+  return isCanvasV2() ? getterV2() : getterV1();
+});
+
 Cypress.Commands.add(
   'createFixtureWorkflow',
   (fixtureKey: string, workflowName = getUniqueWorkflowName()) => {
@@ -70,6 +74,10 @@ Cypress.Commands.add('signin', ({ email, password }) => {
       })
       .then((response) => {
         Cypress.env('currentUserId', response.body.data.id);
+
+        cy.window().then((win) => {
+          win.localStorage.setItem('NodeView.switcher.discovered', 'true'); // @TODO Remove this once the switcher is removed
+        });
       });
   });
 });
diff --git a/cypress/support/e2e.ts b/cypress/support/e2e.ts
index 0fe782499d..4261cb4b63 100644
--- a/cypress/support/e2e.ts
+++ b/cypress/support/e2e.ts
@@ -20,6 +20,11 @@ beforeEach(() => {
     win.localStorage.setItem('N8N_THEME', 'light');
     win.localStorage.setItem('N8N_AUTOCOMPLETE_ONBOARDED', 'true');
    win.localStorage.setItem('N8N_MAPPING_ONBOARDED', 'true');
+
+    const nodeViewVersion = Cypress.env('NODE_VIEW_VERSION');
+    if (nodeViewVersion) {
+      win.localStorage.setItem('NodeView.version', nodeViewVersion);
+    }
   });
 
   cy.intercept('GET', '/rest/settings', (req) => {
diff --git a/cypress/support/index.ts b/cypress/support/index.ts
index a5f1caf5b2..2fd1faeb22 100644
--- a/cypress/support/index.ts
+++ b/cypress/support/index.ts
@@ -28,6 +28,7 @@ declare global {
       selector: string,
       ...args: Array | undefined>
     ): Chainable>;
+    ifCanvasVersion<T1, T2>(getterV1: () => T1, getterV2: () => T2): T1 | T2;
     findChildByTestId(childTestId: string): Chainable>;
     /**
      * Creates a workflow from the given fixture and optionally renames it.
diff --git a/cypress/utils/workflowUtils.ts b/cypress/utils/workflowUtils.ts
index 5001dbe1b6..0c91a097bd 100644
--- a/cypress/utils/workflowUtils.ts
+++ b/cypress/utils/workflowUtils.ts
@@ -3,3 +3,7 @@ import { nanoid } from 'nanoid';
 export function getUniqueWorkflowName(workflowNamePrefix?: string) {
   return workflowNamePrefix ? `${workflowNamePrefix} ${nanoid(12)}` : nanoid(12);
 }
+
+export function isCanvasV2() {
+  return Cypress.env('NODE_VIEW_VERSION') === 2;
+}
diff --git a/package.json b/package.json
index ee888f53dd..d25f0a13b2 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "n8n-monorepo",
-  "version": "1.63.0",
+  "version": "1.64.0",
   "private": true,
   "engines": {
     "node": ">=20.15",
diff --git a/packages/@n8n/api-types/package.json b/packages/@n8n/api-types/package.json
index e2614bcf68..0c4440eb6b 100644
--- a/packages/@n8n/api-types/package.json
+++ b/packages/@n8n/api-types/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@n8n/api-types",
-  "version": "0.4.0",
+  "version": "0.5.0",
   "scripts": {
     "clean": "rimraf dist .turbo",
     "dev": "pnpm watch",
diff --git a/packages/@n8n/benchmark/scenarios/binary-data/binary-data.script.js b/packages/@n8n/benchmark/scenarios/binary-data/binary-data.script.js
index c10e667cbb..28edfdf9ec 100644
--- a/packages/@n8n/benchmark/scenarios/binary-data/binary-data.script.js
+++ b/packages/@n8n/benchmark/scenarios/binary-data/binary-data.script.js
@@ -15,6 +15,12 @@ export default function () {
 
   const res = http.post(`${apiBaseUrl}/webhook/binary-files-benchmark`, data);
 
+  if (res.status !== 200) {
+    console.error(
+      `Invalid response. Received status ${res.status}. Body: ${JSON.stringify(res.body)}`,
+    );
+  }
+
   check(res, {
     'is status 200': (r) => r.status === 200,
     'has correct content type': (r) =>
diff --git a/packages/@n8n/benchmark/scenarios/http-node/http-node.script.js b/packages/@n8n/benchmark/scenarios/http-node/http-node.script.js
index b391982259..4ecee9d1bd 100644
--- a/packages/@n8n/benchmark/scenarios/http-node/http-node.script.js
+++ b/packages/@n8n/benchmark/scenarios/http-node/http-node.script.js
@@ -6,6 +6,12 @@ const apiBaseUrl = __ENV.API_BASE_URL;
 export default function () {
   const res = http.post(`${apiBaseUrl}/webhook/benchmark-http-node`);
 
+  if (res.status !== 200) {
+    console.error(
+      `Invalid response. Received status ${res.status}. Body: ${JSON.stringify(res.body)}`,
+    );
+  }
+
   check(res, {
     'is status 200': (r) => r.status === 200,
     'http requests were OK': (r) => {
diff --git a/packages/@n8n/benchmark/scenarios/js-code-node/js-code-node.script.js b/packages/@n8n/benchmark/scenarios/js-code-node/js-code-node.script.js
index 74cef4f441..b2fd8eb315 100644
--- a/packages/@n8n/benchmark/scenarios/js-code-node/js-code-node.script.js
+++ b/packages/@n8n/benchmark/scenarios/js-code-node/js-code-node.script.js
@@ -5,6 +5,13 @@ const apiBaseUrl = __ENV.API_BASE_URL;
 
 export default function () {
   const res = http.post(`${apiBaseUrl}/webhook/code-node-benchmark`, {});
+
+  if (res.status !== 200) {
+    console.error(
+      `Invalid response. Received status ${res.status}. Body: ${JSON.stringify(res.body)}`,
+    );
+  }
+
   check(res, {
     'is status 200': (r) => r.status === 200,
     'has items in response': (r) => {
diff --git a/packages/@n8n/benchmark/scenarios/set-node-expressions/set-node-expressions.script.js b/packages/@n8n/benchmark/scenarios/set-node-expressions/set-node-expressions.script.js
index 4bea17eb9f..9564fcc53c 100644
--- a/packages/@n8n/benchmark/scenarios/set-node-expressions/set-node-expressions.script.js
+++ b/packages/@n8n/benchmark/scenarios/set-node-expressions/set-node-expressions.script.js
@@ -5,6 +5,13 @@ const apiBaseUrl = __ENV.API_BASE_URL;
 
 export default function () {
   const res = http.post(`${apiBaseUrl}/webhook/set-expressions-benchmark`, {});
+
+  if (res.status !== 200) {
+    console.error(
+      `Invalid response. Received status ${res.status}. Body: ${JSON.stringify(res.body)}`,
+    );
+  }
+
   check(res, {
     'is status 200': (r) => r.status === 200,
   });
diff --git a/packages/@n8n/benchmark/scenarios/single-webhook/single-webhook.manifest.json b/packages/@n8n/benchmark/scenarios/single-webhook/single-webhook.manifest.json
index 9c68908eef..2113c73ec9 100644
--- a/packages/@n8n/benchmark/scenarios/single-webhook/single-webhook.manifest.json
+++ b/packages/@n8n/benchmark/scenarios/single-webhook/single-webhook.manifest.json
@@ -3,5 +3,5 @@
   "name": "SingleWebhook",
   "description": "A single webhook trigger that responds with a 200 status code",
   "scenarioData": { "workflowFiles": ["single-webhook.json"] },
-  "scriptPath": "single-webhook.script.ts"
+  "scriptPath": "single-webhook.script.js"
 }
diff --git a/packages/@n8n/benchmark/scenarios/single-webhook/single-webhook.script.ts b/packages/@n8n/benchmark/scenarios/single-webhook/single-webhook.script.js
similarity index 63%
rename from packages/@n8n/benchmark/scenarios/single-webhook/single-webhook.script.ts
rename to packages/@n8n/benchmark/scenarios/single-webhook/single-webhook.script.js
index 72e2563cbe..41facc8aeb 100644
--- a/packages/@n8n/benchmark/scenarios/single-webhook/single-webhook.script.ts
+++ b/packages/@n8n/benchmark/scenarios/single-webhook/single-webhook.script.js
@@ -5,6 +5,13 @@ const apiBaseUrl = __ENV.API_BASE_URL;
 
 export default function () {
   const res = http.get(`${apiBaseUrl}/webhook/single-webhook`);
+
+  if (res.status !== 200) {
+    console.error(
+      `Invalid response. Received status ${res.status}. Body: ${JSON.stringify(res.body)}`,
+    );
+  }
+
   check(res, {
     'is status 200': (r) => r.status === 200,
   });
diff --git a/packages/@n8n/benchmark/scripts/n8n-setups/scaling-multi-main/docker-compose.yml b/packages/@n8n/benchmark/scripts/n8n-setups/scaling-multi-main/docker-compose.yml
index ca3ad9c23d..c686f581b3 100644
--- a/packages/@n8n/benchmark/scripts/n8n-setups/scaling-multi-main/docker-compose.yml
+++ b/packages/@n8n/benchmark/scripts/n8n-setups/scaling-multi-main/docker-compose.yml
@@ -176,7 +176,7 @@ services:
 
   # Load balancer that acts as an entry point for n8n
   n8n:
-    image: nginx:latest
+    image: nginx:1.27.2
     ports:
       - '5678:80'
     volumes:
diff --git a/packages/@n8n/benchmark/scripts/n8n-setups/scaling-multi-main/nginx.conf b/packages/@n8n/benchmark/scripts/n8n-setups/scaling-multi-main/nginx.conf
index 86100f8c50..142da7416e 100644
--- a/packages/@n8n/benchmark/scripts/n8n-setups/scaling-multi-main/nginx.conf
+++ b/packages/@n8n/benchmark/scripts/n8n-setups/scaling-multi-main/nginx.conf
@@ -3,6 +3,7 @@ events {}
 http {
   client_max_body_size 50M;
   access_log off;
+  error_log /dev/stderr warn;
 
   upstream backend {
     server n8n_main1:5678;
diff --git a/packages/@n8n/benchmark/scripts/run-in-cloud.mjs b/packages/@n8n/benchmark/scripts/run-in-cloud.mjs
index c61c0901d4..35e90bdee5 100755
--- a/packages/@n8n/benchmark/scripts/run-in-cloud.mjs
+++ b/packages/@n8n/benchmark/scripts/run-in-cloud.mjs
@@ -78,12 +78,6 @@ async function runBenchmarksOnVm(config, benchmarkEnv) {
   const bootstrapScriptPath = path.join(scriptsDir, 'bootstrap.sh');
   await sshClient.ssh(`chmod a+x ${bootstrapScriptPath} && ${bootstrapScriptPath}`);
 
-  // Benchmarking the VM
-  const vmBenchmarkScriptPath = path.join(scriptsDir, 'vm-benchmark.sh');
-  await sshClient.ssh(`chmod a+x ${vmBenchmarkScriptPath} && ${vmBenchmarkScriptPath}`, {
-    verbose: true,
-  });
-
   // Give some time for the VM to be ready
   await sleep(1000);
 
diff --git a/packages/@n8n/benchmark/scripts/vm-benchmark.sh
b/packages/@n8n/benchmark/scripts/vm-benchmark.sh deleted file mode 100644 index 13b7eb2b1a..0000000000 --- a/packages/@n8n/benchmark/scripts/vm-benchmark.sh +++ /dev/null @@ -1,13 +0,0 @@ -#!/bin/bash - -# Install fio -DEBIAN_FRONTEND=noninteractive sudo apt-get -y install fio > /dev/null - -# Run the disk benchmark -fio --name=rand_rw --ioengine=libaio --rw=randrw --rwmixread=70 --bs=4k --numjobs=4 --size=1G --runtime=30 --directory=/n8n --group_reporting - -# Remove files -sudo rm /n8n/rand_rw.* - -# Uninstall fio -DEBIAN_FRONTEND=noninteractive sudo apt-get -y remove fio > /dev/null diff --git a/packages/@n8n/benchmark/src/test-execution/scenario-runner.ts b/packages/@n8n/benchmark/src/test-execution/scenario-runner.ts index 84d1d8b096..def841ccf5 100644 --- a/packages/@n8n/benchmark/src/test-execution/scenario-runner.ts +++ b/packages/@n8n/benchmark/src/test-execution/scenario-runner.ts @@ -1,3 +1,5 @@ +import { sleep } from 'zx'; + import { AuthenticatedN8nApiClient } from '@/n8n-api-client/authenticated-n8n-api-client'; import type { N8nApiClient } from '@/n8n-api-client/n8n-api-client'; import type { ScenarioDataFileLoader } from '@/scenario/scenario-data-loader'; @@ -47,6 +49,10 @@ export class ScenarioRunner { const testData = await this.dataLoader.loadDataForScenario(scenario); await testDataImporter.importTestScenarioData(testData.workflows); + // Wait for 1s before executing the scenario to ensure that the workflows are activated. + // In multi-main mode it can take some time before the workflow becomes active. + await sleep(1000); + console.log('Executing scenario script'); await this.k6Executor.executeTestScenario(scenario, { scenarioRunName, diff --git a/packages/@n8n/chat/package.json b/packages/@n8n/chat/package.json index 24d6cf6f1c..5dc5881181 100644 --- a/packages/@n8n/chat/package.json +++ b/packages/@n8n/chat/package.json @@ -1,6 +1,6 @@ { "name": "@n8n/chat", - "version": "0.28.0", + "version": "0.29.0", "scripts": { "dev": "pnpm run storybook", "build": "pnpm build:vite && pnpm build:bundle", diff --git a/packages/@n8n/config/package.json b/packages/@n8n/config/package.json index 10c8cbcf5b..627e9b7ef3 100644 --- a/packages/@n8n/config/package.json +++ b/packages/@n8n/config/package.json @@ -1,6 +1,6 @@ { "name": "@n8n/config", - "version": "1.13.0", + "version": "1.14.0", "scripts": { "clean": "rimraf dist .turbo", "dev": "pnpm watch", diff --git a/packages/@n8n/config/src/configs/generic.config.ts b/packages/@n8n/config/src/configs/generic.config.ts new file mode 100644 index 0000000000..f6960b2415 --- /dev/null +++ b/packages/@n8n/config/src/configs/generic.config.ts @@ -0,0 +1,15 @@ +import { Config, Env } from '../decorators'; + +@Config +export class GenericConfig { + /** Default timezone for the n8n instance. Can be overridden on a per-workflow basis. */ + @Env('GENERIC_TIMEZONE') + timezone: string = 'America/New_York'; + + @Env('N8N_RELEASE_TYPE') + releaseChannel: 'stable' | 'beta' | 'nightly' | 'dev' = 'dev'; + + /** Grace period (in seconds) to wait for components to shut down before process exit. 
*/ + @Env('N8N_GRACEFUL_SHUTDOWN_TIMEOUT') + gracefulShutdownTimeout: number = 30; +} diff --git a/packages/@n8n/config/src/configs/multi-main-setup.config.ts b/packages/@n8n/config/src/configs/multi-main-setup.config.ts new file mode 100644 index 0000000000..e3599c1d55 --- /dev/null +++ b/packages/@n8n/config/src/configs/multi-main-setup.config.ts @@ -0,0 +1,16 @@ +import { Config, Env } from '../decorators'; + +@Config +export class MultiMainSetupConfig { + /** Whether to enable multi-main setup (if licensed) for scaling mode. */ + @Env('N8N_MULTI_MAIN_SETUP_ENABLED') + enabled: boolean = false; + + /** Time to live (in seconds) for leader key in multi-main setup. */ + @Env('N8N_MULTI_MAIN_SETUP_KEY_TTL') + ttl: number = 10; + + /** Interval (in seconds) for leader check in multi-main setup. */ + @Env('N8N_MULTI_MAIN_SETUP_CHECK_INTERVAL') + interval: number = 3; +} diff --git a/packages/@n8n/config/src/configs/scaling-mode.config.ts b/packages/@n8n/config/src/configs/scaling-mode.config.ts index 05ee6b4841..f202440a5b 100644 --- a/packages/@n8n/config/src/configs/scaling-mode.config.ts +++ b/packages/@n8n/config/src/configs/scaling-mode.config.ts @@ -82,10 +82,6 @@ class BullConfig { @Nested redis: RedisConfig; - /** How often (in seconds) to poll the Bull queue to identify executions finished during a Redis crash. `0` to disable. May increase Redis traffic significantly. */ - @Env('QUEUE_RECOVERY_INTERVAL') - queueRecoveryInterval: number = 60; // watchdog interval - /** @deprecated How long (in seconds) a worker must wait for active executions to finish before exiting. Use `N8N_GRACEFUL_SHUTDOWN_TIMEOUT` instead */ @Env('QUEUE_WORKER_TIMEOUT') gracefulShutdownTimeout: number = 30; diff --git a/packages/@n8n/config/src/index.ts b/packages/@n8n/config/src/index.ts index 9044ffa0fa..b76e5c455c 100644 --- a/packages/@n8n/config/src/index.ts +++ b/packages/@n8n/config/src/index.ts @@ -5,7 +5,9 @@ import { EndpointsConfig } from './configs/endpoints.config'; import { EventBusConfig } from './configs/event-bus.config'; import { ExternalSecretsConfig } from './configs/external-secrets.config'; import { ExternalStorageConfig } from './configs/external-storage.config'; +import { GenericConfig } from './configs/generic.config'; import { LoggingConfig } from './configs/logging.config'; +import { MultiMainSetupConfig } from './configs/multi-main-setup.config'; import { NodesConfig } from './configs/nodes.config'; import { PublicApiConfig } from './configs/public-api.config'; import { TaskRunnersConfig } from './configs/runners.config'; @@ -93,4 +95,10 @@ export class GlobalConfig { @Nested taskRunners: TaskRunnersConfig; + + @Nested + multiMainSetup: MultiMainSetupConfig; + + @Nested + generic: GenericConfig; } diff --git a/packages/@n8n/config/test/config.test.ts b/packages/@n8n/config/test/config.test.ts index 56f3bc6de7..af40e7a8e1 100644 --- a/packages/@n8n/config/test/config.test.ts +++ b/packages/@n8n/config/test/config.test.ts @@ -211,7 +211,6 @@ describe('GlobalConfig', () => { clusterNodes: '', tls: false, }, - queueRecoveryInterval: 60, gracefulShutdownTimeout: 30, prefix: 'bull', settings: { @@ -246,6 +245,16 @@ describe('GlobalConfig', () => { }, scopes: [], }, + multiMainSetup: { + enabled: false, + ttl: 10, + interval: 3, + }, + generic: { + timezone: 'America/New_York', + releaseChannel: 'dev', + gracefulShutdownTimeout: 30, + }, }; it('should use all default values when no env variables are defined', () => { diff --git a/packages/@n8n/json-schema-to-zod/.eslintrc.js 
b/packages/@n8n/json-schema-to-zod/.eslintrc.js new file mode 100644 index 0000000000..03caaf4930 --- /dev/null +++ b/packages/@n8n/json-schema-to-zod/.eslintrc.js @@ -0,0 +1,21 @@ +const sharedOptions = require('@n8n_io/eslint-config/shared'); + +/** + * @type {import('@types/eslint').ESLint.ConfigData} + */ +module.exports = { + extends: ['@n8n_io/eslint-config/node'], + + ...sharedOptions(__dirname), + + ignorePatterns: ['jest.config.js'], + + rules: { + 'unicorn/filename-case': ['error', { case: 'kebabCase' }], + '@typescript-eslint/no-duplicate-imports': 'off', + 'import/no-cycle': 'off', + 'n8n-local-rules/no-plain-errors': 'off', + + complexity: 'error', + }, +}; diff --git a/packages/@n8n/json-schema-to-zod/.gitignore b/packages/@n8n/json-schema-to-zod/.gitignore new file mode 100644 index 0000000000..d11ff827d5 --- /dev/null +++ b/packages/@n8n/json-schema-to-zod/.gitignore @@ -0,0 +1,4 @@ +node_modules +dist +coverage +test/output diff --git a/packages/@n8n/json-schema-to-zod/.npmignore b/packages/@n8n/json-schema-to-zod/.npmignore new file mode 100644 index 0000000000..3aeebeb66b --- /dev/null +++ b/packages/@n8n/json-schema-to-zod/.npmignore @@ -0,0 +1,3 @@ +src +tsconfig* +test diff --git a/packages/@n8n/json-schema-to-zod/LICENSE b/packages/@n8n/json-schema-to-zod/LICENSE new file mode 100644 index 0000000000..aa24f46da6 --- /dev/null +++ b/packages/@n8n/json-schema-to-zod/LICENSE @@ -0,0 +1,16 @@ +ISC License + +Copyright (c) 2024, n8n +Copyright (c) 2021, Stefan Terdell + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/packages/@n8n/json-schema-to-zod/README.md b/packages/@n8n/json-schema-to-zod/README.md new file mode 100644 index 0000000000..cb76a141b5 --- /dev/null +++ b/packages/@n8n/json-schema-to-zod/README.md @@ -0,0 +1,34 @@ +# Json-Schema-to-Zod + +A package that converts JSON Schema (draft 4+) objects into Zod schema objects at runtime. + +## Installation + +```sh +npm install @n8n/json-schema-to-zod +``` + +### Simple example + +```typescript +import { jsonSchemaToZod } from "@n8n/json-schema-to-zod"; + +const jsonSchema = { + type: "object", + properties: { + hello: { + type: "string", + }, + }, +}; + +const zodSchema = jsonSchemaToZod(jsonSchema); +``` + +### Overriding a parser + +You can pass a function to the `parserOverride` option. It receives the current schema node and the refs object, and should return a Zod schema whenever it wants to replace the default output for that node; return `undefined` to fall back to the default parser. + +## Acknowledgements + +This is a fork of [`json-schema-to-zod`](https://github.com/StefanTerdell/json-schema-to-zod).
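The `parserOverride` option described in the README above can be used, for example, to special-case individual schema nodes. The following is a minimal sketch based on the `JsonSchemaToZodOptions` type and the parser-override test added elsewhere in this diff; the schema shape and the UUID rule are illustrative assumptions, not part of the package.

```typescript
import { z } from 'zod';
import { jsonSchemaToZod } from '@n8n/json-schema-to-zod';

// Illustrative JSON schema: an object whose "id" property should be validated as a UUID.
const schema = {
  type: 'object',
  properties: {
    id: { type: 'string' },
    name: { type: 'string' },
  },
};

const zodSchema = jsonSchemaToZod(schema, {
  // Called for every node before the default parsers run; `refs.path` is the node's
  // location inside the root schema, e.g. ['properties', 'id'].
  parserOverride: (node, refs) => {
    if (node.type === 'string' && refs.path[refs.path.length - 1] === 'id') {
      return z.string().uuid(); // replace the default z.string() for this node
    }
    return undefined; // fall back to the default parser everywhere else
  },
});

zodSchema.parse({ id: '123e4567-e89b-12d3-a456-426614174000', name: 'example' }); // ok
```

Returning `undefined` keeps the default parsers in charge of every node the override does not claim, which keeps overrides narrowly scoped.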
diff --git a/packages/@n8n/json-schema-to-zod/jest.config.js b/packages/@n8n/json-schema-to-zod/jest.config.js new file mode 100644 index 0000000000..b8e98e8970 --- /dev/null +++ b/packages/@n8n/json-schema-to-zod/jest.config.js @@ -0,0 +1,5 @@ +/** @type {import('jest').Config} */ +module.exports = { + ...require('../../../jest.config'), + setupFilesAfterEnv: ['/test/extend-expect.ts'], +}; diff --git a/packages/@n8n/json-schema-to-zod/package.json b/packages/@n8n/json-schema-to-zod/package.json new file mode 100644 index 0000000000..8900213381 --- /dev/null +++ b/packages/@n8n/json-schema-to-zod/package.json @@ -0,0 +1,69 @@ +{ + "name": "@n8n/json-schema-to-zod", + "version": "1.0.0", + "description": "Converts JSON schema objects into Zod schemas", + "types": "./dist/types/index.d.ts", + "main": "./dist/cjs/index.js", + "module": "./dist/esm/index.js", + "exports": { + "import": { + "types": "./dist/types/index.d.ts", + "default": "./dist/esm/index.js" + }, + "require": { + "types": "./dist/types/index.d.ts", + "default": "./dist/cjs/index.js" + } + }, + "scripts": { + "clean": "rimraf dist .turbo", + "typecheck": "tsc --noEmit", + "dev": "tsc -w", + "format": "biome format --write src", + "format:check": "biome ci src", + "lint": "eslint . --quiet", + "lintfix": "eslint . --fix", + "build:types": "tsc -p tsconfig.types.json", + "build:cjs": "tsc -p tsconfig.cjs.json && node postcjs.js", + "build:esm": "tsc -p tsconfig.esm.json && node postesm.js", + "build": "rimraf ./dist && pnpm run build:types && pnpm run build:cjs && pnpm run build:esm", + "dry": "pnpm run build && pnpm pub --dry-run", + "test": "jest", + "test:watch": "jest --watch" + }, + "keywords": [ + "zod", + "json", + "schema", + "converter", + "cli" + ], + "author": "Stefan Terdell", + "contributors": [ + "Chen (https://github.com/werifu)", + "Nuno Carduso (https://github.com/ncardoso-barracuda)", + "Lars Strojny (https://github.com/lstrojny)", + "Navtoj Chahal (https://github.com/navtoj)", + "Ben McCann (https://github.com/benmccann)", + "Dmitry Zakharov (https://github.com/DZakh)", + "Michel Turpin (https://github.com/grimly)", + "David Barratt (https://github.com/davidbarratt)", + "pevisscher (https://github.com/pevisscher)", + "Aidin Abedi (https://github.com/aidinabedi)", + "Brett Zamir (https://github.com/brettz9)", + "n8n (https://github.com/n8n-io)" + ], + "license": "ISC", + "repository": { + "type": "git", + "url": "https://github.com/n8n-io/n8n" + }, + "peerDependencies": { + "zod": "^3.0.0" + }, + "devDependencies": { + "@types/json-schema": "^7.0.15", + "@types/node": "^20.9.0", + "zod": "catalog:" + } +} diff --git a/packages/@n8n/json-schema-to-zod/postcjs.js b/packages/@n8n/json-schema-to-zod/postcjs.js new file mode 100644 index 0000000000..618aa03a96 --- /dev/null +++ b/packages/@n8n/json-schema-to-zod/postcjs.js @@ -0,0 +1 @@ +require('fs').writeFileSync('./dist/cjs/package.json', '{"type":"commonjs"}', 'utf-8'); diff --git a/packages/@n8n/json-schema-to-zod/postesm.js b/packages/@n8n/json-schema-to-zod/postesm.js new file mode 100644 index 0000000000..5235734d6c --- /dev/null +++ b/packages/@n8n/json-schema-to-zod/postesm.js @@ -0,0 +1 @@ +require('fs').writeFileSync('./dist/esm/package.json', '{"type":"module"}', 'utf-8'); diff --git a/packages/@n8n/json-schema-to-zod/src/index.ts b/packages/@n8n/json-schema-to-zod/src/index.ts new file mode 100644 index 0000000000..10dae97784 --- /dev/null +++ b/packages/@n8n/json-schema-to-zod/src/index.ts @@ -0,0 +1,2 @@ +export type * from './types'; +export { 
jsonSchemaToZod } from './json-schema-to-zod.js'; diff --git a/packages/@n8n/json-schema-to-zod/src/json-schema-to-zod.ts b/packages/@n8n/json-schema-to-zod/src/json-schema-to-zod.ts new file mode 100644 index 0000000000..6f1c6a1315 --- /dev/null +++ b/packages/@n8n/json-schema-to-zod/src/json-schema-to-zod.ts @@ -0,0 +1,15 @@ +import type { z } from 'zod'; + +import { parseSchema } from './parsers/parse-schema'; +import type { JsonSchemaToZodOptions, JsonSchema } from './types'; + +export const jsonSchemaToZod = ( + schema: JsonSchema, + options: JsonSchemaToZodOptions = {}, +): T => { + return parseSchema(schema, { + path: [], + seen: new Map(), + ...options, + }) as T; +}; diff --git a/packages/@n8n/json-schema-to-zod/src/parsers/parse-all-of.ts b/packages/@n8n/json-schema-to-zod/src/parsers/parse-all-of.ts new file mode 100644 index 0000000000..be8fd2c7e5 --- /dev/null +++ b/packages/@n8n/json-schema-to-zod/src/parsers/parse-all-of.ts @@ -0,0 +1,46 @@ +import { z } from 'zod'; + +import { parseSchema } from './parse-schema'; +import type { JsonSchemaObject, JsonSchema, Refs } from '../types'; +import { half } from '../utils/half'; + +const originalIndex = Symbol('Original index'); + +const ensureOriginalIndex = (arr: JsonSchema[]) => { + const newArr = []; + + for (let i = 0; i < arr.length; i++) { + const item = arr[i]; + if (typeof item === 'boolean') { + newArr.push(item ? { [originalIndex]: i } : { [originalIndex]: i, not: {} }); + } else if (originalIndex in item) { + return arr; + } else { + newArr.push({ ...item, [originalIndex]: i }); + } + } + + return newArr; +}; + +export function parseAllOf( + jsonSchema: JsonSchemaObject & { allOf: JsonSchema[] }, + refs: Refs, +): z.ZodTypeAny { + if (jsonSchema.allOf.length === 0) { + return z.never(); + } + + if (jsonSchema.allOf.length === 1) { + const item = jsonSchema.allOf[0]; + + return parseSchema(item, { + ...refs, + path: [...refs.path, 'allOf', (item as never)[originalIndex]], + }); + } + + const [left, right] = half(ensureOriginalIndex(jsonSchema.allOf)); + + return z.intersection(parseAllOf({ allOf: left }, refs), parseAllOf({ allOf: right }, refs)); +} diff --git a/packages/@n8n/json-schema-to-zod/src/parsers/parse-any-of.ts b/packages/@n8n/json-schema-to-zod/src/parsers/parse-any-of.ts new file mode 100644 index 0000000000..73b19b1739 --- /dev/null +++ b/packages/@n8n/json-schema-to-zod/src/parsers/parse-any-of.ts @@ -0,0 +1,19 @@ +import { z } from 'zod'; + +import { parseSchema } from './parse-schema'; +import type { JsonSchemaObject, JsonSchema, Refs } from '../types'; + +export const parseAnyOf = (jsonSchema: JsonSchemaObject & { anyOf: JsonSchema[] }, refs: Refs) => { + return jsonSchema.anyOf.length + ? jsonSchema.anyOf.length === 1 + ? 
parseSchema(jsonSchema.anyOf[0], { + ...refs, + path: [...refs.path, 'anyOf', 0], + }) + : z.union( + jsonSchema.anyOf.map((schema, i) => + parseSchema(schema, { ...refs, path: [...refs.path, 'anyOf', i] }), + ) as [z.ZodTypeAny, z.ZodTypeAny], + ) + : z.any(); +}; diff --git a/packages/@n8n/json-schema-to-zod/src/parsers/parse-array.ts b/packages/@n8n/json-schema-to-zod/src/parsers/parse-array.ts new file mode 100644 index 0000000000..5e01473fd6 --- /dev/null +++ b/packages/@n8n/json-schema-to-zod/src/parsers/parse-array.ts @@ -0,0 +1,34 @@ +import { z } from 'zod'; + +import { parseSchema } from './parse-schema'; +import type { JsonSchemaObject, Refs } from '../types'; +import { extendSchemaWithMessage } from '../utils/extend-schema'; + +export const parseArray = (jsonSchema: JsonSchemaObject & { type: 'array' }, refs: Refs) => { + if (Array.isArray(jsonSchema.items)) { + return z.tuple( + jsonSchema.items.map((v, i) => + parseSchema(v, { ...refs, path: [...refs.path, 'items', i] }), + ) as [z.ZodTypeAny], + ); + } + + let zodSchema = !jsonSchema.items + ? z.array(z.any()) + : z.array(parseSchema(jsonSchema.items, { ...refs, path: [...refs.path, 'items'] })); + + zodSchema = extendSchemaWithMessage( + zodSchema, + jsonSchema, + 'minItems', + (zs, minItems, errorMessage) => zs.min(minItems, errorMessage), + ); + zodSchema = extendSchemaWithMessage( + zodSchema, + jsonSchema, + 'maxItems', + (zs, maxItems, errorMessage) => zs.max(maxItems, errorMessage), + ); + + return zodSchema; +}; diff --git a/packages/@n8n/json-schema-to-zod/src/parsers/parse-boolean.ts b/packages/@n8n/json-schema-to-zod/src/parsers/parse-boolean.ts new file mode 100644 index 0000000000..be8e309e43 --- /dev/null +++ b/packages/@n8n/json-schema-to-zod/src/parsers/parse-boolean.ts @@ -0,0 +1,7 @@ +import { z } from 'zod'; + +import type { JsonSchemaObject } from '../types'; + +export const parseBoolean = (_jsonSchema: JsonSchemaObject & { type: 'boolean' }) => { + return z.boolean(); +}; diff --git a/packages/@n8n/json-schema-to-zod/src/parsers/parse-const.ts b/packages/@n8n/json-schema-to-zod/src/parsers/parse-const.ts new file mode 100644 index 0000000000..445523652d --- /dev/null +++ b/packages/@n8n/json-schema-to-zod/src/parsers/parse-const.ts @@ -0,0 +1,7 @@ +import { z } from 'zod'; + +import type { JsonSchemaObject, Serializable } from '../types'; + +export const parseConst = (jsonSchema: JsonSchemaObject & { const: Serializable }) => { + return z.literal(jsonSchema.const as z.Primitive); +}; diff --git a/packages/@n8n/json-schema-to-zod/src/parsers/parse-default.ts b/packages/@n8n/json-schema-to-zod/src/parsers/parse-default.ts new file mode 100644 index 0000000000..d64bcf85c8 --- /dev/null +++ b/packages/@n8n/json-schema-to-zod/src/parsers/parse-default.ts @@ -0,0 +1,7 @@ +import { z } from 'zod'; + +import type { JsonSchemaObject } from '../types'; + +export const parseDefault = (_jsonSchema: JsonSchemaObject) => { + return z.any(); +}; diff --git a/packages/@n8n/json-schema-to-zod/src/parsers/parse-enum.ts b/packages/@n8n/json-schema-to-zod/src/parsers/parse-enum.ts new file mode 100644 index 0000000000..26385472cc --- /dev/null +++ b/packages/@n8n/json-schema-to-zod/src/parsers/parse-enum.ts @@ -0,0 +1,25 @@ +import { z } from 'zod'; + +import type { JsonSchemaObject, Serializable } from '../types'; + +export const parseEnum = (jsonSchema: JsonSchemaObject & { enum: Serializable[] }) => { + if (jsonSchema.enum.length === 0) { + return z.never(); + } + + if (jsonSchema.enum.length === 1) { + // union does not 
work when there is only one element + return z.literal(jsonSchema.enum[0] as z.Primitive); + } + + if (jsonSchema.enum.every((x) => typeof x === 'string')) { + return z.enum(jsonSchema.enum as [string]); + } + + return z.union( + jsonSchema.enum.map((x) => z.literal(x as z.Primitive)) as unknown as [ + z.ZodTypeAny, + z.ZodTypeAny, + ], + ); +}; diff --git a/packages/@n8n/json-schema-to-zod/src/parsers/parse-if-then-else.ts b/packages/@n8n/json-schema-to-zod/src/parsers/parse-if-then-else.ts new file mode 100644 index 0000000000..7cb595a615 --- /dev/null +++ b/packages/@n8n/json-schema-to-zod/src/parsers/parse-if-then-else.ts @@ -0,0 +1,31 @@ +import { z } from 'zod'; + +import { parseSchema } from './parse-schema'; +import type { JsonSchemaObject, JsonSchema, Refs } from '../types'; + +export const parseIfThenElse = ( + jsonSchema: JsonSchemaObject & { + if: JsonSchema; + then: JsonSchema; + else: JsonSchema; + }, + refs: Refs, +) => { + const $if = parseSchema(jsonSchema.if, { ...refs, path: [...refs.path, 'if'] }); + const $then = parseSchema(jsonSchema.then, { + ...refs, + path: [...refs.path, 'then'], + }); + const $else = parseSchema(jsonSchema.else, { + ...refs, + path: [...refs.path, 'else'], + }); + + return z.union([$then, $else]).superRefine((value, ctx) => { + const result = $if.safeParse(value).success ? $then.safeParse(value) : $else.safeParse(value); + + if (!result.success) { + result.error.errors.forEach((error) => ctx.addIssue(error)); + } + }); +}; diff --git a/packages/@n8n/json-schema-to-zod/src/parsers/parse-multiple-type.ts b/packages/@n8n/json-schema-to-zod/src/parsers/parse-multiple-type.ts new file mode 100644 index 0000000000..65ff3c35b5 --- /dev/null +++ b/packages/@n8n/json-schema-to-zod/src/parsers/parse-multiple-type.ts @@ -0,0 +1,16 @@ +import { z } from 'zod'; + +import { parseSchema } from './parse-schema'; +import type { JsonSchema, JsonSchemaObject, Refs } from '../types'; + +export const parseMultipleType = ( + jsonSchema: JsonSchemaObject & { type: string[] }, + refs: Refs, +) => { + return z.union( + jsonSchema.type.map((type) => parseSchema({ ...jsonSchema, type } as JsonSchema, refs)) as [ + z.ZodTypeAny, + z.ZodTypeAny, + ], + ); +}; diff --git a/packages/@n8n/json-schema-to-zod/src/parsers/parse-not.ts b/packages/@n8n/json-schema-to-zod/src/parsers/parse-not.ts new file mode 100644 index 0000000000..219d32c8dd --- /dev/null +++ b/packages/@n8n/json-schema-to-zod/src/parsers/parse-not.ts @@ -0,0 +1,15 @@ +import { z } from 'zod'; + +import { parseSchema } from './parse-schema'; +import type { JsonSchemaObject, JsonSchema, Refs } from '../types'; + +export const parseNot = (jsonSchema: JsonSchemaObject & { not: JsonSchema }, refs: Refs) => { + return z.any().refine( + (value) => + !parseSchema(jsonSchema.not, { + ...refs, + path: [...refs.path, 'not'], + }).safeParse(value).success, + 'Invalid input: Should NOT be valid against schema', + ); +}; diff --git a/packages/@n8n/json-schema-to-zod/src/parsers/parse-null.ts b/packages/@n8n/json-schema-to-zod/src/parsers/parse-null.ts new file mode 100644 index 0000000000..86dbfea439 --- /dev/null +++ b/packages/@n8n/json-schema-to-zod/src/parsers/parse-null.ts @@ -0,0 +1,7 @@ +import { z } from 'zod'; + +import type { JsonSchemaObject } from '../types'; + +export const parseNull = (_jsonSchema: JsonSchemaObject & { type: 'null' }) => { + return z.null(); +}; diff --git a/packages/@n8n/json-schema-to-zod/src/parsers/parse-nullable.ts b/packages/@n8n/json-schema-to-zod/src/parsers/parse-nullable.ts new file 
mode 100644 index 0000000000..cfc575e9c7 --- /dev/null +++ b/packages/@n8n/json-schema-to-zod/src/parsers/parse-nullable.ts @@ -0,0 +1,10 @@ +import { parseSchema } from './parse-schema'; +import type { JsonSchemaObject, Refs } from '../types'; +import { omit } from '../utils/omit'; + +/** + * For compatibility with open api 3.0 nullable + */ +export const parseNullable = (jsonSchema: JsonSchemaObject & { nullable: true }, refs: Refs) => { + return parseSchema(omit(jsonSchema, 'nullable'), refs, true).nullable(); +}; diff --git a/packages/@n8n/json-schema-to-zod/src/parsers/parse-number.ts b/packages/@n8n/json-schema-to-zod/src/parsers/parse-number.ts new file mode 100644 index 0000000000..504a453faf --- /dev/null +++ b/packages/@n8n/json-schema-to-zod/src/parsers/parse-number.ts @@ -0,0 +1,88 @@ +import { z } from 'zod'; + +import type { JsonSchemaObject } from '../types'; +import { extendSchemaWithMessage } from '../utils/extend-schema'; + +export const parseNumber = (jsonSchema: JsonSchemaObject & { type: 'number' | 'integer' }) => { + let zodSchema = z.number(); + + let isInteger = false; + if (jsonSchema.type === 'integer') { + isInteger = true; + zodSchema = extendSchemaWithMessage(zodSchema, jsonSchema, 'type', (zs, _, errorMsg) => + zs.int(errorMsg), + ); + } else if (jsonSchema.format === 'int64') { + isInteger = true; + zodSchema = extendSchemaWithMessage(zodSchema, jsonSchema, 'format', (zs, _, errorMsg) => + zs.int(errorMsg), + ); + } + + zodSchema = extendSchemaWithMessage( + zodSchema, + jsonSchema, + 'multipleOf', + (zs, multipleOf, errorMsg) => { + if (multipleOf === 1) { + if (isInteger) return zs; + + return zs.int(errorMsg); + } + + return zs.multipleOf(multipleOf, errorMsg); + }, + ); + + if (typeof jsonSchema.minimum === 'number') { + if (jsonSchema.exclusiveMinimum === true) { + zodSchema = extendSchemaWithMessage( + zodSchema, + jsonSchema, + 'minimum', + (zs, minimum, errorMsg) => zs.gt(minimum, errorMsg), + ); + } else { + zodSchema = extendSchemaWithMessage( + zodSchema, + jsonSchema, + 'minimum', + (zs, minimum, errorMsg) => zs.gte(minimum, errorMsg), + ); + } + } else if (typeof jsonSchema.exclusiveMinimum === 'number') { + zodSchema = extendSchemaWithMessage( + zodSchema, + jsonSchema, + 'exclusiveMinimum', + (zs, exclusiveMinimum, errorMsg) => zs.gt(exclusiveMinimum as number, errorMsg), + ); + } + + if (typeof jsonSchema.maximum === 'number') { + if (jsonSchema.exclusiveMaximum === true) { + zodSchema = extendSchemaWithMessage( + zodSchema, + jsonSchema, + 'maximum', + (zs, maximum, errorMsg) => zs.lt(maximum, errorMsg), + ); + } else { + zodSchema = extendSchemaWithMessage( + zodSchema, + jsonSchema, + 'maximum', + (zs, maximum, errorMsg) => zs.lte(maximum, errorMsg), + ); + } + } else if (typeof jsonSchema.exclusiveMaximum === 'number') { + zodSchema = extendSchemaWithMessage( + zodSchema, + jsonSchema, + 'exclusiveMaximum', + (zs, exclusiveMaximum, errorMsg) => zs.lt(exclusiveMaximum as number, errorMsg), + ); + } + + return zodSchema; +}; diff --git a/packages/@n8n/json-schema-to-zod/src/parsers/parse-object.ts b/packages/@n8n/json-schema-to-zod/src/parsers/parse-object.ts new file mode 100644 index 0000000000..6a87b7162b --- /dev/null +++ b/packages/@n8n/json-schema-to-zod/src/parsers/parse-object.ts @@ -0,0 +1,219 @@ +import * as z from 'zod'; + +import { parseAllOf } from './parse-all-of'; +import { parseAnyOf } from './parse-any-of'; +import { parseOneOf } from './parse-one-of'; +import { parseSchema } from './parse-schema'; +import type { 
JsonSchemaObject, Refs } from '../types'; +import { its } from '../utils/its'; + +function parseObjectProperties(objectSchema: JsonSchemaObject & { type: 'object' }, refs: Refs) { + if (!objectSchema.properties) { + return undefined; + } + + const propertyKeys = Object.keys(objectSchema.properties); + if (propertyKeys.length === 0) { + return z.object({}); + } + + const properties: Record = {}; + + for (const key of propertyKeys) { + const propJsonSchema = objectSchema.properties[key]; + + const propZodSchema = parseSchema(propJsonSchema, { + ...refs, + path: [...refs.path, 'properties', key], + }); + + const hasDefault = typeof propJsonSchema === 'object' && propJsonSchema.default !== undefined; + + const required = Array.isArray(objectSchema.required) + ? objectSchema.required.includes(key) + : typeof propJsonSchema === 'object' && propJsonSchema.required === true; + + const isOptional = !hasDefault && !required; + + properties[key] = isOptional ? propZodSchema.optional() : propZodSchema; + } + + return z.object(properties); +} + +export function parseObject( + objectSchema: JsonSchemaObject & { type: 'object' }, + refs: Refs, +): z.ZodTypeAny { + const hasPatternProperties = Object.keys(objectSchema.patternProperties ?? {}).length > 0; + + const propertiesSchema: + | z.ZodObject, 'strip', z.ZodTypeAny> + | undefined = parseObjectProperties(objectSchema, refs); + let zodSchema: z.ZodTypeAny | undefined = propertiesSchema; + + const additionalProperties = + objectSchema.additionalProperties !== undefined + ? parseSchema(objectSchema.additionalProperties, { + ...refs, + path: [...refs.path, 'additionalProperties'], + }) + : undefined; + + if (objectSchema.patternProperties) { + const parsedPatternProperties = Object.fromEntries( + Object.entries(objectSchema.patternProperties).map(([key, value]) => { + return [ + key, + parseSchema(value, { + ...refs, + path: [...refs.path, 'patternProperties', key], + }), + ]; + }), + ); + const patternPropertyValues = Object.values(parsedPatternProperties); + + if (propertiesSchema) { + if (additionalProperties) { + zodSchema = propertiesSchema.catchall( + z.union([...patternPropertyValues, additionalProperties] as [z.ZodTypeAny, z.ZodTypeAny]), + ); + } else if (Object.keys(parsedPatternProperties).length > 1) { + zodSchema = propertiesSchema.catchall( + z.union(patternPropertyValues as [z.ZodTypeAny, z.ZodTypeAny]), + ); + } else { + zodSchema = propertiesSchema.catchall(patternPropertyValues[0]); + } + } else { + if (additionalProperties) { + zodSchema = z.record( + z.union([...patternPropertyValues, additionalProperties] as [z.ZodTypeAny, z.ZodTypeAny]), + ); + } else if (patternPropertyValues.length > 1) { + zodSchema = z.record(z.union(patternPropertyValues as [z.ZodTypeAny, z.ZodTypeAny])); + } else { + zodSchema = z.record(patternPropertyValues[0]); + } + } + + const objectPropertyKeys = new Set(Object.keys(objectSchema.properties ?? 
{})); + zodSchema = zodSchema.superRefine((value: Record, ctx) => { + for (const key in value) { + let wasMatched = objectPropertyKeys.has(key); + + for (const patternPropertyKey in objectSchema.patternProperties) { + const regex = new RegExp(patternPropertyKey); + if (key.match(regex)) { + wasMatched = true; + const result = parsedPatternProperties[patternPropertyKey].safeParse(value[key]); + if (!result.success) { + ctx.addIssue({ + path: [...ctx.path, key], + code: 'custom', + message: `Invalid input: Key matching regex /${key}/ must match schema`, + params: { + issues: result.error.issues, + }, + }); + } + } + } + + if (!wasMatched && additionalProperties) { + const result = additionalProperties.safeParse(value[key]); + if (!result.success) { + ctx.addIssue({ + path: [...ctx.path, key], + code: 'custom', + message: 'Invalid input: must match catchall schema', + params: { + issues: result.error.issues, + }, + }); + } + } + } + }); + } + + let output: z.ZodTypeAny; + if (propertiesSchema) { + if (hasPatternProperties) { + output = zodSchema!; + } else if (additionalProperties) { + if (additionalProperties instanceof z.ZodNever) { + output = propertiesSchema.strict(); + } else { + output = propertiesSchema.catchall(additionalProperties); + } + } else { + output = zodSchema!; + } + } else { + if (hasPatternProperties) { + output = zodSchema!; + } else if (additionalProperties) { + output = z.record(additionalProperties); + } else { + output = z.record(z.any()); + } + } + + if (its.an.anyOf(objectSchema)) { + output = output.and( + parseAnyOf( + { + ...objectSchema, + anyOf: objectSchema.anyOf.map((x) => + typeof x === 'object' && + !x.type && + (x.properties ?? x.additionalProperties ?? x.patternProperties) + ? { ...x, type: 'object' } + : x, + ), + }, + refs, + ), + ); + } + + if (its.a.oneOf(objectSchema)) { + output = output.and( + parseOneOf( + { + ...objectSchema, + oneOf: objectSchema.oneOf.map((x) => + typeof x === 'object' && + !x.type && + (x.properties ?? x.additionalProperties ?? x.patternProperties) + ? { ...x, type: 'object' } + : x, + ), + }, + refs, + ), + ); + } + + if (its.an.allOf(objectSchema)) { + output = output.and( + parseAllOf( + { + ...objectSchema, + allOf: objectSchema.allOf.map((x) => + typeof x === 'object' && + !x.type && + (x.properties ?? x.additionalProperties ?? x.patternProperties) + ? { ...x, type: 'object' } + : x, + ), + }, + refs, + ), + ); + } + + return output; +} diff --git a/packages/@n8n/json-schema-to-zod/src/parsers/parse-one-of.ts b/packages/@n8n/json-schema-to-zod/src/parsers/parse-one-of.ts new file mode 100644 index 0000000000..10931a9675 --- /dev/null +++ b/packages/@n8n/json-schema-to-zod/src/parsers/parse-one-of.ts @@ -0,0 +1,41 @@ +import { z } from 'zod'; + +import { parseSchema } from './parse-schema'; +import type { JsonSchemaObject, JsonSchema, Refs } from '../types'; + +export const parseOneOf = (jsonSchema: JsonSchemaObject & { oneOf: JsonSchema[] }, refs: Refs) => { + if (!jsonSchema.oneOf.length) { + return z.any(); + } + + if (jsonSchema.oneOf.length === 1) { + return parseSchema(jsonSchema.oneOf[0], { + ...refs, + path: [...refs.path, 'oneOf', 0], + }); + } + + return z.any().superRefine((x, ctx) => { + const schemas = jsonSchema.oneOf.map((schema, i) => + parseSchema(schema, { + ...refs, + path: [...refs.path, 'oneOf', i], + }), + ); + + const unionErrors = schemas.reduce( + (errors, schema) => + ((result) => (result.error ? 
[...errors, result.error] : errors))(schema.safeParse(x)), + [], + ); + + if (schemas.length - unionErrors.length !== 1) { + ctx.addIssue({ + path: ctx.path, + code: 'invalid_union', + unionErrors, + message: 'Invalid input: Should pass single schema', + }); + } + }); +}; diff --git a/packages/@n8n/json-schema-to-zod/src/parsers/parse-schema.ts b/packages/@n8n/json-schema-to-zod/src/parsers/parse-schema.ts new file mode 100644 index 0000000000..24818bf490 --- /dev/null +++ b/packages/@n8n/json-schema-to-zod/src/parsers/parse-schema.ts @@ -0,0 +1,130 @@ +import * as z from 'zod'; + +import { parseAllOf } from './parse-all-of'; +import { parseAnyOf } from './parse-any-of'; +import { parseArray } from './parse-array'; +import { parseBoolean } from './parse-boolean'; +import { parseConst } from './parse-const'; +import { parseDefault } from './parse-default'; +import { parseEnum } from './parse-enum'; +import { parseIfThenElse } from './parse-if-then-else'; +import { parseMultipleType } from './parse-multiple-type'; +import { parseNot } from './parse-not'; +import { parseNull } from './parse-null'; +import { parseNullable } from './parse-nullable'; +import { parseNumber } from './parse-number'; +import { parseObject } from './parse-object'; +import { parseOneOf } from './parse-one-of'; +import { parseString } from './parse-string'; +import type { ParserSelector, Refs, JsonSchemaObject, JsonSchema } from '../types'; +import { its } from '../utils/its'; + +const addDescribes = (jsonSchema: JsonSchemaObject, zodSchema: z.ZodTypeAny): z.ZodTypeAny => { + if (jsonSchema.description) { + zodSchema = zodSchema.describe(jsonSchema.description); + } + + return zodSchema; +}; + +const addDefaults = (jsonSchema: JsonSchemaObject, zodSchema: z.ZodTypeAny): z.ZodTypeAny => { + if (jsonSchema.default !== undefined) { + zodSchema = zodSchema.default(jsonSchema.default); + } + + return zodSchema; +}; + +const addAnnotations = (jsonSchema: JsonSchemaObject, zodSchema: z.ZodTypeAny): z.ZodTypeAny => { + if (jsonSchema.readOnly) { + zodSchema = zodSchema.readonly(); + } + + return zodSchema; +}; + +const selectParser: ParserSelector = (schema, refs) => { + if (its.a.nullable(schema)) { + return parseNullable(schema, refs); + } else if (its.an.object(schema)) { + return parseObject(schema, refs); + } else if (its.an.array(schema)) { + return parseArray(schema, refs); + } else if (its.an.anyOf(schema)) { + return parseAnyOf(schema, refs); + } else if (its.an.allOf(schema)) { + return parseAllOf(schema, refs); + } else if (its.a.oneOf(schema)) { + return parseOneOf(schema, refs); + } else if (its.a.not(schema)) { + return parseNot(schema, refs); + } else if (its.an.enum(schema)) { + return parseEnum(schema); //<-- needs to come before primitives + } else if (its.a.const(schema)) { + return parseConst(schema); + } else if (its.a.multipleType(schema)) { + return parseMultipleType(schema, refs); + } else if (its.a.primitive(schema, 'string')) { + return parseString(schema); + } else if (its.a.primitive(schema, 'number') || its.a.primitive(schema, 'integer')) { + return parseNumber(schema); + } else if (its.a.primitive(schema, 'boolean')) { + return parseBoolean(schema); + } else if (its.a.primitive(schema, 'null')) { + return parseNull(schema); + } else if (its.a.conditional(schema)) { + return parseIfThenElse(schema, refs); + } else { + return parseDefault(schema); + } +}; + +export const parseSchema = ( + jsonSchema: JsonSchema, + refs: Refs = { seen: new Map(), path: [] }, + blockMeta?: boolean, +): z.ZodTypeAny => 
{ + if (typeof jsonSchema !== 'object') return jsonSchema ? z.any() : z.never(); + + if (refs.parserOverride) { + const custom = refs.parserOverride(jsonSchema, refs); + + if (custom instanceof z.ZodType) { + return custom; + } + } + + let seen = refs.seen.get(jsonSchema); + + if (seen) { + if (seen.r !== undefined) { + return seen.r; + } + + if (refs.depth === undefined || seen.n >= refs.depth) { + return z.any(); + } + + seen.n += 1; + } else { + seen = { r: undefined, n: 0 }; + refs.seen.set(jsonSchema, seen); + } + + let parsedZodSchema = selectParser(jsonSchema, refs); + if (!blockMeta) { + if (!refs.withoutDescribes) { + parsedZodSchema = addDescribes(jsonSchema, parsedZodSchema); + } + + if (!refs.withoutDefaults) { + parsedZodSchema = addDefaults(jsonSchema, parsedZodSchema); + } + + parsedZodSchema = addAnnotations(jsonSchema, parsedZodSchema); + } + + seen.r = parsedZodSchema; + + return parsedZodSchema; +}; diff --git a/packages/@n8n/json-schema-to-zod/src/parsers/parse-string.ts b/packages/@n8n/json-schema-to-zod/src/parsers/parse-string.ts new file mode 100644 index 0000000000..ea2be63c30 --- /dev/null +++ b/packages/@n8n/json-schema-to-zod/src/parsers/parse-string.ts @@ -0,0 +1,58 @@ +import { z } from 'zod'; + +import type { JsonSchemaObject } from '../types'; +import { extendSchemaWithMessage } from '../utils/extend-schema'; + +export const parseString = (jsonSchema: JsonSchemaObject & { type: 'string' }) => { + let zodSchema = z.string(); + + zodSchema = extendSchemaWithMessage(zodSchema, jsonSchema, 'format', (zs, format, errorMsg) => { + switch (format) { + case 'email': + return zs.email(errorMsg); + case 'ip': + return zs.ip(errorMsg); + case 'ipv4': + return zs.ip({ version: 'v4', message: errorMsg }); + case 'ipv6': + return zs.ip({ version: 'v6', message: errorMsg }); + case 'uri': + return zs.url(errorMsg); + case 'uuid': + return zs.uuid(errorMsg); + case 'date-time': + return zs.datetime({ offset: true, message: errorMsg }); + case 'time': + return zs.time(errorMsg); + case 'date': + return zs.date(errorMsg); + case 'binary': + return zs.base64(errorMsg); + case 'duration': + return zs.duration(errorMsg); + default: + return zs; + } + }); + + zodSchema = extendSchemaWithMessage(zodSchema, jsonSchema, 'contentEncoding', (zs, _, errorMsg) => + zs.base64(errorMsg), + ); + zodSchema = extendSchemaWithMessage(zodSchema, jsonSchema, 'pattern', (zs, pattern, errorMsg) => + zs.regex(new RegExp(pattern), errorMsg), + ); + zodSchema = extendSchemaWithMessage( + zodSchema, + jsonSchema, + 'minLength', + (zs, minLength, errorMsg) => zs.min(minLength, errorMsg), + ); + zodSchema = extendSchemaWithMessage( + zodSchema, + jsonSchema, + 'maxLength', + (zs, maxLength, errorMsg) => zs.max(maxLength, errorMsg), + ); + + return zodSchema; +}; diff --git a/packages/@n8n/json-schema-to-zod/src/types.ts b/packages/@n8n/json-schema-to-zod/src/types.ts new file mode 100644 index 0000000000..bb342af230 --- /dev/null +++ b/packages/@n8n/json-schema-to-zod/src/types.ts @@ -0,0 +1,82 @@ +import type { ZodTypeAny } from 'zod'; + +export type Serializable = + | { [key: string]: Serializable } + | Serializable[] + | string + | number + | boolean + | null; + +export type JsonSchema = JsonSchemaObject | boolean; +export type JsonSchemaObject = { + // left permissive by design + type?: string | string[]; + + // object + properties?: { [key: string]: JsonSchema }; + additionalProperties?: JsonSchema; + unevaluatedProperties?: JsonSchema; + patternProperties?: { [key: string]: JsonSchema }; + 
minProperties?: number; + maxProperties?: number; + required?: string[] | boolean; + propertyNames?: JsonSchema; + + // array + items?: JsonSchema | JsonSchema[]; + additionalItems?: JsonSchema; + minItems?: number; + maxItems?: number; + uniqueItems?: boolean; + + // string + minLength?: number; + maxLength?: number; + pattern?: string; + format?: string; + + // number + minimum?: number; + maximum?: number; + exclusiveMinimum?: number | boolean; + exclusiveMaximum?: number | boolean; + multipleOf?: number; + + // unions + anyOf?: JsonSchema[]; + allOf?: JsonSchema[]; + oneOf?: JsonSchema[]; + + if?: JsonSchema; + then?: JsonSchema; + else?: JsonSchema; + + // shared + const?: Serializable; + enum?: Serializable[]; + + errorMessage?: { [key: string]: string | undefined }; + + description?: string; + default?: Serializable; + readOnly?: boolean; + not?: JsonSchema; + contentEncoding?: string; + nullable?: boolean; +}; + +export type ParserSelector = (schema: JsonSchemaObject, refs: Refs) => ZodTypeAny; +export type ParserOverride = (schema: JsonSchemaObject, refs: Refs) => ZodTypeAny | undefined; + +export type JsonSchemaToZodOptions = { + withoutDefaults?: boolean; + withoutDescribes?: boolean; + parserOverride?: ParserOverride; + depth?: number; +}; + +export type Refs = JsonSchemaToZodOptions & { + path: Array; + seen: Map; +}; diff --git a/packages/@n8n/json-schema-to-zod/src/utils/extend-schema.ts b/packages/@n8n/json-schema-to-zod/src/utils/extend-schema.ts new file mode 100644 index 0000000000..1fd0ed720b --- /dev/null +++ b/packages/@n8n/json-schema-to-zod/src/utils/extend-schema.ts @@ -0,0 +1,23 @@ +import type { z } from 'zod'; + +import type { JsonSchemaObject } from '../types'; + +export function extendSchemaWithMessage< + TZod extends z.ZodTypeAny, + TJson extends JsonSchemaObject, + TKey extends keyof TJson, +>( + zodSchema: TZod, + jsonSchema: TJson, + key: TKey, + extend: (zodSchema: TZod, value: NonNullable, errorMessage?: string) => TZod, +) { + const value = jsonSchema[key]; + + if (value !== undefined) { + const errorMessage = jsonSchema.errorMessage?.[key as string]; + return extend(zodSchema, value as NonNullable, errorMessage); + } + + return zodSchema; +} diff --git a/packages/@n8n/json-schema-to-zod/src/utils/half.ts b/packages/@n8n/json-schema-to-zod/src/utils/half.ts new file mode 100644 index 0000000000..810776e6c2 --- /dev/null +++ b/packages/@n8n/json-schema-to-zod/src/utils/half.ts @@ -0,0 +1,3 @@ +export const half = (arr: T[]): [T[], T[]] => { + return [arr.slice(0, arr.length / 2), arr.slice(arr.length / 2)]; +}; diff --git a/packages/@n8n/json-schema-to-zod/src/utils/its.ts b/packages/@n8n/json-schema-to-zod/src/utils/its.ts new file mode 100644 index 0000000000..494c1f6372 --- /dev/null +++ b/packages/@n8n/json-schema-to-zod/src/utils/its.ts @@ -0,0 +1,57 @@ +import type { JsonSchema, JsonSchemaObject, Serializable } from '../types'; + +export const its = { + an: { + object: (x: JsonSchemaObject): x is JsonSchemaObject & { type: 'object' } => + x.type === 'object', + array: (x: JsonSchemaObject): x is JsonSchemaObject & { type: 'array' } => x.type === 'array', + anyOf: ( + x: JsonSchemaObject, + ): x is JsonSchemaObject & { + anyOf: JsonSchema[]; + } => x.anyOf !== undefined, + allOf: ( + x: JsonSchemaObject, + ): x is JsonSchemaObject & { + allOf: JsonSchema[]; + } => x.allOf !== undefined, + enum: ( + x: JsonSchemaObject, + ): x is JsonSchemaObject & { + enum: Serializable | Serializable[]; + } => x.enum !== undefined, + }, + a: { + nullable: (x: 
JsonSchemaObject): x is JsonSchemaObject & { nullable: true } => + // eslint-disable-next-line @typescript-eslint/no-explicit-any, @typescript-eslint/no-unsafe-member-access + (x as any).nullable === true, + multipleType: (x: JsonSchemaObject): x is JsonSchemaObject & { type: string[] } => + Array.isArray(x.type), + not: ( + x: JsonSchemaObject, + ): x is JsonSchemaObject & { + not: JsonSchema; + } => x.not !== undefined, + const: ( + x: JsonSchemaObject, + ): x is JsonSchemaObject & { + const: Serializable; + } => x.const !== undefined, + primitive: ( + x: JsonSchemaObject, + p: T, + ): x is JsonSchemaObject & { type: T } => x.type === p, + conditional: ( + x: JsonSchemaObject, + ): x is JsonSchemaObject & { + if: JsonSchema; + then: JsonSchema; + else: JsonSchema; + } => Boolean('if' in x && x.if && 'then' in x && 'else' in x && x.then && x.else), + oneOf: ( + x: JsonSchemaObject, + ): x is JsonSchemaObject & { + oneOf: JsonSchema[]; + } => x.oneOf !== undefined, + }, +}; diff --git a/packages/@n8n/json-schema-to-zod/src/utils/omit.ts b/packages/@n8n/json-schema-to-zod/src/utils/omit.ts new file mode 100644 index 0000000000..af9d579fb6 --- /dev/null +++ b/packages/@n8n/json-schema-to-zod/src/utils/omit.ts @@ -0,0 +1,8 @@ +export const omit = (obj: T, ...keys: K[]): Omit => + Object.keys(obj).reduce((acc: Record, key) => { + if (!keys.includes(key as K)) { + acc[key] = obj[key as K]; + } + + return acc; + }, {}) as Omit; diff --git a/packages/@n8n/json-schema-to-zod/test/all.json b/packages/@n8n/json-schema-to-zod/test/all.json new file mode 100644 index 0000000000..f270ca3fa1 --- /dev/null +++ b/packages/@n8n/json-schema-to-zod/test/all.json @@ -0,0 +1,143 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema", + "properties": { + "allOf": { + "allOf": [ + { + "type": "boolean" + }, + { + "type": "number" + }, + { + "type": "string" + } + ] + }, + "anyOf": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "number" + }, + { + "type": "string" + } + ] + }, + "oneOf": { + "oneOf": [ + { + "type": "boolean" + }, + { + "type": "number" + }, + { + "type": "string" + } + ] + }, + "array": { + "type": "array", + "items": { + "type": "string" + }, + "minItems": 2, + "maxItems": 3 + }, + "tuple": { + "type": "array", + "items": [ + { + "type": "boolean" + }, + { + "type": "number" + }, + { + "type": "string" + } + ], + "minItems": 2, + "maxItems": 3 + }, + "const": { + "const": "xbox" + }, + "enum": { + "enum": ["ps4", "ps5"] + }, + "ifThenElse": { + "if": { + "type": "string" + }, + "then": { + "const": "x" + }, + "else": { + "enum": [1, 2, 3] + } + }, + "null": { + "type": "null" + }, + "multiple": { + "type": ["array", "boolean"] + }, + "objAdditionalTrue": { + "type": "object", + "properties": { + "x": { + "type": "string" + } + }, + "additionalProperties": true + }, + "objAdditionalFalse": { + "type": "object", + "properties": { + "x": { + "type": "string" + } + }, + "additionalProperties": false + }, + "objAdditionalNumber": { + "type": "object", + "properties": { + "x": { + "type": "string" + } + }, + "additionalProperties": { + "type": "number" + } + }, + "objAdditionalOnly": { + "type": "object", + "additionalProperties": { + "type": "number" + } + }, + "patternProps": { + "type": "object", + "patternProperties": { + "^x": { + "type": "string" + }, + "^y": { + "type": "number" + } + }, + "properties": { + "z": { + "type": "string" + } + }, + "additionalProperties": false + } + } +} diff --git a/packages/@n8n/json-schema-to-zod/test/extend-expect.ts 
b/packages/@n8n/json-schema-to-zod/test/extend-expect.ts new file mode 100644 index 0000000000..5196e43416 --- /dev/null +++ b/packages/@n8n/json-schema-to-zod/test/extend-expect.ts @@ -0,0 +1,16 @@ +import type { z } from 'zod'; + +expect.extend({ + toMatchZod(this: jest.MatcherContext, actual: z.ZodTypeAny, expected: z.ZodTypeAny) { + const actualSerialized = JSON.stringify(actual._def, null, 2); + const expectedSerialized = JSON.stringify(expected._def, null, 2); + const pass = this.equals(actualSerialized, expectedSerialized); + + return { + pass, + message: pass + ? () => `Expected ${actualSerialized} not to match ${expectedSerialized}` + : () => `Expected ${actualSerialized} to match ${expectedSerialized}`, + }; + }, +}); diff --git a/packages/@n8n/json-schema-to-zod/test/jest.d.ts b/packages/@n8n/json-schema-to-zod/test/jest.d.ts new file mode 100644 index 0000000000..dff5a5fa4c --- /dev/null +++ b/packages/@n8n/json-schema-to-zod/test/jest.d.ts @@ -0,0 +1,5 @@ +namespace jest { + interface Matchers { + toMatchZod(expected: unknown): T; + } +} diff --git a/packages/@n8n/json-schema-to-zod/test/json-schema-to-zod.test.ts b/packages/@n8n/json-schema-to-zod/test/json-schema-to-zod.test.ts new file mode 100644 index 0000000000..5f383ae71b --- /dev/null +++ b/packages/@n8n/json-schema-to-zod/test/json-schema-to-zod.test.ts @@ -0,0 +1,106 @@ +import type { JSONSchema4, JSONSchema6Definition, JSONSchema7Definition } from 'json-schema'; +import { z } from 'zod'; + +import { jsonSchemaToZod } from '../src'; + +describe('jsonSchemaToZod', () => { + test('should accept json schema 7 and 4', () => { + const schema = { type: 'string' } as unknown; + + expect(jsonSchemaToZod(schema as JSONSchema4)); + expect(jsonSchemaToZod(schema as JSONSchema6Definition)); + expect(jsonSchemaToZod(schema as JSONSchema7Definition)); + }); + + test('can exclude defaults', () => { + expect( + jsonSchemaToZod( + { + type: 'string', + default: 'foo', + }, + { withoutDefaults: true }, + ), + ).toMatchZod(z.string()); + }); + + test('should include describes', () => { + expect( + jsonSchemaToZod({ + type: 'string', + description: 'foo', + }), + ).toMatchZod(z.string().describe('foo')); + }); + + test('can exclude describes', () => { + expect( + jsonSchemaToZod( + { + type: 'string', + description: 'foo', + }, + { + withoutDescribes: true, + }, + ), + ).toMatchZod(z.string()); + }); + + test('will remove optionality if default is present', () => { + expect( + jsonSchemaToZod({ + type: 'object', + properties: { + prop: { + type: 'string', + default: 'def', + }, + }, + }), + ).toMatchZod(z.object({ prop: z.string().default('def') })); + }); + + test('will handle falsy defaults', () => { + expect( + jsonSchemaToZod({ + type: 'boolean', + default: false, + }), + ).toMatchZod(z.boolean().default(false)); + }); + + test('will ignore undefined as default', () => { + expect( + jsonSchemaToZod({ + type: 'null', + default: undefined, + }), + ).toMatchZod(z.null()); + }); + + test('should be possible to define a custom parser', () => { + expect( + jsonSchemaToZod( + { + allOf: [{ type: 'string' }, { type: 'number' }, { type: 'boolean', description: 'foo' }], + }, + { + parserOverride: (schema, refs) => { + if ( + refs.path.length === 2 && + refs.path[0] === 'allOf' && + refs.path[1] === 2 && + schema.type === 'boolean' && + schema.description === 'foo' + ) { + return z.null(); + } + + return undefined; + }, + }, + ), + ).toMatchZod(z.intersection(z.string(), z.intersection(z.number(), z.null()))); + }); +}); diff --git 
a/packages/@n8n/json-schema-to-zod/test/parsers/parse-all-of.test.ts b/packages/@n8n/json-schema-to-zod/test/parsers/parse-all-of.test.ts new file mode 100644 index 0000000000..546572255c --- /dev/null +++ b/packages/@n8n/json-schema-to-zod/test/parsers/parse-all-of.test.ts @@ -0,0 +1,48 @@ +import { z } from 'zod'; + +import { parseAllOf } from '../../src/parsers/parse-all-of'; + +describe('parseAllOf', () => { + test('should create never if empty', () => { + expect( + parseAllOf( + { + allOf: [], + }, + { path: [], seen: new Map() }, + ), + ).toMatchZod(z.never()); + }); + + test('should handle true values', () => { + expect( + parseAllOf( + { + allOf: [{ type: 'string' }, true], + }, + { path: [], seen: new Map() }, + ), + ).toMatchZod(z.intersection(z.string(), z.any())); + }); + + test('should handle false values', () => { + expect( + parseAllOf( + { + allOf: [{ type: 'string' }, false], + }, + { path: [], seen: new Map() }, + ), + ).toMatchZod( + z.intersection( + z.string(), + z + .any() + .refine( + (value) => !z.any().safeParse(value).success, + 'Invalid input: Should NOT be valid against schema', + ), + ), + ); + }); +}); diff --git a/packages/@n8n/json-schema-to-zod/test/parsers/parse-any-of.test.ts b/packages/@n8n/json-schema-to-zod/test/parsers/parse-any-of.test.ts new file mode 100644 index 0000000000..72abcab047 --- /dev/null +++ b/packages/@n8n/json-schema-to-zod/test/parsers/parse-any-of.test.ts @@ -0,0 +1,31 @@ +import { z } from 'zod'; + +import { parseAnyOf } from '../../src/parsers/parse-any-of'; + +describe('parseAnyOf', () => { + test('should create a union from two or more schemas', () => { + expect( + parseAnyOf( + { + anyOf: [ + { + type: 'string', + }, + { type: 'number' }, + ], + }, + { path: [], seen: new Map() }, + ), + ).toMatchZod(z.union([z.string(), z.number()])); + }); + + test('should extract a single schema', () => { + expect(parseAnyOf({ anyOf: [{ type: 'string' }] }, { path: [], seen: new Map() })).toMatchZod( + z.string(), + ); + }); + + test('should return z.any() if array is empty', () => { + expect(parseAnyOf({ anyOf: [] }, { path: [], seen: new Map() })).toMatchZod(z.any()); + }); +}); diff --git a/packages/@n8n/json-schema-to-zod/test/parsers/parse-array.test.ts b/packages/@n8n/json-schema-to-zod/test/parsers/parse-array.test.ts new file mode 100644 index 0000000000..b96df3958c --- /dev/null +++ b/packages/@n8n/json-schema-to-zod/test/parsers/parse-array.test.ts @@ -0,0 +1,68 @@ +import { z } from 'zod'; + +import { parseArray } from '../../src/parsers/parse-array'; + +describe('parseArray', () => { + test('should create tuple with items array', () => { + expect( + parseArray( + { + type: 'array', + items: [ + { + type: 'string', + }, + { + type: 'number', + }, + ], + }, + { path: [], seen: new Map() }, + ), + ).toMatchZod(z.tuple([z.string(), z.number()])); + }); + + test('should create array with items object', () => { + expect( + parseArray( + { + type: 'array', + items: { + type: 'string', + }, + }, + { path: [], seen: new Map() }, + ), + ).toMatchZod(z.array(z.string())); + }); + + test('should create min for minItems', () => { + expect( + parseArray( + { + type: 'array', + minItems: 2, + items: { + type: 'string', + }, + }, + { path: [], seen: new Map() }, + ), + ).toMatchZod(z.array(z.string()).min(2)); + }); + + test('should create max for maxItems', () => { + expect( + parseArray( + { + type: 'array', + maxItems: 2, + items: { + type: 'string', + }, + }, + { path: [], seen: new Map() }, + ), + ).toMatchZod(z.array(z.string()).max(2)); + 
}); +}); diff --git a/packages/@n8n/json-schema-to-zod/test/parsers/parse-const.test.ts b/packages/@n8n/json-schema-to-zod/test/parsers/parse-const.test.ts new file mode 100644 index 0000000000..b4f7a5afd5 --- /dev/null +++ b/packages/@n8n/json-schema-to-zod/test/parsers/parse-const.test.ts @@ -0,0 +1,13 @@ +import { z } from 'zod'; + +import { parseConst } from '../../src/parsers/parse-const'; + +describe('parseConst', () => { + test('should handle falsy constants', () => { + expect( + parseConst({ + const: false, + }), + ).toMatchZod(z.literal(false)); + }); +}); diff --git a/packages/@n8n/json-schema-to-zod/test/parsers/parse-enum.test.ts b/packages/@n8n/json-schema-to-zod/test/parsers/parse-enum.test.ts new file mode 100644 index 0000000000..2ed00e3def --- /dev/null +++ b/packages/@n8n/json-schema-to-zod/test/parsers/parse-enum.test.ts @@ -0,0 +1,36 @@ +import { z } from 'zod'; + +import { parseEnum } from '../../src/parsers/parse-enum'; + +describe('parseEnum', () => { + test('should create never with empty enum', () => { + expect( + parseEnum({ + enum: [], + }), + ).toMatchZod(z.never()); + }); + + test('should create literal with single item enum', () => { + expect( + parseEnum({ + enum: ['someValue'], + }), + ).toMatchZod(z.literal('someValue')); + }); + + test('should create enum array with string enums', () => { + expect( + parseEnum({ + enum: ['someValue', 'anotherValue'], + }), + ).toMatchZod(z.enum(['someValue', 'anotherValue'])); + }); + test('should create union with mixed enums', () => { + expect( + parseEnum({ + enum: ['someValue', 57], + }), + ).toMatchZod(z.union([z.literal('someValue'), z.literal(57)])); + }); +}); diff --git a/packages/@n8n/json-schema-to-zod/test/parsers/parse-not.test.ts b/packages/@n8n/json-schema-to-zod/test/parsers/parse-not.test.ts new file mode 100644 index 0000000000..f1bc1d7ab2 --- /dev/null +++ b/packages/@n8n/json-schema-to-zod/test/parsers/parse-not.test.ts @@ -0,0 +1,25 @@ +import { z } from 'zod'; + +import { parseNot } from '../../src/parsers/parse-not'; + +describe('parseNot', () => { + test('parseNot', () => { + expect( + parseNot( + { + not: { + type: 'string', + }, + }, + { path: [], seen: new Map() }, + ), + ).toMatchZod( + z + .any() + .refine( + (value) => !z.string().safeParse(value).success, + 'Invalid input: Should NOT be valid against schema', + ), + ); + }); +}); diff --git a/packages/@n8n/json-schema-to-zod/test/parsers/parse-nullable.test.ts b/packages/@n8n/json-schema-to-zod/test/parsers/parse-nullable.test.ts new file mode 100644 index 0000000000..046c3f41a1 --- /dev/null +++ b/packages/@n8n/json-schema-to-zod/test/parsers/parse-nullable.test.ts @@ -0,0 +1,18 @@ +import { z } from 'zod'; + +import { parseSchema } from '../../src/parsers/parse-schema'; + +describe('parseNullable', () => { + test('parseSchema should not add default twice', () => { + expect( + parseSchema( + { + type: 'string', + nullable: true, + default: null, + }, + { path: [], seen: new Map() }, + ), + ).toMatchZod(z.string().nullable().default(null)); + }); +}); diff --git a/packages/@n8n/json-schema-to-zod/test/parsers/parse-number.test.ts b/packages/@n8n/json-schema-to-zod/test/parsers/parse-number.test.ts new file mode 100644 index 0000000000..7b3cdf4ded --- /dev/null +++ b/packages/@n8n/json-schema-to-zod/test/parsers/parse-number.test.ts @@ -0,0 +1,83 @@ +import { z } from 'zod'; + +import { parseNumber } from '../../src/parsers/parse-number'; + +describe('parseNumber', () => { + test('should handle integer', () => { + expect( + parseNumber({ + 
type: 'integer', + }), + ).toMatchZod(z.number().int()); + + expect( + parseNumber({ + type: 'integer', + multipleOf: 1, + }), + ).toMatchZod(z.number().int()); + + expect( + parseNumber({ + type: 'number', + multipleOf: 1, + }), + ).toMatchZod(z.number().int()); + }); + + test('should handle maximum with exclusiveMinimum', () => { + expect( + parseNumber({ + type: 'number', + exclusiveMinimum: true, + minimum: 2, + }), + ).toMatchZod(z.number().gt(2)); + }); + + test('should handle maximum with exclusiveMinimum', () => { + expect( + parseNumber({ + type: 'number', + minimum: 2, + }), + ).toMatchZod(z.number().gte(2)); + }); + + test('should handle maximum with exclusiveMaximum', () => { + expect( + parseNumber({ + type: 'number', + exclusiveMaximum: true, + maximum: 2, + }), + ).toMatchZod(z.number().lt(2)); + }); + + test('should handle numeric exclusiveMaximum', () => { + expect( + parseNumber({ + type: 'number', + exclusiveMaximum: 2, + }), + ).toMatchZod(z.number().lt(2)); + }); + + test('should accept errorMessage', () => { + expect( + parseNumber({ + type: 'number', + format: 'int64', + exclusiveMinimum: 0, + maximum: 2, + multipleOf: 2, + errorMessage: { + format: 'ayy', + multipleOf: 'lmao', + exclusiveMinimum: 'deez', + maximum: 'nuts', + }, + }), + ).toMatchZod(z.number().int('ayy').multipleOf(2, 'lmao').gt(0, 'deez').lte(2, 'nuts')); + }); +}); diff --git a/packages/@n8n/json-schema-to-zod/test/parsers/parse-object.test.ts b/packages/@n8n/json-schema-to-zod/test/parsers/parse-object.test.ts new file mode 100644 index 0000000000..00a2e194cd --- /dev/null +++ b/packages/@n8n/json-schema-to-zod/test/parsers/parse-object.test.ts @@ -0,0 +1,904 @@ +/* eslint-disable n8n-local-rules/no-skipped-tests */ +import type { JSONSchema7 } from 'json-schema'; +import { z, ZodError } from 'zod'; + +import { parseObject } from '../../src/parsers/parse-object'; + +describe('parseObject', () => { + test('should handle with missing properties', () => { + expect( + parseObject( + { + type: 'object', + }, + { path: [], seen: new Map() }, + ), + ).toMatchZod(z.record(z.any())); + }); + + test('should handle with empty properties', () => { + expect( + parseObject( + { + type: 'object', + properties: {}, + }, + { path: [], seen: new Map() }, + ), + ).toMatchZod(z.object({})); + }); + + test('With properties - should handle optional and required properties', () => { + expect( + parseObject( + { + type: 'object', + required: ['myRequiredString'], + properties: { + myOptionalString: { + type: 'string', + }, + myRequiredString: { + type: 'string', + }, + }, + }, + { path: [], seen: new Map() }, + ), + ).toMatchZod( + z.object({ myOptionalString: z.string().optional(), myRequiredString: z.string() }), + ); + }); + + test('With properties - should handle additionalProperties when set to false', () => { + expect( + parseObject( + { + type: 'object', + required: ['myString'], + properties: { + myString: { + type: 'string', + }, + }, + additionalProperties: false, + }, + { path: [], seen: new Map() }, + ), + ).toMatchZod(z.object({ myString: z.string() }).strict()); + }); + + test('With properties - should handle additionalProperties when set to true', () => { + expect( + parseObject( + { + type: 'object', + required: ['myString'], + properties: { + myString: { + type: 'string', + }, + }, + additionalProperties: true, + }, + { path: [], seen: new Map() }, + ), + ).toMatchZod(z.object({ myString: z.string() }).catchall(z.any())); + }); + + test('With properties - should handle additionalProperties when provided 
a schema', () => { + expect( + parseObject( + { + type: 'object', + required: ['myString'], + properties: { + myString: { + type: 'string', + }, + }, + additionalProperties: { type: 'number' }, + }, + { path: [], seen: new Map() }, + ), + ).toMatchZod(z.object({ myString: z.string() }).catchall(z.number())); + }); + + test('Without properties - should handle additionalProperties when set to false', () => { + expect( + parseObject( + { + type: 'object', + additionalProperties: false, + }, + { path: [], seen: new Map() }, + ), + ).toMatchZod(z.record(z.never())); + }); + + test('Without properties - should handle additionalProperties when set to true', () => { + expect( + parseObject( + { + type: 'object', + additionalProperties: true, + }, + { path: [], seen: new Map() }, + ), + ).toMatchZod(z.record(z.any())); + }); + + test('Without properties - should handle additionalProperties when provided a schema', () => { + expect( + parseObject( + { + type: 'object', + additionalProperties: { type: 'number' }, + }, + + { path: [], seen: new Map() }, + ), + ).toMatchZod(z.record(z.number())); + }); + + test('Without properties - should include falsy defaults', () => { + expect( + parseObject( + { + type: 'object', + properties: { + s: { + type: 'string', + default: '', + }, + }, + }, + { path: [], seen: new Map() }, + ), + ).toMatchZod(z.object({ s: z.string().default('') })); + }); + + test('eh', () => { + expect( + parseObject( + { + type: 'object', + required: ['a'], + properties: { + a: { + type: 'string', + }, + }, + anyOf: [ + { + required: ['b'], + properties: { + b: { + type: 'string', + }, + }, + }, + { + required: ['c'], + properties: { + c: { + type: 'string', + }, + }, + }, + ], + }, + { path: [], seen: new Map() }, + ), + ).toMatchZod( + z + .object({ a: z.string() }) + .and(z.union([z.object({ b: z.string() }), z.object({ c: z.string() })])), + ); + + expect( + parseObject( + { + type: 'object', + required: ['a'], + properties: { + a: { + type: 'string', + }, + }, + anyOf: [ + { + required: ['b'], + properties: { + b: { + type: 'string', + }, + }, + }, + {}, + ], + }, + { path: [], seen: new Map() }, + ), + ).toMatchZod(z.object({ a: z.string() }).and(z.union([z.object({ b: z.string() }), z.any()]))); + + expect( + parseObject( + { + type: 'object', + required: ['a'], + properties: { + a: { + type: 'string', + }, + }, + oneOf: [ + { + required: ['b'], + properties: { + b: { + type: 'string', + }, + }, + }, + { + required: ['c'], + properties: { + c: { + type: 'string', + }, + }, + }, + ], + }, + { path: [], seen: new Map() }, + ), + ).toMatchZod( + z.object({ a: z.string() }).and( + z.any().superRefine((x, ctx) => { + const schemas = [z.object({ b: z.string() }), z.object({ c: z.string() })]; + const errors = schemas.reduce( + (errors, schema) => + ((result) => (result.error ? 
[...errors, result.error] : errors))( + schema.safeParse(x), + ), + [], + ); + if (schemas.length - errors.length !== 1) { + ctx.addIssue({ + path: ctx.path, + code: 'invalid_union', + unionErrors: errors, + message: 'Invalid input: Should pass single schema', + }); + } + }), + ), + ); + + expect( + parseObject( + { + type: 'object', + required: ['a'], + properties: { + a: { + type: 'string', + }, + }, + oneOf: [ + { + required: ['b'], + properties: { + b: { + type: 'string', + }, + }, + }, + {}, + ], + }, + { path: [], seen: new Map() }, + ), + ).toMatchZod( + z.object({ a: z.string() }).and( + z.any().superRefine((x, ctx) => { + const schemas = [z.object({ b: z.string() }), z.any()]; + const errors = schemas.reduce( + (errors, schema) => + ((result) => (result.error ? [...errors, result.error] : errors))( + schema.safeParse(x), + ), + [], + ); + if (schemas.length - errors.length !== 1) { + ctx.addIssue({ + path: ctx.path, + code: 'invalid_union', + unionErrors: errors, + message: 'Invalid input: Should pass single schema', + }); + } + }), + ), + ); + + expect( + parseObject( + { + type: 'object', + required: ['a'], + properties: { + a: { + type: 'string', + }, + }, + allOf: [ + { + required: ['b'], + properties: { + b: { + type: 'string', + }, + }, + }, + { + required: ['c'], + properties: { + c: { + type: 'string', + }, + }, + }, + ], + }, + { path: [], seen: new Map() }, + ), + ).toMatchZod( + z + .object({ a: z.string() }) + .and(z.intersection(z.object({ b: z.string() }), z.object({ c: z.string() }))), + ); + + expect( + parseObject( + { + type: 'object', + required: ['a'], + properties: { + a: { + type: 'string', + }, + }, + allOf: [ + { + required: ['b'], + properties: { + b: { + type: 'string', + }, + }, + }, + {}, + ], + }, + { path: [], seen: new Map() }, + ), + ).toMatchZod( + z.object({ a: z.string() }).and(z.intersection(z.object({ b: z.string() }), z.any())), + ); + }); + + const run = (zodSchema: z.ZodTypeAny, data: unknown) => zodSchema.safeParse(data); + + test('Functional tests - run', () => { + expect(run(z.string(), 'hello')).toEqual({ + success: true, + data: 'hello', + }); + }); + + test('Functional tests - properties', () => { + const schema: JSONSchema7 & { type: 'object' } = { + type: 'object', + required: ['a'], + properties: { + a: { + type: 'string', + }, + b: { + type: 'number', + }, + }, + }; + + const expected = z.object({ a: z.string(), b: z.number().optional() }); + const result = parseObject(schema, { path: [], seen: new Map() }); + + expect(result).toMatchZod(expected); + + expect(run(result, { a: 'hello' })).toEqual({ + success: true, + data: { + a: 'hello', + }, + }); + + expect(run(result, { a: 'hello', b: 123 })).toEqual({ + success: true, + data: { + a: 'hello', + b: 123, + }, + }); + + expect(run(result, { b: 'hello', x: true })).toEqual({ + success: false, + error: new ZodError([ + { + code: 'invalid_type', + expected: 'string', + received: 'undefined', + path: ['a'], + message: 'Required', + }, + { + code: 'invalid_type', + expected: 'number', + received: 'string', + path: ['b'], + message: 'Expected number, received string', + }, + ]), + }); + }); + + test('Functional tests - properties and additionalProperties', () => { + const schema: JSONSchema7 & { type: 'object' } = { + type: 'object', + required: ['a'], + properties: { + a: { + type: 'string', + }, + b: { + type: 'number', + }, + }, + additionalProperties: { type: 'boolean' }, + }; + + const expected = z.object({ a: z.string(), b: z.number().optional() }).catchall(z.boolean()); + + const 
result = parseObject(schema, { path: [], seen: new Map() }); + + expect(result).toMatchZod(expected); + + expect(run(result, { b: 'hello', x: 'true' })).toEqual({ + success: false, + error: new ZodError([ + { + code: 'invalid_type', + expected: 'string', + received: 'undefined', + path: ['a'], + message: 'Required', + }, + { + code: 'invalid_type', + expected: 'number', + received: 'string', + path: ['b'], + message: 'Expected number, received string', + }, + { + code: 'invalid_type', + expected: 'boolean', + received: 'string', + path: ['x'], + message: 'Expected boolean, received string', + }, + ]), + }); + }); + + test('Functional tests - properties and single-item patternProperties', () => { + const schema: JSONSchema7 & { type: 'object' } = { + type: 'object', + required: ['a'], + properties: { + a: { + type: 'string', + }, + b: { + type: 'number', + }, + }, + patternProperties: { + '\\.': { type: 'array' }, + }, + }; + + const expected = z + .object({ a: z.string(), b: z.number().optional() }) + .catchall(z.array(z.any())) + .superRefine((value, ctx) => { + for (const key in value) { + if (key.match(new RegExp('\\\\.'))) { + const result = z.array(z.any()).safeParse(value[key]); + if (!result.success) { + ctx.addIssue({ + path: [...ctx.path, key], + code: 'custom', + message: `Invalid input: Key matching regex /${key}/ must match schema`, + params: { + issues: result.error.issues, + }, + }); + } + } + } + }); + + const result = parseObject(schema, { path: [], seen: new Map() }); + + expect(result).toMatchZod(expected); + + expect(run(result, { a: 'a', b: 2, '.': [] })).toEqual({ + success: true, + data: { a: 'a', b: 2, '.': [] }, + }); + + expect(run(result, { a: 'a', b: 2, '.': '[]' })).toEqual({ + success: false, + error: new ZodError([ + { + code: 'invalid_type', + expected: 'array', + received: 'string', + path: ['.'], + message: 'Expected array, received string', + }, + ]), + }); + }); + + test('Functional tests - properties, additionalProperties and patternProperties', () => { + const schema: JSONSchema7 & { type: 'object' } = { + type: 'object', + required: ['a'], + properties: { + a: { + type: 'string', + }, + b: { + type: 'number', + }, + }, + additionalProperties: { type: 'boolean' }, + patternProperties: { + '\\.': { type: 'array' }, + '\\,': { type: 'array', minItems: 1 }, + }, + }; + + const expected = z + .object({ a: z.string(), b: z.number().optional() }) + .catchall(z.union([z.array(z.any()), z.array(z.any()).min(1), z.boolean()])) + .superRefine((value, ctx) => { + for (const key in value) { + let evaluated = ['a', 'b'].includes(key); + if (key.match(new RegExp('\\\\.'))) { + evaluated = true; + const result = z.array(z.any()).safeParse(value[key]); + if (!result.success) { + ctx.addIssue({ + path: [...ctx.path, key], + code: 'custom', + message: `Invalid input: Key matching regex /${key}/ must match schema`, + params: { + issues: result.error.issues, + }, + }); + } + } + if (key.match(new RegExp('\\\\,'))) { + evaluated = true; + const result = z.array(z.any()).min(1).safeParse(value[key]); + if (!result.success) { + ctx.addIssue({ + path: [...ctx.path, key], + code: 'custom', + message: `Invalid input: Key matching regex /${key}/ must match schema`, + params: { + issues: result.error.issues, + }, + }); + } + } + if (!evaluated) { + const result = z.boolean().safeParse(value[key]); + if (!result.success) { + ctx.addIssue({ + path: [...ctx.path, key], + code: 'custom', + message: 'Invalid input: must match catchall schema', + params: { + issues: result.error.issues, 
+ }, + }); + } + } + } + }); + + const result = parseObject(schema, { path: [], seen: new Map() }); + + expect(result).toMatchZod(expected); + }); + + test('Functional tests - additionalProperties', () => { + const schema: JSONSchema7 & { type: 'object' } = { + type: 'object', + additionalProperties: { type: 'boolean' }, + }; + + const expected = z.record(z.boolean()); + + const result = parseObject(schema, { path: [], seen: new Map() }); + + expect(result).toMatchZod(expected); + }); + + test('Functional tests - additionalProperties and patternProperties', () => { + const schema: JSONSchema7 & { type: 'object' } = { + type: 'object', + additionalProperties: { type: 'boolean' }, + patternProperties: { + '\\.': { type: 'array' }, + '\\,': { type: 'array', minItems: 1 }, + }, + }; + + const expected = z + .record(z.union([z.array(z.any()), z.array(z.any()).min(1), z.boolean()])) + .superRefine((value, ctx) => { + for (const key in value) { + let evaluated = false; + if (key.match(new RegExp('\\\\.'))) { + evaluated = true; + const result = z.array(z.any()).safeParse(value[key]); + if (!result.success) { + ctx.addIssue({ + path: [...ctx.path, key], + code: 'custom', + message: `Invalid input: Key matching regex /${key}/ must match schema`, + params: { + issues: result.error.issues, + }, + }); + } + } + if (key.match(new RegExp('\\\\,'))) { + evaluated = true; + const result = z.array(z.any()).min(1).safeParse(value[key]); + if (!result.success) { + ctx.addIssue({ + path: [...ctx.path, key], + code: 'custom', + message: `Invalid input: Key matching regex /${key}/ must match schema`, + params: { + issues: result.error.issues, + }, + }); + } + } + if (!evaluated) { + const result = z.boolean().safeParse(value[key]); + if (!result.success) { + ctx.addIssue({ + path: [...ctx.path, key], + code: 'custom', + message: 'Invalid input: must match catchall schema', + params: { + issues: result.error.issues, + }, + }); + } + } + } + }); + + const result = parseObject(schema, { path: [], seen: new Map() }); + + expect(result).toMatchZod(expected); + + expect(run(result, { x: true, '.': [], ',': [] })).toEqual({ + success: false, + error: new ZodError([ + { + path: [','], + code: 'custom', + message: 'Invalid input: Key matching regex /,/ must match schema', + params: { + issues: [ + { + code: 'too_small', + minimum: 1, + type: 'array', + inclusive: true, + exact: false, + message: 'Array must contain at least 1 element(s)', + path: [], + }, + ], + }, + }, + ]), + }); + }); + + test('Functional tests - single-item patternProperties', () => { + const schema: JSONSchema7 & { type: 'object' } = { + type: 'object', + patternProperties: { + '\\.': { type: 'array' }, + }, + }; + + const expected = z.record(z.array(z.any())).superRefine((value, ctx) => { + for (const key in value) { + if (key.match(new RegExp('\\\\.'))) { + const result = z.array(z.any()).safeParse(value[key]); + if (!result.success) { + ctx.addIssue({ + path: [...ctx.path, key], + code: 'custom', + message: `Invalid input: Key matching regex /${key}/ must match schema`, + params: { + issues: result.error.issues, + }, + }); + } + } + } + }); + + const result = parseObject(schema, { path: [], seen: new Map() }); + + expect(result).toMatchZod(expected); + }); + + test('Functional tests - patternProperties', () => { + const schema: JSONSchema7 & { type: 'object' } = { + type: 'object', + patternProperties: { + '\\.': { type: 'array' }, + '\\,': { type: 'array', minItems: 1 }, + }, + }; + + const expected = z + .record(z.union([z.array(z.any()), 
z.array(z.any()).min(1)])) + .superRefine((value, ctx) => { + for (const key in value) { + if (key.match(new RegExp('\\.'))) { + const result = z.array(z.any()).safeParse(value[key]); + if (!result.success) { + ctx.addIssue({ + path: [...ctx.path, key], + code: 'custom', + message: `Invalid input: Key matching regex /${key}/ must match schema`, + params: { + issues: result.error.issues, + }, + }); + } + } + if (key.match(new RegExp('\\,'))) { + const result = z.array(z.any()).min(1).safeParse(value[key]); + if (!result.success) { + ctx.addIssue({ + path: [...ctx.path, key], + code: 'custom', + message: `Invalid input: Key matching regex /${key}/ must match schema`, + params: { + issues: result.error.issues, + }, + }); + } + } + } + }); + + const result = parseObject(schema, { path: [], seen: new Map() }); + + expect(run(result, { '.': [] })).toEqual({ + success: true, + data: { '.': [] }, + }); + + expect(run(result, { ',': [] })).toEqual({ + success: false, + error: new ZodError([ + { + path: [','], + code: 'custom', + message: 'Invalid input: Key matching regex /,/ must match schema', + params: { + issues: [ + { + code: 'too_small', + minimum: 1, + type: 'array', + inclusive: true, + exact: false, + message: 'Array must contain at least 1 element(s)', + path: [], + }, + ], + }, + }, + ]), + }); + + expect(result).toMatchZod(expected); + }); + + test('Functional tests - patternProperties and properties', () => { + const schema: JSONSchema7 & { type: 'object' } = { + type: 'object', + required: ['a'], + properties: { + a: { + type: 'string', + }, + b: { + type: 'number', + }, + }, + patternProperties: { + '\\.': { type: 'array' }, + '\\,': { type: 'array', minItems: 1 }, + }, + }; + + const expected = z + .object({ a: z.string(), b: z.number().optional() }) + .catchall(z.union([z.array(z.any()), z.array(z.any()).min(1)])) + .superRefine((value, ctx) => { + for (const key in value) { + if (key.match(new RegExp('\\.'))) { + const result = z.array(z.any()).safeParse(value[key]); + if (!result.success) { + ctx.addIssue({ + path: [...ctx.path, key], + code: 'custom', + message: `Invalid input: Key matching regex /${key}/ must match schema`, + params: { + issues: result.error.issues, + }, + }); + } + } + if (key.match(new RegExp('\\,'))) { + const result = z.array(z.any()).min(1).safeParse(value[key]); + if (!result.success) { + ctx.addIssue({ + path: [...ctx.path, key], + code: 'custom', + message: `Invalid input: Key matching regex /${key}/ must match schema`, + params: { + issues: result.error.issues, + }, + }); + } + } + } + }); + + const result = parseObject(schema, { path: [], seen: new Map() }); + + expect(result).toMatchZod(expected); + }); +}); diff --git a/packages/@n8n/json-schema-to-zod/test/parsers/parse-one-of.test.ts b/packages/@n8n/json-schema-to-zod/test/parsers/parse-one-of.test.ts new file mode 100644 index 0000000000..3295200576 --- /dev/null +++ b/packages/@n8n/json-schema-to-zod/test/parsers/parse-one-of.test.ts @@ -0,0 +1,48 @@ +import { z } from 'zod'; + +import { parseOneOf } from '../../src/parsers/parse-one-of'; + +describe('parseOneOf', () => { + test('should create a union from two or more schemas', () => { + expect( + parseOneOf( + { + oneOf: [ + { + type: 'string', + }, + { type: 'number' }, + ], + }, + { path: [], seen: new Map() }, + ), + ).toMatchZod( + z.any().superRefine((x, ctx) => { + const schemas = [z.string(), z.number()]; + const errors = schemas.reduce( + (errors, schema) => + ((result) => (result.error ? 
[...errors, result.error] : errors))(schema.safeParse(x)), + [], + ); + if (schemas.length - errors.length !== 1) { + ctx.addIssue({ + path: ctx.path, + code: 'invalid_union', + unionErrors: errors, + message: 'Invalid input: Should pass single schema', + }); + } + }), + ); + }); + + test('should extract a single schema', () => { + expect(parseOneOf({ oneOf: [{ type: 'string' }] }, { path: [], seen: new Map() })).toMatchZod( + z.string(), + ); + }); + + test('should return z.any() if array is empty', () => { + expect(parseOneOf({ oneOf: [] }, { path: [], seen: new Map() })).toMatchZod(z.any()); + }); +}); diff --git a/packages/@n8n/json-schema-to-zod/test/parsers/parse-schema.test.ts b/packages/@n8n/json-schema-to-zod/test/parsers/parse-schema.test.ts new file mode 100644 index 0000000000..c0f0899e28 --- /dev/null +++ b/packages/@n8n/json-schema-to-zod/test/parsers/parse-schema.test.ts @@ -0,0 +1,113 @@ +import { z } from 'zod'; + +import { parseSchema } from '../../src/parsers/parse-schema'; + +describe('parseSchema', () => { + test('should be usable without providing refs', () => { + expect(parseSchema({ type: 'string' })).toMatchZod(z.string()); + }); + + test('should return a seen and processed ref', () => { + const seen = new Map(); + const schema = { + type: 'object', + properties: { + prop: { + type: 'string', + }, + }, + }; + expect(parseSchema(schema, { seen, path: [] })); + expect(parseSchema(schema, { seen, path: [] })); + }); + + test('should be possible to describe a readonly schema', () => { + expect(parseSchema({ type: 'string', readOnly: true })).toMatchZod(z.string().readonly()); + }); + + test('should handle nullable', () => { + expect( + parseSchema( + { + type: 'string', + nullable: true, + }, + { path: [], seen: new Map() }, + ), + ).toMatchZod(z.string().nullable()); + }); + + test('should handle enum', () => { + expect(parseSchema({ enum: ['someValue', 57] })).toMatchZod( + z.union([z.literal('someValue'), z.literal(57)]), + ); + }); + + test('should handle multiple type', () => { + expect(parseSchema({ type: ['string', 'number'] })).toMatchZod( + z.union([z.string(), z.number()]), + ); + }); + + test('should handle if-then-else type', () => { + expect( + parseSchema({ + if: { type: 'string' }, + then: { type: 'number' }, + else: { type: 'boolean' }, + }), + ).toMatchZod( + z.union([z.number(), z.boolean()]).superRefine((value, ctx) => { + const result = z.string().safeParse(value).success + ? z.number().safeParse(value) + : z.boolean().safeParse(value); + if (!result.success) { + result.error.errors.forEach((error) => ctx.addIssue(error)); + } + }), + ); + }); + + test('should handle anyOf', () => { + expect( + parseSchema({ + anyOf: [ + { + type: 'string', + }, + { type: 'number' }, + ], + }), + ).toMatchZod(z.union([z.string(), z.number()])); + }); + + test('should handle oneOf', () => { + expect( + parseSchema({ + oneOf: [ + { + type: 'string', + }, + { type: 'number' }, + ], + }), + ).toMatchZod( + z.any().superRefine((x, ctx) => { + const schemas = [z.string(), z.number()]; + const errors = schemas.reduce( + (errors, schema) => + ((result) => (result.error ? 
[...errors, result.error] : errors))(schema.safeParse(x)), + [], + ); + if (schemas.length - errors.length !== 1) { + ctx.addIssue({ + path: ctx.path, + code: 'invalid_union', + unionErrors: errors, + message: 'Invalid input: Should pass single schema', + }); + } + }), + ); + }); +}); diff --git a/packages/@n8n/json-schema-to-zod/test/parsers/parse-string.test.ts b/packages/@n8n/json-schema-to-zod/test/parsers/parse-string.test.ts new file mode 100644 index 0000000000..5e53135c2d --- /dev/null +++ b/packages/@n8n/json-schema-to-zod/test/parsers/parse-string.test.ts @@ -0,0 +1,152 @@ +import { z } from 'zod'; + +import { parseString } from '../../src/parsers/parse-string'; + +describe('parseString', () => { + const run = (schema: z.ZodString, data: unknown) => schema.safeParse(data); + + test('DateTime format', () => { + const datetime = '2018-11-13T20:20:39Z'; + + const code = parseString({ + type: 'string', + format: 'date-time', + errorMessage: { format: 'hello' }, + }); + + expect(code).toMatchZod(z.string().datetime({ offset: true, message: 'hello' })); + + expect(run(code, datetime)).toEqual({ success: true, data: datetime }); + }); + + test('email', () => { + expect( + parseString({ + type: 'string', + format: 'email', + }), + ).toMatchZod(z.string().email()); + }); + + test('ip', () => { + expect( + parseString({ + type: 'string', + format: 'ip', + }), + ).toMatchZod(z.string().ip()); + + expect( + parseString({ + type: 'string', + format: 'ipv6', + }), + ).toMatchZod(z.string().ip({ version: 'v6' })); + }); + + test('uri', () => { + expect( + parseString({ + type: 'string', + format: 'uri', + }), + ).toMatchZod(z.string().url()); + }); + + test('uuid', () => { + expect( + parseString({ + type: 'string', + format: 'uuid', + }), + ).toMatchZod(z.string().uuid()); + }); + + test('time', () => { + expect( + parseString({ + type: 'string', + format: 'time', + }), + ).toMatchZod(z.string().time()); + }); + + test('date', () => { + expect( + parseString({ + type: 'string', + format: 'date', + }), + ).toMatchZod(z.string().date()); + }); + + test('duration', () => { + expect( + parseString({ + type: 'string', + format: 'duration', + }), + ).toMatchZod(z.string().duration()); + }); + + test('base64', () => { + expect( + parseString({ + type: 'string', + contentEncoding: 'base64', + }), + ).toMatchZod(z.string().base64()); + + expect( + parseString({ + type: 'string', + contentEncoding: 'base64', + errorMessage: { + contentEncoding: 'x', + }, + }), + ).toMatchZod(z.string().base64('x')); + + expect( + parseString({ + type: 'string', + format: 'binary', + }), + ).toMatchZod(z.string().base64()); + + expect( + parseString({ + type: 'string', + format: 'binary', + errorMessage: { + format: 'x', + }, + }), + ).toMatchZod(z.string().base64('x')); + }); + + test('should accept errorMessage', () => { + expect( + parseString({ + type: 'string', + format: 'ipv4', + pattern: 'x', + minLength: 1, + maxLength: 2, + errorMessage: { + format: 'ayy', + pattern: 'lmao', + minLength: 'deez', + maxLength: 'nuts', + }, + }), + ).toMatchZod( + z + .string() + .ip({ version: 'v4', message: 'ayy' }) + .regex(new RegExp('x'), 'lmao') + .min(1, 'deez') + .max(2, 'nuts'), + ); + }); +}); diff --git a/packages/@n8n/json-schema-to-zod/test/utils/half.test.ts b/packages/@n8n/json-schema-to-zod/test/utils/half.test.ts new file mode 100644 index 0000000000..afd9ee85fd --- /dev/null +++ b/packages/@n8n/json-schema-to-zod/test/utils/half.test.ts @@ -0,0 +1,15 @@ +import { half } from '../../src/utils/half'; + 
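For context: the parse-string tests above pin down how JSON Schema string formats map onto zod validators (email becomes z.string().email(), uri becomes z.string().url(), date-time becomes z.string().datetime({ offset: true }), and so on). A minimal consumer-side sketch of that mapping, assuming the package is imported under its package name '@n8n/json-schema-to-zod' and that jsonSchemaToZod is the public entry point, as the tests in this diff suggest:

    import { jsonSchemaToZod } from '@n8n/json-schema-to-zod';

    // Build a live zod schema from a JSON Schema document.
    const contact = jsonSchemaToZod({
      type: 'object',
      required: ['email'],
      properties: {
        email: { type: 'string', format: 'email' },
        homepage: { type: 'string', format: 'uri' },
        createdAt: { type: 'string', format: 'date-time' },
      },
    });

    // Roughly equivalent to:
    // z.object({
    //   email: z.string().email(),
    //   homepage: z.string().url().optional(),
    //   createdAt: z.string().datetime({ offset: true }).optional(),
    // });
    contact.parse({
      email: 'jane@example.com',
      homepage: 'https://n8n.io',
      createdAt: '2018-11-13T20:20:39Z',
    });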
+describe('half', () => { + test('half', () => { + const [a, b] = half(['A', 'B', 'C', 'D', 'E']); + + if (1 < 0) { + // type should be string + a[0].endsWith(''); + } + + expect(a).toEqual(['A', 'B']); + expect(b).toEqual(['C', 'D', 'E']); + }); +}); diff --git a/packages/@n8n/json-schema-to-zod/test/utils/omit.test.ts b/packages/@n8n/json-schema-to-zod/test/utils/omit.test.ts new file mode 100644 index 0000000000..f5f51313f5 --- /dev/null +++ b/packages/@n8n/json-schema-to-zod/test/utils/omit.test.ts @@ -0,0 +1,27 @@ +import { omit } from '../../src/utils/omit'; + +describe('omit', () => { + test('omit', () => { + const input = { + a: true, + b: true, + }; + + omit( + input, + 'b', + // @ts-expect-error + 'c', + ); + + const output = omit(input, 'b'); + + // @ts-expect-error + output.b; + + expect(output.a).toBe(true); + + // @ts-expect-error + expect(output.b).toBeUndefined(); + }); +}); diff --git a/packages/@n8n/json-schema-to-zod/tsconfig.cjs.json b/packages/@n8n/json-schema-to-zod/tsconfig.cjs.json new file mode 100644 index 0000000000..2a17765d74 --- /dev/null +++ b/packages/@n8n/json-schema-to-zod/tsconfig.cjs.json @@ -0,0 +1,11 @@ +{ + "compilerOptions": { + "target": "es2020", + "module": "commonjs", + "outDir": "dist/cjs", + "strict": true, + "skipLibCheck": true, + "esModuleInterop": true + }, + "include": ["src"] +} diff --git a/packages/@n8n/json-schema-to-zod/tsconfig.esm.json b/packages/@n8n/json-schema-to-zod/tsconfig.esm.json new file mode 100644 index 0000000000..21f4508341 --- /dev/null +++ b/packages/@n8n/json-schema-to-zod/tsconfig.esm.json @@ -0,0 +1,12 @@ +{ + "compilerOptions": { + "target": "es2020", + "module": "es2020", + "moduleResolution": "node", + "outDir": "dist/esm", + "strict": true, + "skipLibCheck": true, + "esModuleInterop": true + }, + "include": ["src"] +} diff --git a/packages/@n8n/json-schema-to-zod/tsconfig.json b/packages/@n8n/json-schema-to-zod/tsconfig.json new file mode 100644 index 0000000000..f8e6508e74 --- /dev/null +++ b/packages/@n8n/json-schema-to-zod/tsconfig.json @@ -0,0 +1,12 @@ +{ + "extends": ["../../../tsconfig.json"], + "compilerOptions": { + "rootDir": ".", + "baseUrl": "src", + "strict": true, + "noEmit": true, + "skipLibCheck": true, + "esModuleInterop": true + }, + "include": ["src/**/*.ts", "test/**/*.ts"] +} diff --git a/packages/@n8n/json-schema-to-zod/tsconfig.types.json b/packages/@n8n/json-schema-to-zod/tsconfig.types.json new file mode 100644 index 0000000000..63451df65a --- /dev/null +++ b/packages/@n8n/json-schema-to-zod/tsconfig.types.json @@ -0,0 +1,11 @@ +{ + "compilerOptions": { + "declaration": true, + "emitDeclarationOnly": true, + "outDir": "dist/types", + "strict": true, + "skipLibCheck": true, + "esModuleInterop": true + }, + "include": ["src"] +} diff --git a/packages/@n8n/nodes-langchain/nodes/agents/Agent/Agent.node.ts b/packages/@n8n/nodes-langchain/nodes/agents/Agent/Agent.node.ts index a14e4195c9..295eaa9296 100644 --- a/packages/@n8n/nodes-langchain/nodes/agents/Agent/Agent.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/agents/Agent/Agent.node.ts @@ -8,7 +8,7 @@ import type { INodeTypeDescription, INodeProperties, } from 'n8n-workflow'; -import { promptTypeOptions, textInput } from '../../../utils/descriptions'; + import { conversationalAgentProperties } from './agents/ConversationalAgent/description'; import { conversationalAgentExecute } from './agents/ConversationalAgent/execute'; import { openAiFunctionsAgentProperties } from './agents/OpenAiFunctionsAgent/description'; @@ -21,6 +21,7 @@ import 
{ sqlAgentAgentProperties } from './agents/SqlAgent/description'; import { sqlAgentAgentExecute } from './agents/SqlAgent/execute'; import { toolsAgentProperties } from './agents/ToolsAgent/description'; import { toolsAgentExecute } from './agents/ToolsAgent/execute'; +import { promptTypeOptions, textInput } from '../../../utils/descriptions'; // Function used in the inputs expression to figure out which inputs to // display based on the agent type @@ -351,6 +352,23 @@ export class Agent implements INodeType { }, }, }, + { + displayName: 'For more reliable structured output parsing, consider using the Tools agent', + name: 'notice', + type: 'notice', + default: '', + displayOptions: { + show: { + hasOutputParser: [true], + agent: [ + 'conversationalAgent', + 'reActAgent', + 'planAndExecuteAgent', + 'openAiFunctionsAgent', + ], + }, + }, + }, { displayName: 'Require Specific Output Format', name: 'hasOutputParser', @@ -372,6 +390,7 @@ export class Agent implements INodeType { displayOptions: { show: { hasOutputParser: [true], + agent: ['toolsAgent'], }, }, }, diff --git a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ConversationalAgent/execute.ts b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ConversationalAgent/execute.ts index 887017ccaf..d171efe95f 100644 --- a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ConversationalAgent/execute.ts +++ b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ConversationalAgent/execute.ts @@ -1,19 +1,19 @@ -import { NodeConnectionType, NodeOperationError } from 'n8n-workflow'; -import type { IExecuteFunctions, INodeExecutionData } from 'n8n-workflow'; - -import { initializeAgentExecutorWithOptions } from 'langchain/agents'; import type { BaseChatMemory } from '@langchain/community/memory/chat_memory'; import type { BaseOutputParser } from '@langchain/core/output_parsers'; import { PromptTemplate } from '@langchain/core/prompts'; +import { initializeAgentExecutorWithOptions } from 'langchain/agents'; import { CombiningOutputParser } from 'langchain/output_parsers'; +import type { IExecuteFunctions, INodeExecutionData } from 'n8n-workflow'; +import { NodeConnectionType, NodeOperationError } from 'n8n-workflow'; + import { isChatInstance, getPromptInputByType, - getOptionalOutputParsers, getConnectedTools, } from '../../../../../utils/helpers'; -import { getTracingConfig } from '../../../../../utils/tracing'; +import { getOptionalOutputParsers } from '../../../../../utils/output_parsers/N8nOutputParser'; import { throwIfToolSchema } from '../../../../../utils/schemaParsing'; +import { getTracingConfig } from '../../../../../utils/tracing'; export async function conversationalAgentExecute( this: IExecuteFunctions, diff --git a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/OpenAiFunctionsAgent/execute.ts b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/OpenAiFunctionsAgent/execute.ts index 12e1dbda4e..0518234c29 100644 --- a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/OpenAiFunctionsAgent/execute.ts +++ b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/OpenAiFunctionsAgent/execute.ts @@ -1,3 +1,10 @@ +import type { BaseOutputParser } from '@langchain/core/output_parsers'; +import { PromptTemplate } from '@langchain/core/prompts'; +import { ChatOpenAI } from '@langchain/openai'; +import type { AgentExecutorInput } from 'langchain/agents'; +import { AgentExecutor, OpenAIAgent } from 'langchain/agents'; +import { BufferMemory, type BaseChatMemory } from 'langchain/memory'; +import { 
CombiningOutputParser } from 'langchain/output_parsers'; import { type IExecuteFunctions, type INodeExecutionData, @@ -5,18 +12,8 @@ import { NodeOperationError, } from 'n8n-workflow'; -import type { AgentExecutorInput } from 'langchain/agents'; -import { AgentExecutor, OpenAIAgent } from 'langchain/agents'; -import type { BaseOutputParser } from '@langchain/core/output_parsers'; -import { PromptTemplate } from '@langchain/core/prompts'; -import { CombiningOutputParser } from 'langchain/output_parsers'; -import { BufferMemory, type BaseChatMemory } from 'langchain/memory'; -import { ChatOpenAI } from '@langchain/openai'; -import { - getConnectedTools, - getOptionalOutputParsers, - getPromptInputByType, -} from '../../../../../utils/helpers'; +import { getConnectedTools, getPromptInputByType } from '../../../../../utils/helpers'; +import { getOptionalOutputParsers } from '../../../../../utils/output_parsers/N8nOutputParser'; import { getTracingConfig } from '../../../../../utils/tracing'; export async function openAiFunctionsAgentExecute( diff --git a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/PlanAndExecuteAgent/execute.ts b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/PlanAndExecuteAgent/execute.ts index a4ae1a0f1c..f10207c715 100644 --- a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/PlanAndExecuteAgent/execute.ts +++ b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/PlanAndExecuteAgent/execute.ts @@ -1,3 +1,8 @@ +import type { BaseChatModel } from '@langchain/core/language_models/chat_models'; +import type { BaseOutputParser } from '@langchain/core/output_parsers'; +import { PromptTemplate } from '@langchain/core/prompts'; +import { PlanAndExecuteAgentExecutor } from 'langchain/experimental/plan_and_execute'; +import { CombiningOutputParser } from 'langchain/output_parsers'; import { type IExecuteFunctions, type INodeExecutionData, @@ -5,18 +10,10 @@ import { NodeOperationError, } from 'n8n-workflow'; -import type { BaseOutputParser } from '@langchain/core/output_parsers'; -import { PromptTemplate } from '@langchain/core/prompts'; -import { CombiningOutputParser } from 'langchain/output_parsers'; -import type { BaseChatModel } from '@langchain/core/language_models/chat_models'; -import { PlanAndExecuteAgentExecutor } from 'langchain/experimental/plan_and_execute'; -import { - getConnectedTools, - getOptionalOutputParsers, - getPromptInputByType, -} from '../../../../../utils/helpers'; -import { getTracingConfig } from '../../../../../utils/tracing'; +import { getConnectedTools, getPromptInputByType } from '../../../../../utils/helpers'; +import { getOptionalOutputParsers } from '../../../../../utils/output_parsers/N8nOutputParser'; import { throwIfToolSchema } from '../../../../../utils/schemaParsing'; +import { getTracingConfig } from '../../../../../utils/tracing'; export async function planAndExecuteAgentExecute( this: IExecuteFunctions, diff --git a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ReActAgent/execute.ts b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ReActAgent/execute.ts index 11a5acb040..5707baa9d6 100644 --- a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ReActAgent/execute.ts +++ b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ReActAgent/execute.ts @@ -1,3 +1,9 @@ +import type { BaseLanguageModel } from '@langchain/core/language_models/base'; +import type { BaseChatModel } from '@langchain/core/language_models/chat_models'; +import type { BaseOutputParser } from 
'@langchain/core/output_parsers'; +import { PromptTemplate } from '@langchain/core/prompts'; +import { AgentExecutor, ChatAgent, ZeroShotAgent } from 'langchain/agents'; +import { CombiningOutputParser } from 'langchain/output_parsers'; import { type IExecuteFunctions, type INodeExecutionData, @@ -5,20 +11,14 @@ import { NodeOperationError, } from 'n8n-workflow'; -import { AgentExecutor, ChatAgent, ZeroShotAgent } from 'langchain/agents'; -import type { BaseLanguageModel } from '@langchain/core/language_models/base'; -import type { BaseOutputParser } from '@langchain/core/output_parsers'; -import { PromptTemplate } from '@langchain/core/prompts'; -import { CombiningOutputParser } from 'langchain/output_parsers'; -import type { BaseChatModel } from '@langchain/core/language_models/chat_models'; import { getConnectedTools, - getOptionalOutputParsers, getPromptInputByType, isChatInstance, } from '../../../../../utils/helpers'; -import { getTracingConfig } from '../../../../../utils/tracing'; +import { getOptionalOutputParsers } from '../../../../../utils/output_parsers/N8nOutputParser'; import { throwIfToolSchema } from '../../../../../utils/schemaParsing'; +import { getTracingConfig } from '../../../../../utils/tracing'; export async function reActAgentAgentExecute( this: IExecuteFunctions, diff --git a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ToolsAgent/execute.ts b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ToolsAgent/execute.ts index 90952bac41..84d775d0f5 100644 --- a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ToolsAgent/execute.ts +++ b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ToolsAgent/execute.ts @@ -1,7 +1,6 @@ import type { BaseChatMemory } from '@langchain/community/memory/chat_memory'; import { HumanMessage } from '@langchain/core/messages'; import type { BaseMessage } from '@langchain/core/messages'; -import type { BaseOutputParser, StructuredOutputParser } from '@langchain/core/output_parsers'; import type { BaseMessagePromptTemplateLike } from '@langchain/core/prompts'; import { ChatPromptTemplate } from '@langchain/core/prompts'; import { RunnableSequence } from '@langchain/core/runnables'; @@ -9,7 +8,6 @@ import type { Tool } from '@langchain/core/tools'; import { DynamicStructuredTool } from '@langchain/core/tools'; import type { AgentAction, AgentFinish } from 'langchain/agents'; import { AgentExecutor, createToolCallingAgent } from 'langchain/agents'; -import { OutputFixingParser } from 'langchain/output_parsers'; import { omit } from 'lodash'; import { BINARY_ENCODING, jsonParse, NodeConnectionType, NodeOperationError } from 'n8n-workflow'; import type { IExecuteFunctions, INodeExecutionData } from 'n8n-workflow'; @@ -20,24 +18,16 @@ import { SYSTEM_MESSAGE } from './prompt'; import { isChatInstance, getPromptInputByType, - getOptionalOutputParsers, getConnectedTools, } from '../../../../../utils/helpers'; +import { + getOptionalOutputParsers, + type N8nOutputParser, +} from '../../../../../utils/output_parsers/N8nOutputParser'; -function getOutputParserSchema(outputParser: BaseOutputParser): ZodObject { - const parserType = outputParser.lc_namespace[outputParser.lc_namespace.length - 1]; - let schema: ZodObject; - - if (parserType === 'structured') { - // If the output parser is a structured output parser, we will use the schema from the parser - schema = (outputParser as StructuredOutputParser>).schema; - } else if (parserType === 'fix' && outputParser instanceof OutputFixingParser) { - // If the output parser is a 
fixing parser, we will use the schema from the connected structured output parser - schema = (outputParser.parser as StructuredOutputParser>).schema; - } else { - // If the output parser is not a structured output parser, we will use a fallback schema - schema = z.object({ text: z.string() }); - } +function getOutputParserSchema(outputParser: N8nOutputParser): ZodObject { + const schema = + (outputParser.getSchema() as ZodObject) ?? z.object({ text: z.string() }); return schema; } @@ -205,10 +195,9 @@ export async function toolsAgentExecute(this: IExecuteFunctions): Promise step.tool === 'format_final_response'); if (responseParserTool) { const toolInput = responseParserTool?.toolInput; - const returnValues = (await outputParser.parse(toolInput as unknown as string)) as Record< - string, - unknown - >; + // Check if the tool input is a string or an object and convert it to a string + const parserInput = toolInput instanceof Object ? JSON.stringify(toolInput) : toolInput; + const returnValues = (await outputParser.parse(parserInput)) as Record; return handleParsedStepOutput(returnValues); } @@ -294,14 +283,6 @@ export async function toolsAgentExecute(this: IExecuteFunctions): Promise { const chatTemplate: ChatPromptTemplate | PromptTemplate = await getChainPromptTemplate( diff --git a/packages/@n8n/nodes-langchain/nodes/chains/InformationExtractor/InformationExtractor.node.ts b/packages/@n8n/nodes-langchain/nodes/chains/InformationExtractor/InformationExtractor.node.ts index 7ccfddc5e4..ab6cd8f201 100644 --- a/packages/@n8n/nodes-langchain/nodes/chains/InformationExtractor/InformationExtractor.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/chains/InformationExtractor/InformationExtractor.node.ts @@ -1,3 +1,8 @@ +import type { BaseLanguageModel } from '@langchain/core/language_models/base'; +import { HumanMessage } from '@langchain/core/messages'; +import { ChatPromptTemplate, SystemMessagePromptTemplate } from '@langchain/core/prompts'; +import type { JSONSchema7 } from 'json-schema'; +import { OutputFixingParser, StructuredOutputParser } from 'langchain/output_parsers'; import { jsonParse, NodeConnectionType, NodeOperationError } from 'n8n-workflow'; import type { INodeType, @@ -6,21 +11,17 @@ import type { INodeExecutionData, INodePropertyOptions, } from 'n8n-workflow'; -import type { JSONSchema7 } from 'json-schema'; -import type { BaseLanguageModel } from '@langchain/core/language_models/base'; -import { ChatPromptTemplate, SystemMessagePromptTemplate } from '@langchain/core/prompts'; import type { z } from 'zod'; -import { OutputFixingParser, StructuredOutputParser } from 'langchain/output_parsers'; -import { HumanMessage } from '@langchain/core/messages'; -import { generateSchema, getSandboxWithZod } from '../../../utils/schemaParsing'; + +import { makeZodSchemaFromAttributes } from './helpers'; +import type { AttributeDefinition } from './types'; import { inputSchemaField, jsonSchemaExampleField, schemaTypeField, } from '../../../utils/descriptions'; +import { convertJsonSchemaToZod, generateSchema } from '../../../utils/schemaParsing'; import { getTracingConfig } from '../../../utils/tracing'; -import type { AttributeDefinition } from './types'; -import { makeZodSchemaFromAttributes } from './helpers'; const SYSTEM_PROMPT_TEMPLATE = `You are an expert extraction algorithm. Only extract relevant information from the text. 
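For context: the Tools Agent change above drops the namespace sniffing on LangChain parsers (checking lc_namespace for 'structured' vs 'fix') in favor of asking the connected N8nOutputParser for its schema directly, with a plain { text: string } object as the fallback. A rough sketch of that resolution step, assuming getSchema() may return undefined when the parser carries no structured schema (the exact signature is not shown in this diff):

    import { z, type ZodObject, type ZodRawShape } from 'zod';

    // Hypothetical stand-in for the connected parser; only getSchema() matters for this step.
    interface SchemaAwareParser {
      getSchema(): ZodObject<ZodRawShape> | undefined;
    }

    function resolveOutputSchema(parser: SchemaAwareParser) {
      // Structured parsers report their own schema; anything else falls back to a
      // single free-text field, matching the previous fallback branch.
      return parser.getSchema() ?? z.object({ text: z.string() });
    }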
@@ -261,8 +262,7 @@ export class InformationExtractor implements INodeType { jsonSchema = jsonParse(inputSchema); } - const zodSchemaSandbox = getSandboxWithZod(this, jsonSchema, 0); - const zodSchema = await zodSchemaSandbox.runCode>(); + const zodSchema = convertJsonSchemaToZod>(jsonSchema); parser = OutputFixingParser.fromLLM(llm, StructuredOutputParser.fromZodSchema(zodSchema)); } diff --git a/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsOpenAI/EmbeddingsOpenAi.node.ts b/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsOpenAI/EmbeddingsOpenAi.node.ts index 046f3e4f56..3c40e03203 100644 --- a/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsOpenAI/EmbeddingsOpenAi.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsOpenAI/EmbeddingsOpenAi.node.ts @@ -79,7 +79,7 @@ export class EmbeddingsOpenAi implements INodeType { }, ], group: ['transform'], - version: 1, + version: [1, 1.1], description: 'Use Embeddings OpenAI', defaults: { name: 'Embeddings OpenAI', diff --git a/packages/@n8n/nodes-langchain/nodes/llms/LMChatOpenAi/LmChatOpenAi.node.ts b/packages/@n8n/nodes-langchain/nodes/llms/LMChatOpenAi/LmChatOpenAi.node.ts index dcf483a751..3556bca0cf 100644 --- a/packages/@n8n/nodes-langchain/nodes/llms/LMChatOpenAi/LmChatOpenAi.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/llms/LMChatOpenAi/LmChatOpenAi.node.ts @@ -128,7 +128,7 @@ export class LmChatOpenAi implements INodeType { property: 'model', }, }, - default: 'gpt-3.5-turbo', + default: 'gpt-4o-mini', }, { displayName: diff --git a/packages/@n8n/nodes-langchain/nodes/llms/LMOllama/description.ts b/packages/@n8n/nodes-langchain/nodes/llms/LMOllama/description.ts index 382de60fdd..f91c9a1148 100644 --- a/packages/@n8n/nodes-langchain/nodes/llms/LMOllama/description.ts +++ b/packages/@n8n/nodes-langchain/nodes/llms/LMOllama/description.ts @@ -17,7 +17,7 @@ export const ollamaModel: INodeProperties = { displayName: 'Model', name: 'model', type: 'options', - default: 'llama2', + default: 'llama3.2', description: 'The model which will generate the completion. 
To download models, visit Ollama Models Library.', typeOptions: { diff --git a/packages/@n8n/nodes-langchain/nodes/llms/N8nLlmTracing.ts b/packages/@n8n/nodes-langchain/nodes/llms/N8nLlmTracing.ts index 7d9049a037..a0f47677f7 100644 --- a/packages/@n8n/nodes-langchain/nodes/llms/N8nLlmTracing.ts +++ b/packages/@n8n/nodes-langchain/nodes/llms/N8nLlmTracing.ts @@ -26,7 +26,7 @@ type RunDetail = { options: SerializedSecret | SerializedNotImplemented | SerializedFields; }; -const TIKTOKEN_ESTIMATE_MODEL = 'gpt-3.5-turbo'; +const TIKTOKEN_ESTIMATE_MODEL = 'gpt-4o'; export class N8nLlmTracing extends BaseCallbackHandler { name = 'N8nLlmTracing'; diff --git a/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserAutofixing/OutputParserAutofixing.node.ts b/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserAutofixing/OutputParserAutofixing.node.ts index 97c86506b7..7d676c7607 100644 --- a/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserAutofixing/OutputParserAutofixing.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserAutofixing/OutputParserAutofixing.node.ts @@ -1,15 +1,11 @@ -/* eslint-disable n8n-nodes-base/node-dirname-against-convention */ -import { - NodeConnectionType, - type IExecuteFunctions, - type INodeType, - type INodeTypeDescription, - type SupplyData, -} from 'n8n-workflow'; -import { OutputFixingParser } from 'langchain/output_parsers'; -import type { BaseOutputParser } from '@langchain/core/output_parsers'; import type { BaseLanguageModel } from '@langchain/core/language_models/base'; -import { logWrapper } from '../../../utils/logWrapper'; +import { NodeConnectionType } from 'n8n-workflow'; +import type { IExecuteFunctions, INodeType, INodeTypeDescription, SupplyData } from 'n8n-workflow'; + +import { + N8nOutputFixingParser, + type N8nStructuredOutputParser, +} from '../../../utils/output_parsers/N8nOutputParser'; import { getConnectionHintNoticeField } from '../../../utils/sharedFields'; export class OutputParserAutofixing implements INodeType { @@ -75,12 +71,12 @@ export class OutputParserAutofixing implements INodeType { const outputParser = (await this.getInputConnectionData( NodeConnectionType.AiOutputParser, itemIndex, - )) as BaseOutputParser; + )) as N8nStructuredOutputParser; - const parser = OutputFixingParser.fromLLM(model, outputParser); + const parser = new N8nOutputFixingParser(this, model, outputParser); return { - response: logWrapper(parser, this), + response: parser, }; } } diff --git a/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserAutofixing/test/OutputParserAutofixing.node.test.ts b/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserAutofixing/test/OutputParserAutofixing.node.test.ts new file mode 100644 index 0000000000..32d25d4f73 --- /dev/null +++ b/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserAutofixing/test/OutputParserAutofixing.node.test.ts @@ -0,0 +1,120 @@ +/* eslint-disable @typescript-eslint/unbound-method */ +/* eslint-disable @typescript-eslint/no-unsafe-call */ +import type { BaseLanguageModel } from '@langchain/core/language_models/base'; +import type { MockProxy } from 'jest-mock-extended'; +import { mock } from 'jest-mock-extended'; +import { normalizeItems } from 'n8n-core'; +import type { IExecuteFunctions, IWorkflowDataProxyData } from 'n8n-workflow'; +import { ApplicationError, NodeConnectionType } from 'n8n-workflow'; + +import { N8nOutputFixingParser } from '../../../../utils/output_parsers/N8nOutputParser'; +import type { 
N8nStructuredOutputParser } from '../../../../utils/output_parsers/N8nOutputParser'; +import { OutputParserAutofixing } from '../OutputParserAutofixing.node'; + +describe('OutputParserAutofixing', () => { + let outputParser: OutputParserAutofixing; + let thisArg: MockProxy; + let mockModel: MockProxy; + let mockStructuredOutputParser: MockProxy; + + beforeEach(() => { + outputParser = new OutputParserAutofixing(); + thisArg = mock({ + helpers: { normalizeItems }, + }); + mockModel = mock(); + mockStructuredOutputParser = mock(); + + thisArg.getWorkflowDataProxy.mockReturnValue(mock({ $input: mock() })); + thisArg.addInputData.mockReturnValue({ index: 0 }); + thisArg.addOutputData.mockReturnValue(); + thisArg.getInputConnectionData.mockImplementation(async (type: NodeConnectionType) => { + if (type === NodeConnectionType.AiLanguageModel) return mockModel; + if (type === NodeConnectionType.AiOutputParser) return mockStructuredOutputParser; + + throw new ApplicationError('Unexpected connection type'); + }); + }); + + afterEach(() => { + jest.clearAllMocks(); + }); + + function getMockedRetryChain(output: string) { + return jest.fn().mockReturnValue({ + invoke: jest.fn().mockResolvedValue({ + content: output, + }), + }); + } + + it('should successfully parse valid output without needing to fix it', async () => { + const validOutput = { name: 'Alice', age: 25 }; + + mockStructuredOutputParser.parse.mockResolvedValueOnce(validOutput); + + const { response } = (await outputParser.supplyData.call(thisArg, 0)) as { + response: N8nOutputFixingParser; + }; + + // Ensure the response contains the output-fixing parser + expect(response).toBeDefined(); + expect(response).toBeInstanceOf(N8nOutputFixingParser); + + const result = await response.parse('{"name": "Alice", "age": 25}'); + + // Validate that the parser succeeds without retry + expect(result).toEqual(validOutput); + expect(mockStructuredOutputParser.parse).toHaveBeenCalledTimes(1); // Only one call to parse + }); + + it('should throw an error when both structured parser and fixing parser fail', async () => { + mockStructuredOutputParser.parse + .mockRejectedValueOnce(new Error('Invalid JSON')) // First attempt fails + .mockRejectedValueOnce(new Error('Fixing attempt failed')); // Second attempt fails + + const { response } = (await outputParser.supplyData.call(thisArg, 0)) as { + response: N8nOutputFixingParser; + }; + + response.getRetryChain = getMockedRetryChain('{}'); + + await expect(response.parse('Invalid JSON string')).rejects.toThrow('Fixing attempt failed'); + expect(mockStructuredOutputParser.parse).toHaveBeenCalledTimes(2); + }); + + it('should reject on the first attempt and succeed on retry with the parsed content', async () => { + const validOutput = { name: 'Bob', age: 28 }; + + mockStructuredOutputParser.parse.mockRejectedValueOnce(new Error('Invalid JSON')); + + const { response } = (await outputParser.supplyData.call(thisArg, 0)) as { + response: N8nOutputFixingParser; + }; + + response.getRetryChain = getMockedRetryChain(JSON.stringify(validOutput)); + + mockStructuredOutputParser.parse.mockResolvedValueOnce(validOutput); + + const result = await response.parse('Invalid JSON string'); + + expect(result).toEqual(validOutput); + expect(mockStructuredOutputParser.parse).toHaveBeenCalledTimes(2); // First fails, second succeeds + }); + + it('should handle non-JSON formatted response from fixing parser', async () => { + mockStructuredOutputParser.parse.mockRejectedValueOnce(new Error('Invalid JSON')); + + const { response } = 
(await outputParser.supplyData.call(thisArg, 0)) as { + response: N8nOutputFixingParser; + }; + + response.getRetryChain = getMockedRetryChain('This is not JSON'); + + mockStructuredOutputParser.parse.mockRejectedValueOnce(new Error('Unexpected token')); + + // Expect the structured parser to throw an error on invalid JSON from retry + await expect(response.parse('Invalid JSON string')).rejects.toThrow('Unexpected token'); + expect(mockStructuredOutputParser.parse).toHaveBeenCalledTimes(2); // First fails, second tries and fails + }); +}); diff --git a/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserItemList/OutputParserItemList.node.ts b/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserItemList/OutputParserItemList.node.ts index 24327b2970..cb67afb453 100644 --- a/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserItemList/OutputParserItemList.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserItemList/OutputParserItemList.node.ts @@ -6,9 +6,9 @@ import { type INodeTypeDescription, type SupplyData, } from 'n8n-workflow'; -import { logWrapper } from '../../../utils/logWrapper'; + +import { N8nItemListOutputParser } from '../../../utils/output_parsers/N8nItemListOutputParser'; import { getConnectionHintNoticeField } from '../../../utils/sharedFields'; -import { ItemListOutputParser } from './ItemListOutputParser'; export class OutputParserItemList implements INodeType { description: INodeTypeDescription = { @@ -86,10 +86,10 @@ export class OutputParserItemList implements INodeType { separator?: string; }; - const parser = new ItemListOutputParser(options); + const parser = new N8nItemListOutputParser(options); return { - response: logWrapper(parser, this), + response: parser, }; } } diff --git a/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserItemList/test/OutputParserItemList.node.test.ts b/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserItemList/test/OutputParserItemList.node.test.ts new file mode 100644 index 0000000000..31e96077c4 --- /dev/null +++ b/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserItemList/test/OutputParserItemList.node.test.ts @@ -0,0 +1,123 @@ +import { mock } from 'jest-mock-extended'; +import { normalizeItems } from 'n8n-core'; +import { + ApplicationError, + type IExecuteFunctions, + type IWorkflowDataProxyData, +} from 'n8n-workflow'; + +import { N8nItemListOutputParser } from '../../../../utils/output_parsers/N8nItemListOutputParser'; +import { OutputParserItemList } from '../OutputParserItemList.node'; + +describe('OutputParserItemList', () => { + let outputParser: OutputParserItemList; + const thisArg = mock({ + helpers: { normalizeItems }, + }); + const workflowDataProxy = mock({ $input: mock() }); + + beforeEach(() => { + outputParser = new OutputParserItemList(); + thisArg.getWorkflowDataProxy.mockReturnValue(workflowDataProxy); + thisArg.addInputData.mockReturnValue({ index: 0 }); + thisArg.addOutputData.mockReturnValue(); + thisArg.getNodeParameter.mockReset(); + }); + + describe('supplyData', () => { + it('should create a parser with default options', async () => { + thisArg.getNodeParameter.mockImplementation((parameterName) => { + if (parameterName === 'options') { + return {}; + } + throw new ApplicationError('Not implemented'); + }); + + const { response } = await outputParser.supplyData.call(thisArg, 0); + expect(response).toBeInstanceOf(N8nItemListOutputParser); + }); + + it('should create a parser with custom number of items', async () => { + 
thisArg.getNodeParameter.mockImplementation((parameterName) => { + if (parameterName === 'options') { + return { numberOfItems: 5 }; + } + throw new ApplicationError('Not implemented'); + }); + + const { response } = await outputParser.supplyData.call(thisArg, 0); + expect(response).toBeInstanceOf(N8nItemListOutputParser); + expect((response as any).numberOfItems).toBe(5); + }); + + it('should create a parser with custom separator', async () => { + thisArg.getNodeParameter.mockImplementation((parameterName) => { + if (parameterName === 'options') { + return { separator: ',' }; + } + throw new ApplicationError('Not implemented'); + }); + + const { response } = await outputParser.supplyData.call(thisArg, 0); + expect(response).toBeInstanceOf(N8nItemListOutputParser); + expect((response as any).separator).toBe(','); + }); + }); + + describe('parse', () => { + it('should parse a list with default separator', async () => { + thisArg.getNodeParameter.mockImplementation((parameterName) => { + if (parameterName === 'options') { + return {}; + } + throw new ApplicationError('Not implemented'); + }); + + const { response } = await outputParser.supplyData.call(thisArg, 0); + const result = await (response as N8nItemListOutputParser).parse('item1\nitem2\nitem3'); + expect(result).toEqual(['item1', 'item2', 'item3']); + }); + + it('should parse a list with custom separator', async () => { + thisArg.getNodeParameter.mockImplementation((parameterName) => { + if (parameterName === 'options') { + return { separator: ',' }; + } + throw new ApplicationError('Not implemented'); + }); + + const { response } = await outputParser.supplyData.call(thisArg, 0); + const result = await (response as N8nItemListOutputParser).parse('item1,item2,item3'); + expect(result).toEqual(['item1', 'item2', 'item3']); + }); + + it('should limit the number of items returned', async () => { + thisArg.getNodeParameter.mockImplementation((parameterName) => { + if (parameterName === 'options') { + return { numberOfItems: 2 }; + } + throw new ApplicationError('Not implemented'); + }); + + const { response } = await outputParser.supplyData.call(thisArg, 0); + const result = await (response as N8nItemListOutputParser).parse( + 'item1\nitem2\nitem3\nitem4', + ); + expect(result).toEqual(['item1', 'item2']); + }); + + it('should throw an error if not enough items are returned', async () => { + thisArg.getNodeParameter.mockImplementation((parameterName) => { + if (parameterName === 'options') { + return { numberOfItems: 5 }; + } + throw new ApplicationError('Not implemented'); + }); + + const { response } = await outputParser.supplyData.call(thisArg, 0); + await expect( + (response as N8nItemListOutputParser).parse('item1\nitem2\nitem3'), + ).rejects.toThrow('Wrong number of items returned'); + }); + }); +}); diff --git a/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserStructured/OutputParserStructured.node.ts b/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserStructured/OutputParserStructured.node.ts index 6ce6bff76b..b5b6a5846c 100644 --- a/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserStructured/OutputParserStructured.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserStructured/OutputParserStructured.node.ts @@ -1,4 +1,4 @@ -/* eslint-disable n8n-nodes-base/node-dirname-against-convention */ +import type { JSONSchema7 } from 'json-schema'; import { jsonParse, type IExecuteFunctions, @@ -8,83 +8,17 @@ import { NodeOperationError, NodeConnectionType, } from 'n8n-workflow'; 
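The output-parser and tool refactors in the hunks that follow all converge on the same pattern: the user's JSON Schema is converted to a Zod schema in-process via `convertJsonSchemaToZod` instead of evaluating generated code inside a vm2 sandbox. A rough sketch of what that conversion buys (the converter's return type is assumed from how the diff uses it; the schema itself is only an example):

```typescript
import { jsonSchemaToZod } from '@n8n/json-schema-to-zod';
import type { JSONSchema7 } from 'json-schema';

// Example user-provided schema, as it might arrive in the node parameters.
const jsonSchema: JSONSchema7 = {
  type: 'object',
  properties: { name: { type: 'string' }, age: { type: 'number' } },
  required: ['name', 'age'],
};

// Unlike the old `json-schema-to-zod` package, which emitted code that had to be
// evaluated inside a sandbox, the workspace fork is assumed here to return a
// usable Zod schema directly, so validation happens in-process.
const zodSchema = jsonSchemaToZod(jsonSchema);

zodSchema.parse({ name: 'Mac', age: 27 }); // passes
zodSchema.parse({ name: 'Mac', age: '27' }); // throws: Expected number, received string
```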
-import { z } from 'zod'; -import type { JSONSchema7 } from 'json-schema'; -import { StructuredOutputParser } from 'langchain/output_parsers'; -import { OutputParserException } from '@langchain/core/output_parsers'; -import get from 'lodash/get'; -import type { JavaScriptSandbox } from 'n8n-nodes-base/dist/nodes/Code/JavaScriptSandbox'; -import { getConnectionHintNoticeField } from '../../../utils/sharedFields'; -import { logWrapper } from '../../../utils/logWrapper'; -import { generateSchema, getSandboxWithZod } from '../../../utils/schemaParsing'; +import type { z } from 'zod'; + import { inputSchemaField, jsonSchemaExampleField, schemaTypeField, } from '../../../utils/descriptions'; +import { N8nStructuredOutputParser } from '../../../utils/output_parsers/N8nOutputParser'; +import { convertJsonSchemaToZod, generateSchema } from '../../../utils/schemaParsing'; +import { getConnectionHintNoticeField } from '../../../utils/sharedFields'; -const STRUCTURED_OUTPUT_KEY = '__structured__output'; -const STRUCTURED_OUTPUT_OBJECT_KEY = '__structured__output__object'; -const STRUCTURED_OUTPUT_ARRAY_KEY = '__structured__output__array'; - -export class N8nStructuredOutputParser extends StructuredOutputParser { - async parse(text: string): Promise> { - try { - const parsed = (await super.parse(text)) as object; - - return ( - get(parsed, [STRUCTURED_OUTPUT_KEY, STRUCTURED_OUTPUT_OBJECT_KEY]) ?? - get(parsed, [STRUCTURED_OUTPUT_KEY, STRUCTURED_OUTPUT_ARRAY_KEY]) ?? - get(parsed, STRUCTURED_OUTPUT_KEY) ?? - parsed - ); - } catch (e) { - // eslint-disable-next-line n8n-nodes-base/node-execute-block-wrong-error-thrown - throw new OutputParserException(`Failed to parse. Text: "${text}". Error: ${e}`, text); - } - } - - static async fromZedJsonSchema( - sandboxedSchema: JavaScriptSandbox, - nodeVersion: number, - ): Promise>> { - const zodSchema = await sandboxedSchema.runCode>(); - - let returnSchema: z.ZodSchema; - if (nodeVersion === 1) { - returnSchema = z.object({ - [STRUCTURED_OUTPUT_KEY]: z - .object({ - [STRUCTURED_OUTPUT_OBJECT_KEY]: zodSchema.optional(), - [STRUCTURED_OUTPUT_ARRAY_KEY]: z.array(zodSchema).optional(), - }) - .describe( - `Wrapper around the output data. It can only contain ${STRUCTURED_OUTPUT_OBJECT_KEY} or ${STRUCTURED_OUTPUT_ARRAY_KEY} but never both.`, - ) - .refine( - (data) => { - // Validate that one and only one of the properties exists - return ( - Boolean(data[STRUCTURED_OUTPUT_OBJECT_KEY]) !== - Boolean(data[STRUCTURED_OUTPUT_ARRAY_KEY]) - ); - }, - { - message: - 'One and only one of __structured__output__object and __structured__output__array should be present.', - path: [STRUCTURED_OUTPUT_KEY], - }, - ), - }); - } else { - returnSchema = z.object({ - output: zodSchema.optional(), - }); - } - - return N8nStructuredOutputParser.fromZodSchema(returnSchema); - } -} export class OutputParserStructured implements INodeType { description: INodeTypeDescription = { displayName: 'Structured Output Parser', @@ -204,15 +138,16 @@ export class OutputParserStructured implements INodeType { const jsonSchema = schemaType === 'fromJson' ? 
generateSchema(jsonExample) : jsonParse(inputSchema); - const zodSchemaSandbox = getSandboxWithZod(this, jsonSchema, 0); + const zodSchema = convertJsonSchemaToZod>(jsonSchema); const nodeVersion = this.getNode().typeVersion; try { - const parser = await N8nStructuredOutputParser.fromZedJsonSchema( - zodSchemaSandbox, + const parser = await N8nStructuredOutputParser.fromZodJsonSchema( + zodSchema, nodeVersion, + this, ); return { - response: logWrapper(parser, this), + response: parser, }; } catch (error) { throw new NodeOperationError(this.getNode(), 'Error during parsing of JSON Schema.'); diff --git a/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserStructured/test/OutputParserStructured.node.test.ts b/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserStructured/test/OutputParserStructured.node.test.ts index b4dd6708eb..af72c49d7e 100644 --- a/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserStructured/test/OutputParserStructured.node.test.ts +++ b/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserStructured/test/OutputParserStructured.node.test.ts @@ -1,8 +1,13 @@ -import type { IExecuteFunctions, INode, IWorkflowDataProxyData } from 'n8n-workflow'; import { mock } from 'jest-mock-extended'; import { normalizeItems } from 'n8n-core'; -import type { z } from 'zod'; -import type { StructuredOutputParser } from 'langchain/output_parsers'; +import { + jsonParse, + type IExecuteFunctions, + type INode, + type IWorkflowDataProxyData, +} from 'n8n-workflow'; + +import type { N8nStructuredOutputParser } from '../../../../utils/output_parsers/N8nStructuredOutputParser'; import { OutputParserStructured } from '../OutputParserStructured.node'; describe('OutputParserStructured', () => { @@ -11,139 +16,451 @@ describe('OutputParserStructured', () => { helpers: { normalizeItems }, }); const workflowDataProxy = mock({ $input: mock() }); - thisArg.getWorkflowDataProxy.mockReturnValue(workflowDataProxy); - thisArg.getNode.mockReturnValue(mock({ typeVersion: 1.1 })); - thisArg.addInputData.mockReturnValue({ index: 0 }); - thisArg.addOutputData.mockReturnValue(); beforeEach(() => { outputParser = new OutputParserStructured(); + thisArg.getWorkflowDataProxy.mockReturnValue(workflowDataProxy); + thisArg.addInputData.mockReturnValue({ index: 0 }); + thisArg.addOutputData.mockReturnValue(); }); describe('supplyData', () => { - it('should parse a valid JSON schema', async () => { - const schema = `{ - "type": "object", - "properties": { - "name": { - "type": "string" + describe('Version 1.1 and below', () => { + beforeEach(() => { + thisArg.getNode.mockReturnValue(mock({ typeVersion: 1.1 })); + }); + + it('should parse a complex nested schema', async () => { + const schema = `{ + "type": "object", + "properties": { + "user": { + "type": "object", + "properties": { + "name": { "type": "string" }, + "details": { + "type": "object", + "properties": { + "age": { "type": "number" }, + "hobbies": { "type": "array", "items": { "type": "string" } } + } + } + } + }, + "timestamp": { "type": "string", "format": "date-time" } }, - "age": { - "type": "number" - } - }, - "required": ["name", "age"] - }`; - thisArg.getNodeParameter.calledWith('jsonSchema', 0).mockReturnValueOnce(schema); - const { response } = (await outputParser.supplyData.call(thisArg, 0)) as { - response: StructuredOutputParser>; - }; - const outputObject = { output: { name: 'Mac', age: 27 } }; - const parsersOutput = await response.parse(`Here's the output! 
- \`\`\`json - ${JSON.stringify(outputObject)} - \`\`\` - `); - - expect(parsersOutput).toEqual(outputObject); - }); - it('should handle missing required properties', async () => { - const schema = `{ - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "age": { - "type": "number" - } - }, - "required": ["name", "age"] - }`; - thisArg.getNodeParameter.calledWith('jsonSchema', 0).mockReturnValueOnce(schema); - const { response } = (await outputParser.supplyData.call(thisArg, 0)) as { - response: StructuredOutputParser>; - }; - const outputObject = { output: { name: 'Mac' } }; - - await expect( - response.parse(`Here's the output! - \`\`\`json - ${JSON.stringify(outputObject)} - \`\`\` - `), - ).rejects.toThrow('Required'); - }); - - it('should throw on wrong type', async () => { - const schema = `{ - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "age": { - "type": "number" - } - }, - "required": ["name", "age"] - }`; - thisArg.getNodeParameter.calledWith('jsonSchema', 0).mockReturnValueOnce(schema); - const { response } = (await outputParser.supplyData.call(thisArg, 0)) as { - response: StructuredOutputParser>; - }; - const outputObject = { output: { name: 'Mac', age: '27' } }; - - await expect( - response.parse(`Here's the output! - \`\`\`json - ${JSON.stringify(outputObject)} - \`\`\` - `), - ).rejects.toThrow('Expected number, received string'); - }); - - it('should parse array output', async () => { - const schema = `{ - "type": "object", - "properties": { - "myArr": { - "type": "array", - "items": { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "age": { - "type": "number" - } + "required": ["user", "timestamp"] + }`; + thisArg.getNodeParameter.calledWith('jsonSchema', 0).mockReturnValueOnce(schema); + const { response } = (await outputParser.supplyData.call(thisArg, 0)) as { + response: N8nStructuredOutputParser; + }; + const outputObject = { + output: { + user: { + name: 'Alice', + details: { + age: 30, + hobbies: ['reading', 'hiking'], }, - "required": ["name", "age"] - } - } - }, - "required": ["myArr"] - }`; - thisArg.getNodeParameter.calledWith('jsonSchema', 0).mockReturnValueOnce(schema); - const { response } = (await outputParser.supplyData.call(thisArg, 0)) as { - response: StructuredOutputParser>; - }; - const outputObject = { - output: { - myArr: [ - { name: 'Mac', age: 27 }, - { name: 'Alice', age: 25 }, - ], - }, - }; - const parsersOutput = await response.parse(`Here's the output! 
- \`\`\`json - ${JSON.stringify(outputObject)} - \`\`\` - `); + }, + timestamp: '2023-04-01T12:00:00Z', + }, + }; + const parsersOutput = await response.parse(`Here's the complex output: + \`\`\`json + ${JSON.stringify(outputObject)} + \`\`\` + `); - expect(parsersOutput).toEqual(outputObject); + expect(parsersOutput).toEqual(outputObject); + }); + + it('should handle optional fields correctly', async () => { + const schema = `{ + "type": "object", + "properties": { + "name": { "type": "string" }, + "age": { "type": "number" }, + "email": { "type": "string", "format": "email" } + }, + "required": ["name"] + }`; + thisArg.getNodeParameter.calledWith('jsonSchema', 0).mockReturnValueOnce(schema); + const { response } = (await outputParser.supplyData.call(thisArg, 0)) as { + response: N8nStructuredOutputParser; + }; + const outputObject = { + output: { + name: 'Bob', + email: 'bob@example.com', + }, + }; + const parsersOutput = await response.parse(`Here's the output with optional fields: + \`\`\`json + ${JSON.stringify(outputObject)} + \`\`\` + `); + + expect(parsersOutput).toEqual(outputObject); + }); + + it('should handle arrays of objects', async () => { + const schema = `{ + "type": "object", + "properties": { + "users": { + "type": "array", + "items": { + "type": "object", + "properties": { + "id": { "type": "number" }, + "name": { "type": "string" } + }, + "required": ["id", "name"] + } + } + }, + "required": ["users"] + }`; + thisArg.getNodeParameter.calledWith('jsonSchema', 0).mockReturnValueOnce(schema); + const { response } = (await outputParser.supplyData.call(thisArg, 0)) as { + response: N8nStructuredOutputParser; + }; + const outputObject = { + output: { + users: [ + { id: 1, name: 'Alice' }, + { id: 2, name: 'Bob' }, + ], + }, + }; + const parsersOutput = await response.parse(`Here's the array output: + \`\`\`json + ${JSON.stringify(outputObject)} + \`\`\` + `); + + expect(parsersOutput).toEqual(outputObject); + }); + + it('should handle empty objects', async () => { + const schema = `{ + "type": "object", + "properties": { + "data": { + "type": "object" + } + }, + "required": ["data"] + }`; + thisArg.getNodeParameter.calledWith('jsonSchema', 0).mockReturnValueOnce(schema); + const { response } = (await outputParser.supplyData.call(thisArg, 0)) as { + response: N8nStructuredOutputParser; + }; + const outputObject = { + output: { + data: {}, + }, + }; + const parsersOutput = await response.parse(`Here's the empty object output: + \`\`\`json + ${JSON.stringify(outputObject)} + \`\`\` + `); + + expect(parsersOutput).toEqual(outputObject); + }); + + it('should throw error for null values in non-nullable fields', async () => { + const schema = `{ + "type": "object", + "properties": { + "name": { "type": "string" }, + "age": { "type": "number" } + }, + "required": ["name", "age"] + }`; + thisArg.getNodeParameter.calledWith('jsonSchema', 0).mockReturnValueOnce(schema); + const { response } = (await outputParser.supplyData.call(thisArg, 0)) as { + response: N8nStructuredOutputParser; + }; + const outputObject = { + output: { + name: 'Charlie', + age: null, + }, + }; + + await expect( + response.parse( + `Here's the output with null value: + \`\`\`json + ${JSON.stringify(outputObject)} + \`\`\` + `, + undefined, + (e) => e, + ), + ).rejects.toThrow('Expected number, received null'); + }); + }); + + describe('Version 1.2 and above', () => { + beforeEach(() => { + thisArg.getNode.mockReturnValue(mock({ typeVersion: 1.2 })); + }); + + it('should parse output using schema generated from 
complex JSON example', async () => { + const jsonExample = `{ + "user": { + "name": "Alice", + "details": { + "age": 30, + "address": { + "street": "123 Main St", + "city": "Anytown", + "zipCode": "12345" + } + } + }, + "orders": [ + { + "id": "ORD-001", + "items": ["item1", "item2"], + "total": 50.99 + }, + { + "id": "ORD-002", + "items": ["item3"], + "total": 25.50 + } + ], + "isActive": true + }`; + thisArg.getNodeParameter.calledWith('schemaType', 0).mockReturnValueOnce('fromJson'); + thisArg.getNodeParameter + .calledWith('jsonSchemaExample', 0) + .mockReturnValueOnce(jsonExample); + + const { response } = (await outputParser.supplyData.call(thisArg, 0)) as { + response: N8nStructuredOutputParser; + }; + + const outputObject = { + output: jsonParse(jsonExample), + }; + + const parsersOutput = await response.parse(`Here's the complex output: + \`\`\`json + ${JSON.stringify(outputObject)} + \`\`\` + `); + + expect(parsersOutput).toEqual(outputObject); + }); + + it('should validate enum values', async () => { + const inputSchema = `{ + "type": "object", + "properties": { + "color": { + "type": "string", + "enum": ["red", "green", "blue"] + } + }, + "required": ["color"] + }`; + thisArg.getNodeParameter.calledWith('schemaType', 0).mockReturnValueOnce('manual'); + thisArg.getNodeParameter.calledWith('inputSchema', 0).mockReturnValueOnce(inputSchema); + + const { response } = (await outputParser.supplyData.call(thisArg, 0)) as { + response: N8nStructuredOutputParser; + }; + + const validOutput = { + output: { + color: 'green', + }, + }; + + const invalidOutput = { + output: { + color: 'yellow', + }, + }; + + await expect( + response.parse(`Valid output: + \`\`\`json + ${JSON.stringify(validOutput)} + \`\`\` + `), + ).resolves.toEqual(validOutput); + + await expect( + response.parse( + `Invalid output: + \`\`\`json + ${JSON.stringify(invalidOutput)} + \`\`\` + `, + undefined, + (e) => e, + ), + ).rejects.toThrow(); + }); + + it('should handle recursive structures', async () => { + const inputSchema = `{ + "type": "object", + "properties": { + "name": { "type": "string" }, + "children": { + "type": "array", + "items": { "$ref": "#" } + } + }, + "required": ["name"] + }`; + thisArg.getNodeParameter.calledWith('schemaType', 0).mockReturnValueOnce('manual'); + thisArg.getNodeParameter.calledWith('inputSchema', 0).mockReturnValueOnce(inputSchema); + + const { response } = (await outputParser.supplyData.call(thisArg, 0)) as { + response: N8nStructuredOutputParser; + }; + + const outputObject = { + output: { + name: 'Root', + children: [ + { + name: 'Child1', + children: [{ name: 'Grandchild1' }, { name: 'Grandchild2' }], + }, + { + name: 'Child2', + }, + ], + }, + }; + + const parsersOutput = await response.parse(`Here's the recursive structure output: + \`\`\`json + ${JSON.stringify(outputObject)} + \`\`\` + `); + + expect(parsersOutput).toEqual(outputObject); + }); + + it('should handle missing required properties', async () => { + const schema = `{ + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "age": { + "type": "number" + } + }, + "required": ["name", "age"] + }`; + thisArg.getNodeParameter.calledWith('schemaType', 0).mockReturnValueOnce('manual'); + thisArg.getNodeParameter.calledWith('inputSchema', 0).mockReturnValueOnce(schema); + + const { response } = (await outputParser.supplyData.call(thisArg, 0)) as { + response: N8nStructuredOutputParser; + }; + const outputObject = { output: { name: 'Mac' } }; + + await expect( + response.parse( + `Here's the output! 
+ \`\`\`json + ${JSON.stringify(outputObject)} + \`\`\` + `, + undefined, + (e) => e, + ), + ).rejects.toThrow('Required'); + }); + it('should throw on wrong type', async () => { + const schema = `{ + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "age": { + "type": "number" + } + }, + "required": ["name", "age"] + }`; + thisArg.getNodeParameter.calledWith('inputSchema', 0).mockReturnValueOnce(schema); + const { response } = (await outputParser.supplyData.call(thisArg, 0)) as { + response: N8nStructuredOutputParser; + }; + const outputObject = { output: { name: 'Mac', age: '27' } }; + + await expect( + response.parse( + `Here's the output! + \`\`\`json + ${JSON.stringify(outputObject)} + \`\`\` + `, + undefined, + (e) => e, + ), + ).rejects.toThrow('Expected number, received string'); + }); + + it('should parse array output', async () => { + const schema = `{ + "type": "object", + "properties": { + "myArr": { + "type": "array", + "items": { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "age": { + "type": "number" + } + }, + "required": ["name", "age"] + } + } + }, + "required": ["myArr"] + }`; + thisArg.getNodeParameter.calledWith('inputSchema', 0).mockReturnValueOnce(schema); + const { response } = (await outputParser.supplyData.call(thisArg, 0)) as { + response: N8nStructuredOutputParser; + }; + const outputObject = { + output: { + myArr: [ + { name: 'Mac', age: 27 }, + { name: 'Alice', age: 25 }, + ], + }, + }; + const parsersOutput = await response.parse(`Here's the output! + \`\`\`json + ${JSON.stringify(outputObject)} + \`\`\` + `); + + expect(parsersOutput).toEqual(outputObject); + }); }); }); }); diff --git a/packages/@n8n/nodes-langchain/nodes/tools/ToolCode/ToolCode.node.ts b/packages/@n8n/nodes-langchain/nodes/tools/ToolCode/ToolCode.node.ts index 7980f5fa9d..2a2a635c90 100644 --- a/packages/@n8n/nodes-langchain/nodes/tools/ToolCode/ToolCode.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/tools/ToolCode/ToolCode.node.ts @@ -1,4 +1,10 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ +import { DynamicStructuredTool, DynamicTool } from '@langchain/core/tools'; +import type { JSONSchema7 } from 'json-schema'; +import { JavaScriptSandbox } from 'n8n-nodes-base/dist/nodes/Code/JavaScriptSandbox'; +import { PythonSandbox } from 'n8n-nodes-base/dist/nodes/Code/PythonSandbox'; +import type { Sandbox } from 'n8n-nodes-base/dist/nodes/Code/Sandbox'; +import { getSandboxContext } from 'n8n-nodes-base/dist/nodes/Code/Sandbox'; import type { IExecuteFunctions, INodeType, @@ -7,23 +13,16 @@ import type { ExecutionError, IDataObject, } from 'n8n-workflow'; - import { jsonParse, NodeConnectionType, NodeOperationError } from 'n8n-workflow'; -import type { Sandbox } from 'n8n-nodes-base/dist/nodes/Code/Sandbox'; -import { getSandboxContext } from 'n8n-nodes-base/dist/nodes/Code/Sandbox'; -import { JavaScriptSandbox } from 'n8n-nodes-base/dist/nodes/Code/JavaScriptSandbox'; -import { PythonSandbox } from 'n8n-nodes-base/dist/nodes/Code/PythonSandbox'; -import { DynamicStructuredTool, DynamicTool } from '@langchain/core/tools'; -import { getConnectionHintNoticeField } from '../../../utils/sharedFields'; +import type { DynamicZodObject } from '../../../types/zod.types'; import { inputSchemaField, jsonSchemaExampleField, schemaTypeField, } from '../../../utils/descriptions'; -import { generateSchema, getSandboxWithZod } from '../../../utils/schemaParsing'; -import type { JSONSchema7 } from 'json-schema'; -import type { 
DynamicZodObject } from '../../../types/zod.types'; +import { convertJsonSchemaToZod, generateSchema } from '../../../utils/schemaParsing'; +import { getConnectionHintNoticeField } from '../../../utils/sharedFields'; export class ToolCode implements INodeType { description: INodeTypeDescription = { @@ -273,10 +272,9 @@ export class ToolCode implements INodeType { ? generateSchema(jsonExample) : jsonParse(inputSchema); - const zodSchemaSandbox = getSandboxWithZod(this, jsonSchema, 0); - const zodSchema = await zodSchemaSandbox.runCode(); + const zodSchema = convertJsonSchemaToZod(jsonSchema); - tool = new DynamicStructuredTool({ + tool = new DynamicStructuredTool({ schema: zodSchema, ...commonToolOptions, }); diff --git a/packages/@n8n/nodes-langchain/nodes/tools/ToolWorkflow/ToolWorkflow.node.ts b/packages/@n8n/nodes-langchain/nodes/tools/ToolWorkflow/ToolWorkflow.node.ts index 352a727d11..6cc983eae4 100644 --- a/packages/@n8n/nodes-langchain/nodes/tools/ToolWorkflow/ToolWorkflow.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/tools/ToolWorkflow/ToolWorkflow.node.ts @@ -1,3 +1,10 @@ +import type { CallbackManagerForToolRun } from '@langchain/core/callbacks/manager'; +import { DynamicStructuredTool, DynamicTool } from '@langchain/core/tools'; +import type { JSONSchema7 } from 'json-schema'; +import get from 'lodash/get'; +import isObject from 'lodash/isObject'; +import type { SetField, SetNodeOptions } from 'n8n-nodes-base/dist/nodes/Set/v2/helpers/interfaces'; +import * as manual from 'n8n-nodes-base/dist/nodes/Set/v2/manual.mode'; import type { IExecuteFunctions, IExecuteWorkflowInfo, @@ -11,22 +18,16 @@ import type { INodeParameterResourceLocator, } from 'n8n-workflow'; import { NodeConnectionType, NodeOperationError, jsonParse } from 'n8n-workflow'; -import type { SetField, SetNodeOptions } from 'n8n-nodes-base/dist/nodes/Set/v2/helpers/interfaces'; -import * as manual from 'n8n-nodes-base/dist/nodes/Set/v2/manual.mode'; -import { DynamicStructuredTool, DynamicTool } from '@langchain/core/tools'; -import get from 'lodash/get'; -import isObject from 'lodash/isObject'; -import type { CallbackManagerForToolRun } from '@langchain/core/callbacks/manager'; -import type { JSONSchema7 } from 'json-schema'; -import { getConnectionHintNoticeField } from '../../../utils/sharedFields'; import type { DynamicZodObject } from '../../../types/zod.types'; -import { generateSchema, getSandboxWithZod } from '../../../utils/schemaParsing'; import { jsonSchemaExampleField, schemaTypeField, inputSchemaField, } from '../../../utils/descriptions'; +import { convertJsonSchemaToZod, generateSchema } from '../../../utils/schemaParsing'; +import { getConnectionHintNoticeField } from '../../../utils/sharedFields'; + export class ToolWorkflow implements INodeType { description: INodeTypeDescription = { displayName: 'Call n8n Workflow Tool', @@ -529,10 +530,9 @@ export class ToolWorkflow implements INodeType { ? 
generateSchema(jsonExample) : jsonParse(inputSchema); - const zodSchemaSandbox = getSandboxWithZod(this, jsonSchema, 0); - const zodSchema = await zodSchemaSandbox.runCode(); + const zodSchema = convertJsonSchemaToZod(jsonSchema); - tool = new DynamicStructuredTool({ + tool = new DynamicStructuredTool({ schema: zodSchema, ...functionBase, }); diff --git a/packages/@n8n/nodes-langchain/nodes/vector_store/shared/createVectorStoreNode.ts b/packages/@n8n/nodes-langchain/nodes/vector_store/shared/createVectorStoreNode.ts index d487969073..45d28542d7 100644 --- a/packages/@n8n/nodes-langchain/nodes/vector_store/shared/createVectorStoreNode.ts +++ b/packages/@n8n/nodes-langchain/nodes/vector_store/shared/createVectorStoreNode.ts @@ -1,5 +1,7 @@ /* eslint-disable n8n-nodes-base/node-filename-against-convention */ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ +import type { Document } from '@langchain/core/documents'; +import type { Embeddings } from '@langchain/core/embeddings'; import type { VectorStore } from '@langchain/core/vectorstores'; import { NodeConnectionType, NodeOperationError } from 'n8n-workflow'; import type { @@ -15,14 +17,13 @@ import type { Icon, INodePropertyOptions, } from 'n8n-workflow'; -import type { Embeddings } from '@langchain/core/embeddings'; -import type { Document } from '@langchain/core/documents'; -import { logWrapper } from '../../../utils/logWrapper'; -import { N8nJsonLoader } from '../../../utils/N8nJsonLoader'; -import type { N8nBinaryLoader } from '../../../utils/N8nBinaryLoader'; -import { getMetadataFiltersValues, logAiEvent } from '../../../utils/helpers'; -import { getConnectionHintNoticeField } from '../../../utils/sharedFields'; + import { processDocument } from './processDocuments'; +import { getMetadataFiltersValues, logAiEvent } from '../../../utils/helpers'; +import { logWrapper } from '../../../utils/logWrapper'; +import type { N8nBinaryLoader } from '../../../utils/N8nBinaryLoader'; +import { N8nJsonLoader } from '../../../utils/N8nJsonLoader'; +import { getConnectionHintNoticeField } from '../../../utils/sharedFields'; type NodeOperationMode = 'insert' | 'load' | 'retrieve' | 'update'; @@ -296,6 +297,9 @@ export const createVectorStoreNode = (args: VectorStoreNodeConstructorArgs) => const resultData = []; for (let itemIndex = 0; itemIndex < items.length; itemIndex++) { + if (this.getExecutionCancelSignal()?.aborted) { + break; + } const itemData = items[itemIndex]; const { processedDocuments, serializedDocuments } = await processDocument( documentInput, diff --git a/packages/@n8n/nodes-langchain/package.json b/packages/@n8n/nodes-langchain/package.json index d3da518f10..0a3dab15cb 100644 --- a/packages/@n8n/nodes-langchain/package.json +++ b/packages/@n8n/nodes-langchain/package.json @@ -1,6 +1,6 @@ { "name": "@n8n/n8n-nodes-langchain", - "version": "1.63.0", + "version": "1.64.0", "description": "", "main": "index.js", "scripts": { @@ -168,7 +168,7 @@ "generate-schema": "2.6.0", "html-to-text": "9.0.5", "jsdom": "23.0.1", - "json-schema-to-zod": "2.1.0", + "@n8n/json-schema-to-zod": "workspace:*", "langchain": "0.3.2", "lodash": "catalog:", "mammoth": "1.7.2", diff --git a/packages/@n8n/nodes-langchain/utils/helpers.ts b/packages/@n8n/nodes-langchain/utils/helpers.ts index c70c8a8991..a760c32ba8 100644 --- a/packages/@n8n/nodes-langchain/utils/helpers.ts +++ b/packages/@n8n/nodes-langchain/utils/helpers.ts @@ -1,12 +1,12 @@ -import { NodeConnectionType, NodeOperationError, jsonStringify } from 'n8n-workflow'; -import type { 
AiEvent, IDataObject, IExecuteFunctions, IWebhookFunctions } from 'n8n-workflow'; +import type { BaseChatMessageHistory } from '@langchain/core/chat_history'; import type { BaseChatModel } from '@langchain/core/language_models/chat_models'; -import type { BaseOutputParser } from '@langchain/core/output_parsers'; +import type { BaseLLM } from '@langchain/core/language_models/llms'; import type { BaseMessage } from '@langchain/core/messages'; import type { Tool } from '@langchain/core/tools'; -import type { BaseLLM } from '@langchain/core/language_models/llms'; import type { BaseChatMemory } from 'langchain/memory'; -import type { BaseChatMessageHistory } from '@langchain/core/chat_history'; +import { NodeConnectionType, NodeOperationError, jsonStringify } from 'n8n-workflow'; +import type { AiEvent, IDataObject, IExecuteFunctions, IWebhookFunctions } from 'n8n-workflow'; + import { N8nTool } from './N8nTool'; function hasMethods(obj: unknown, ...methodNames: Array): obj is T { @@ -66,21 +66,6 @@ export function isToolsInstance(model: unknown): model is Tool { return namespace.includes('tools'); } -export async function getOptionalOutputParsers( - ctx: IExecuteFunctions, -): Promise>> { - let outputParsers: BaseOutputParser[] = []; - - if (ctx.getNodeParameter('hasOutputParser', 0, true) === true) { - outputParsers = (await ctx.getInputConnectionData( - NodeConnectionType.AiOutputParser, - 0, - )) as BaseOutputParser[]; - } - - return outputParsers; -} - export function getPromptInputByType(options: { ctx: IExecuteFunctions; i: number; diff --git a/packages/@n8n/nodes-langchain/utils/logWrapper.ts b/packages/@n8n/nodes-langchain/utils/logWrapper.ts index 8707726183..c1ecd1799c 100644 --- a/packages/@n8n/nodes-langchain/utils/logWrapper.ts +++ b/packages/@n8n/nodes-langchain/utils/logWrapper.ts @@ -1,24 +1,21 @@ -import { NodeOperationError, NodeConnectionType } from 'n8n-workflow'; -import type { IExecuteFunctions, INodeExecutionData } from 'n8n-workflow'; - -import type { Tool } from '@langchain/core/tools'; -import type { BaseMessage } from '@langchain/core/messages'; -import type { InputValues, MemoryVariables, OutputValues } from '@langchain/core/memory'; -import type { BaseChatMessageHistory } from '@langchain/core/chat_history'; -import type { BaseCallbackConfig, Callbacks } from '@langchain/core/callbacks/manager'; - -import { Embeddings } from '@langchain/core/embeddings'; -import { VectorStore } from '@langchain/core/vectorstores'; -import type { Document } from '@langchain/core/documents'; -import { TextSplitter } from '@langchain/textsplitters'; import type { BaseChatMemory } from '@langchain/community/memory/chat_memory'; +import type { BaseCallbackConfig, Callbacks } from '@langchain/core/callbacks/manager'; +import type { BaseChatMessageHistory } from '@langchain/core/chat_history'; +import type { Document } from '@langchain/core/documents'; +import { Embeddings } from '@langchain/core/embeddings'; +import type { InputValues, MemoryVariables, OutputValues } from '@langchain/core/memory'; +import type { BaseMessage } from '@langchain/core/messages'; import { BaseRetriever } from '@langchain/core/retrievers'; -import { BaseOutputParser, OutputParserException } from '@langchain/core/output_parsers'; -import { isObject } from 'lodash'; +import type { Tool } from '@langchain/core/tools'; +import { VectorStore } from '@langchain/core/vectorstores'; +import { TextSplitter } from '@langchain/textsplitters'; import type { BaseDocumentLoader } from 'langchain/dist/document_loaders/base'; 
-import { N8nJsonLoader } from './N8nJsonLoader'; -import { N8nBinaryLoader } from './N8nBinaryLoader'; +import type { IExecuteFunctions, INodeExecutionData } from 'n8n-workflow'; +import { NodeOperationError, NodeConnectionType } from 'n8n-workflow'; + import { logAiEvent, isToolsInstance, isBaseChatMemory, isBaseChatMessageHistory } from './helpers'; +import { N8nBinaryLoader } from './N8nBinaryLoader'; +import { N8nJsonLoader } from './N8nJsonLoader'; const errorsMap: { [key: string]: { message: string; description: string } } = { 'You exceeded your current quota, please check your plan and billing details.': { @@ -40,10 +37,6 @@ export async function callMethodAsync( try { return await parameters.method.call(this, ...parameters.arguments); } catch (e) { - // Langchain checks for OutputParserException to run retry chain - // for auto-fixing the output so skip wrapping in this case - if (e instanceof OutputParserException) throw e; - // Propagate errors from sub-nodes if (e.functionality === 'configuration-node') throw e; const connectedNode = parameters.executeFunctions.getNode(); @@ -63,7 +56,9 @@ export async function callMethodAsync( error, ); if (error.message) { - error.description = error.message; + if (!error.description) { + error.description = error.message; + } throw error; } throw new NodeOperationError( @@ -109,7 +104,6 @@ export function logWrapper( | Tool | BaseChatMemory | BaseChatMessageHistory - | BaseOutputParser | BaseRetriever | Embeddings | Document[] @@ -219,44 +213,6 @@ export function logWrapper( } } - // ========== BaseOutputParser ========== - if (originalInstance instanceof BaseOutputParser) { - if (prop === 'parse' && 'parse' in target) { - return async (text: string | Record): Promise => { - connectionType = NodeConnectionType.AiOutputParser; - const stringifiedText = isObject(text) ? JSON.stringify(text) : text; - const { index } = executeFunctions.addInputData(connectionType, [ - [{ json: { action: 'parse', text: stringifiedText } }], - ]); - - try { - const response = (await callMethodAsync.call(target, { - executeFunctions, - connectionType, - currentNodeRunIndex: index, - method: target[prop], - arguments: [stringifiedText], - })) as object; - - void logAiEvent(executeFunctions, 'ai-output-parsed', { text, response }); - executeFunctions.addOutputData(connectionType, index, [ - [{ json: { action: 'parse', response } }], - ]); - return response; - } catch (error) { - void logAiEvent(executeFunctions, 'ai-output-parsed', { - text, - response: error.message ?? error, - }); - executeFunctions.addOutputData(connectionType, index, [ - [{ json: { action: 'parse', response: error.message ?? 
error } }], - ]); - throw error; - } - }; - } - } - // ========== BaseRetriever ========== if (originalInstance instanceof BaseRetriever) { if (prop === 'getRelevantDocuments' && 'getRelevantDocuments' in target) { diff --git a/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserItemList/ItemListOutputParser.ts b/packages/@n8n/nodes-langchain/utils/output_parsers/N8nItemListOutputParser.ts similarity index 88% rename from packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserItemList/ItemListOutputParser.ts rename to packages/@n8n/nodes-langchain/utils/output_parsers/N8nItemListOutputParser.ts index 7e596a2b68..f24238690b 100644 --- a/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserItemList/ItemListOutputParser.ts +++ b/packages/@n8n/nodes-langchain/utils/output_parsers/N8nItemListOutputParser.ts @@ -1,9 +1,9 @@ import { BaseOutputParser, OutputParserException } from '@langchain/core/output_parsers'; -export class ItemListOutputParser extends BaseOutputParser { +export class N8nItemListOutputParser extends BaseOutputParser { lc_namespace = ['n8n-nodes-langchain', 'output_parsers', 'list_items']; - private numberOfItems: number | undefined; + private numberOfItems: number = 3; private separator: string; @@ -39,7 +39,7 @@ export class ItemListOutputParser extends BaseOutputParser { this.numberOfItems ? this.numberOfItems + ' ' : '' }items separated by`; - const numberOfExamples = this.numberOfItems ?? 3; + const numberOfExamples = this.numberOfItems; const examples: string[] = []; for (let i = 1; i <= numberOfExamples; i++) { @@ -48,4 +48,8 @@ export class ItemListOutputParser extends BaseOutputParser { return `${instructions} "${this.separator}" (for example: "${examples.join(this.separator)}")`; } + + getSchema() { + return; + } } diff --git a/packages/@n8n/nodes-langchain/utils/output_parsers/N8nOutputFixingParser.ts b/packages/@n8n/nodes-langchain/utils/output_parsers/N8nOutputFixingParser.ts new file mode 100644 index 0000000000..bfcbf88b33 --- /dev/null +++ b/packages/@n8n/nodes-langchain/utils/output_parsers/N8nOutputFixingParser.ts @@ -0,0 +1,95 @@ +import type { Callbacks } from '@langchain/core/callbacks/manager'; +import type { BaseLanguageModel } from '@langchain/core/language_models/base'; +import type { AIMessage } from '@langchain/core/messages'; +import { BaseOutputParser } from '@langchain/core/output_parsers'; +import type { IExecuteFunctions } from 'n8n-workflow'; +import { NodeConnectionType } from 'n8n-workflow'; + +import type { N8nStructuredOutputParser } from './N8nStructuredOutputParser'; +import { NAIVE_FIX_PROMPT } from './prompt'; +import { logAiEvent } from '../helpers'; + +export class N8nOutputFixingParser extends BaseOutputParser { + private context: IExecuteFunctions; + + private model: BaseLanguageModel; + + private outputParser: N8nStructuredOutputParser; + + lc_namespace = ['langchain', 'output_parsers', 'fix']; + + constructor( + context: IExecuteFunctions, + model: BaseLanguageModel, + outputParser: N8nStructuredOutputParser, + ) { + super(); + this.context = context; + this.model = model; + this.outputParser = outputParser; + } + + getRetryChain() { + return NAIVE_FIX_PROMPT.pipe(this.model); + } + + /** + * Attempts to parse the completion string using the output parser. + * If the initial parse fails, it tries to fix the output using a retry chain. 
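In other words, the fixing parser tries the structured parser once, and on failure feeds the bad completion plus the error back to the model before parsing again. A LangChain-free sketch of that control flow (the `retry` callback is a purely illustrative stand-in for the prompt/model retry chain, and the schema is just an example):

```typescript
import { z } from 'zod';

// Illustrative target schema; the real parser gets it from the connected
// structured output parser node.
const schema = z.object({ name: z.string(), age: z.number() });

async function parseWithAutofix(
  completion: string,
  retry: (badOutput: string, error: string) => Promise<string>, // stand-in for the LLM retry chain
): Promise<z.infer<typeof schema>> {
  try {
    // First attempt: validate the raw completion.
    return schema.parse(JSON.parse(completion));
  } catch (error) {
    // Second attempt: let the model rewrite its own output, then validate that.
    // If this also fails, the error propagates to the caller, mirroring the
    // behaviour exercised in the tests above.
    const fixed = await retry(completion, String(error));
    return schema.parse(JSON.parse(fixed));
  }
}
```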
+ * @param completion The string to be parsed + * @returns The parsed response + * @throws Error if both parsing attempts fail + */ + async parse(completion: string, callbacks?: Callbacks) { + const { index } = this.context.addInputData(NodeConnectionType.AiOutputParser, [ + [{ json: { action: 'parse', text: completion } }], + ]); + + try { + // First attempt to parse the completion + const response = await this.outputParser.parse(completion, callbacks, (e) => e); + void logAiEvent(this.context, 'ai-output-parsed', { text: completion, response }); + + this.context.addOutputData(NodeConnectionType.AiOutputParser, index, [ + [{ json: { action: 'parse', response } }], + ]); + + return response; + } catch (error) { + try { + // Second attempt: use retry chain to fix the output + const result = (await this.getRetryChain().invoke({ + completion, + error, + instructions: this.getFormatInstructions(), + })) as AIMessage; + + const resultText = result.content.toString(); + const parsed = await this.outputParser.parse(resultText, callbacks); + + // Add the successfully parsed output to the context + this.context.addOutputData(NodeConnectionType.AiOutputParser, index, [ + [{ json: { action: 'parse', response: parsed } }], + ]); + + return parsed; + } catch (autoParseError) { + // If both attempts fail, add the error to the output and throw + this.context.addOutputData(NodeConnectionType.AiOutputParser, index, autoParseError); + throw autoParseError; + } + } + } + + /** + * Method to get the format instructions for the parser. + * @returns The format instructions for the parser. + */ + getFormatInstructions() { + return this.outputParser.getFormatInstructions(); + } + + getSchema() { + return this.outputParser.schema; + } +} diff --git a/packages/@n8n/nodes-langchain/utils/output_parsers/N8nOutputParser.ts b/packages/@n8n/nodes-langchain/utils/output_parsers/N8nOutputParser.ts new file mode 100644 index 0000000000..e9d23a0dea --- /dev/null +++ b/packages/@n8n/nodes-langchain/utils/output_parsers/N8nOutputParser.ts @@ -0,0 +1,26 @@ +import type { IExecuteFunctions } from 'n8n-workflow'; +import { NodeConnectionType } from 'n8n-workflow'; + +import { N8nItemListOutputParser } from './N8nItemListOutputParser'; +import { N8nOutputFixingParser } from './N8nOutputFixingParser'; +import { N8nStructuredOutputParser } from './N8nStructuredOutputParser'; + +export type N8nOutputParser = + | N8nOutputFixingParser + | N8nStructuredOutputParser + | N8nItemListOutputParser; + +export { N8nOutputFixingParser, N8nItemListOutputParser, N8nStructuredOutputParser }; + +export async function getOptionalOutputParsers(ctx: IExecuteFunctions): Promise { + let outputParsers: N8nOutputParser[] = []; + + if (ctx.getNodeParameter('hasOutputParser', 0, true) === true) { + outputParsers = (await ctx.getInputConnectionData( + NodeConnectionType.AiOutputParser, + 0, + )) as N8nOutputParser[]; + } + + return outputParsers; +} diff --git a/packages/@n8n/nodes-langchain/utils/output_parsers/N8nStructuredOutputParser.ts b/packages/@n8n/nodes-langchain/utils/output_parsers/N8nStructuredOutputParser.ts new file mode 100644 index 0000000000..4799193be6 --- /dev/null +++ b/packages/@n8n/nodes-langchain/utils/output_parsers/N8nStructuredOutputParser.ts @@ -0,0 +1,116 @@ +import type { Callbacks } from '@langchain/core/callbacks/manager'; +import { StructuredOutputParser } from 'langchain/output_parsers'; +import get from 'lodash/get'; +import type { IExecuteFunctions } from 'n8n-workflow'; +import { NodeConnectionType, NodeOperationError } 
from 'n8n-workflow'; +import { z } from 'zod'; + +import { logAiEvent } from '../helpers'; + +const STRUCTURED_OUTPUT_KEY = '__structured__output'; +const STRUCTURED_OUTPUT_OBJECT_KEY = '__structured__output__object'; +const STRUCTURED_OUTPUT_ARRAY_KEY = '__structured__output__array'; + +export class N8nStructuredOutputParser extends StructuredOutputParser< + z.ZodType +> { + context: IExecuteFunctions; + + constructor(context: IExecuteFunctions, zodSchema: z.ZodSchema) { + super(zodSchema); + this.context = context; + } + + lc_namespace = ['langchain', 'output_parsers', 'structured']; + + async parse( + text: string, + _callbacks?: Callbacks, + errorMapper?: (error: Error) => Error, + ): Promise { + const { index } = this.context.addInputData(NodeConnectionType.AiOutputParser, [ + [{ json: { action: 'parse', text } }], + ]); + try { + const parsed = await super.parse(text); + + const result = (get(parsed, [STRUCTURED_OUTPUT_KEY, STRUCTURED_OUTPUT_OBJECT_KEY]) ?? + get(parsed, [STRUCTURED_OUTPUT_KEY, STRUCTURED_OUTPUT_ARRAY_KEY]) ?? + get(parsed, STRUCTURED_OUTPUT_KEY) ?? + parsed) as Record; + + void logAiEvent(this.context, 'ai-output-parsed', { text, response: result }); + + this.context.addOutputData(NodeConnectionType.AiOutputParser, index, [ + [{ json: { action: 'parse', response: result } }], + ]); + + return result; + } catch (e) { + const nodeError = new NodeOperationError( + this.context.getNode(), + "Model output doesn't fit required format", + { + description: + "To continue the execution when this happens, change the 'On Error' parameter in the root node's settings", + }, + ); + + void logAiEvent(this.context, 'ai-output-parsed', { + text, + response: e.message ?? e, + }); + + this.context.addOutputData(NodeConnectionType.AiOutputParser, index, nodeError); + if (errorMapper) { + throw errorMapper(e); + } + + throw nodeError; + } + } + + static async fromZodJsonSchema( + zodSchema: z.ZodSchema, + nodeVersion: number, + context: IExecuteFunctions, + ): Promise { + let returnSchema: z.ZodType; + if (nodeVersion === 1) { + returnSchema = z.object({ + [STRUCTURED_OUTPUT_KEY]: z + .object({ + [STRUCTURED_OUTPUT_OBJECT_KEY]: zodSchema.optional(), + [STRUCTURED_OUTPUT_ARRAY_KEY]: z.array(zodSchema).optional(), + }) + .describe( + `Wrapper around the output data. 
It can only contain ${STRUCTURED_OUTPUT_OBJECT_KEY} or ${STRUCTURED_OUTPUT_ARRAY_KEY} but never both.`, + ) + .refine( + (data) => { + // Validate that one and only one of the properties exists + return ( + Boolean(data[STRUCTURED_OUTPUT_OBJECT_KEY]) !== + Boolean(data[STRUCTURED_OUTPUT_ARRAY_KEY]) + ); + }, + { + message: + 'One and only one of __structured__output__object and __structured__output__array should be present.', + path: [STRUCTURED_OUTPUT_KEY], + }, + ), + }); + } else { + returnSchema = z.object({ + output: zodSchema.optional(), + }); + } + + return new N8nStructuredOutputParser(context, returnSchema); + } + + getSchema() { + return this.schema; + } +} diff --git a/packages/@n8n/nodes-langchain/utils/output_parsers/prompt.ts b/packages/@n8n/nodes-langchain/utils/output_parsers/prompt.ts new file mode 100644 index 0000000000..47599d230c --- /dev/null +++ b/packages/@n8n/nodes-langchain/utils/output_parsers/prompt.ts @@ -0,0 +1,20 @@ +import { PromptTemplate } from '@langchain/core/prompts'; + +export const NAIVE_FIX_TEMPLATE = `Instructions: +-------------- +{instructions} +-------------- +Completion: +-------------- +{completion} +-------------- + +Above, the Completion did not satisfy the constraints given in the Instructions. +Error: +-------------- +{error} +-------------- + +Please try again. Please only respond with an answer that satisfies the constraints laid out in the Instructions:`; + +export const NAIVE_FIX_PROMPT = PromptTemplate.fromTemplate(NAIVE_FIX_TEMPLATE); diff --git a/packages/@n8n/nodes-langchain/utils/schemaParsing.ts b/packages/@n8n/nodes-langchain/utils/schemaParsing.ts index 0591483e2c..592f1597c2 100644 --- a/packages/@n8n/nodes-langchain/utils/schemaParsing.ts +++ b/packages/@n8n/nodes-langchain/utils/schemaParsing.ts @@ -1,67 +1,10 @@ -import { makeResolverFromLegacyOptions } from '@n8n/vm2'; +import { jsonSchemaToZod } from '@n8n/json-schema-to-zod'; import { json as generateJsonSchema } from 'generate-schema'; import type { SchemaObject } from 'generate-schema'; import type { JSONSchema7 } from 'json-schema'; -import { JavaScriptSandbox } from 'n8n-nodes-base/dist/nodes/Code/JavaScriptSandbox'; -import { getSandboxContext } from 'n8n-nodes-base/dist/nodes/Code/Sandbox'; import type { IExecuteFunctions } from 'n8n-workflow'; import { NodeOperationError, jsonParse } from 'n8n-workflow'; - -const vmResolver = makeResolverFromLegacyOptions({ - external: { - modules: ['json-schema-to-zod', 'zod'], - transitive: false, - }, - resolve(moduleName, parentDirname) { - if (moduleName === 'json-schema-to-zod') { - return require.resolve( - '@n8n/n8n-nodes-langchain/node_modules/json-schema-to-zod/dist/cjs/jsonSchemaToZod.js', - { - paths: [parentDirname], - }, - ); - } - if (moduleName === 'zod') { - return require.resolve('@n8n/n8n-nodes-langchain/node_modules/zod.cjs', { - paths: [parentDirname], - }); - } - return; - }, - builtin: [], -}); - -export function getSandboxWithZod(ctx: IExecuteFunctions, schema: JSONSchema7, itemIndex: number) { - const context = getSandboxContext.call(ctx, itemIndex); - let itemSchema: JSONSchema7 = schema; - try { - // If the root type is not defined, we assume it's an object - if (itemSchema.type === undefined) { - itemSchema = { - type: 'object', - properties: itemSchema.properties ?? 
(itemSchema as { [key: string]: JSONSchema7 }), - }; - } - } catch (error) { - throw new NodeOperationError(ctx.getNode(), 'Error during parsing of JSON Schema.'); - } - - // Make sure to remove the description from root schema - const { description, ...restOfSchema } = itemSchema; - const sandboxedSchema = new JavaScriptSandbox( - context, - ` - const { z } = require('zod'); - const { parseSchema } = require('json-schema-to-zod'); - const zodSchema = parseSchema(${JSON.stringify(restOfSchema)}); - const itemSchema = new Function('z', 'return (' + zodSchema + ')')(z) - return itemSchema - `, - ctx.helpers, - { resolver: vmResolver }, - ); - return sandboxedSchema; -} +import type { z } from 'zod'; export function generateSchema(schemaString: string): JSONSchema7 { const parsedSchema = jsonParse(schemaString); @@ -69,6 +12,10 @@ export function generateSchema(schemaString: string): JSONSchema7 { return generateJsonSchema(parsedSchema) as JSONSchema7; } +export function convertJsonSchemaToZod(schema: JSONSchema7) { + return jsonSchemaToZod(schema); +} + export function throwIfToolSchema(ctx: IExecuteFunctions, error: Error) { if (error?.message?.includes('tool input did not match expected schema')) { throw new NodeOperationError( diff --git a/packages/@n8n/permissions/src/constants.ts b/packages/@n8n/permissions/src/constants.ts index c43677e843..7a0ebf2cb1 100644 --- a/packages/@n8n/permissions/src/constants.ts +++ b/packages/@n8n/permissions/src/constants.ts @@ -3,6 +3,7 @@ export const RESOURCES = { annotationTag: [...DEFAULT_OPERATIONS] as const, auditLogs: ['manage'] as const, banner: ['dismiss'] as const, + community: ['register'] as const, communityPackage: ['install', 'uninstall', 'update', 'list', 'manage'] as const, credential: ['share', 'move', ...DEFAULT_OPERATIONS] as const, externalSecretsProvider: ['sync', ...DEFAULT_OPERATIONS] as const, diff --git a/packages/@n8n/permissions/src/types.ts b/packages/@n8n/permissions/src/types.ts index 1a78f79f15..07ed750f91 100644 --- a/packages/@n8n/permissions/src/types.ts +++ b/packages/@n8n/permissions/src/types.ts @@ -13,6 +13,7 @@ export type WildcardScope = `${Resource}:*` | '*'; export type AnnotationTagScope = ResourceScope<'annotationTag'>; export type AuditLogsScope = ResourceScope<'auditLogs', 'manage'>; export type BannerScope = ResourceScope<'banner', 'dismiss'>; +export type CommunityScope = ResourceScope<'community', 'register'>; export type CommunityPackageScope = ResourceScope< 'communityPackage', 'install' | 'uninstall' | 'update' | 'list' | 'manage' @@ -48,6 +49,7 @@ export type Scope = | AnnotationTagScope | AuditLogsScope | BannerScope + | CommunityScope | CommunityPackageScope | CredentialScope | ExternalSecretProviderScope diff --git a/packages/@n8n/task-runner/package.json b/packages/@n8n/task-runner/package.json index a82b97975d..38c7abba66 100644 --- a/packages/@n8n/task-runner/package.json +++ b/packages/@n8n/task-runner/package.json @@ -1,6 +1,6 @@ { "name": "@n8n/task-runner", - "version": "1.1.0", + "version": "1.2.0", "scripts": { "clean": "rimraf dist .turbo", "start": "node dist/start.js", diff --git a/packages/@n8n/task-runner/src/js-task-runner/__tests__/js-task-runner.test.ts b/packages/@n8n/task-runner/src/js-task-runner/__tests__/js-task-runner.test.ts index 499105f39d..9635e7eae6 100644 --- a/packages/@n8n/task-runner/src/js-task-runner/__tests__/js-task-runner.test.ts +++ b/packages/@n8n/task-runner/src/js-task-runner/__tests__/js-task-runner.test.ts @@ -281,6 +281,20 @@ describe('JsTaskRunner', () => 
{ expect(outcome.result).toEqual([wrapIntoJson({ val: undefined })]); }); }); + + it('should allow access to Node.js Buffers', async () => { + const outcome = await execTaskWithParams({ + task: newTaskWithSettings({ + code: 'return { val: Buffer.from("test-buffer").toString() }', + nodeMode: 'runOnceForAllItems', + }), + taskData: newAllCodeTaskData(inputItems.map(wrapIntoJson), { + envProviderState: undefined, + }), + }); + + expect(outcome.result).toEqual([wrapIntoJson({ val: 'test-buffer' })]); + }); }); describe('runOnceForAllItems', () => { @@ -744,19 +758,20 @@ describe('JsTaskRunner', () => { await runner.receivedSettings(taskId, task.settings); - expect(sendSpy).toHaveBeenCalledWith( - JSON.stringify({ - type: 'runner:taskerror', - taskId, - error: { - message: 'unknown is not defined [line 1]', - description: 'ReferenceError', - lineNumber: 1, - }, - }), - ); - - console.log('DONE'); - }, 1000); + expect(sendSpy).toHaveBeenCalled(); + const calledWith = sendSpy.mock.calls[0][0] as string; + expect(typeof calledWith).toBe('string'); + const calledObject = JSON.parse(calledWith); + expect(calledObject).toEqual({ + type: 'runner:taskerror', + taskId, + error: { + stack: expect.any(String), + message: 'unknown is not defined [line 1]', + description: 'ReferenceError', + lineNumber: 1, + }, + }); + }); }); }); diff --git a/packages/@n8n/task-runner/src/js-task-runner/errors/__tests__/execution-error.test.ts b/packages/@n8n/task-runner/src/js-task-runner/errors/__tests__/execution-error.test.ts index 3777940021..c85e52f977 100644 --- a/packages/@n8n/task-runner/src/js-task-runner/errors/__tests__/execution-error.test.ts +++ b/packages/@n8n/task-runner/src/js-task-runner/errors/__tests__/execution-error.test.ts @@ -42,6 +42,7 @@ describe('ExecutionError', () => { expect(JSON.stringify(executionError)).toBe( JSON.stringify({ + stack: defaultStack, message: 'a.unknown is not a function [line 2, for item 1]', description: 'TypeError', itemIndex: 1, diff --git a/packages/@n8n/task-runner/src/js-task-runner/errors/serializable-error.ts b/packages/@n8n/task-runner/src/js-task-runner/errors/serializable-error.ts index cd0e568de0..ea6321746b 100644 --- a/packages/@n8n/task-runner/src/js-task-runner/errors/serializable-error.ts +++ b/packages/@n8n/task-runner/src/js-task-runner/errors/serializable-error.ts @@ -1,3 +1,24 @@ +/** + * Makes the given error's `message` and `stack` properties enumerable + * so they can be serialized with JSON.stringify + */ +export function makeSerializable(error: Error) { + Object.defineProperties(error, { + message: { + value: error.message, + enumerable: true, + configurable: true, + }, + stack: { + value: error.stack, + enumerable: true, + configurable: true, + }, + }); + + return error; +} + /** * Error that has its message property serialized as well. Used to transport * errors over the wire. 
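The `makeSerializable` helper added above exists because an `Error`'s own `message` and `stack` properties are non-enumerable by default, so `JSON.stringify` silently drops them when a task error is reported over the wire. A minimal sketch of the effect, in plain Node.js outside the task-runner code:

```ts
// Sketch: why errors need makeSerializable before being serialized as JSON.
const err = new Error('boom');

// `message` and `stack` are non-enumerable own properties, so they vanish here:
console.log(JSON.stringify(err)); // -> '{}'

// Redefining them as enumerable (what makeSerializable does) keeps them in the payload:
Object.defineProperties(err, {
  message: { value: err.message, enumerable: true, configurable: true },
  stack: { value: err.stack, enumerable: true, configurable: true },
});
console.log(JSON.stringify(err)); // -> now includes both "message" and "stack"
```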
@@ -6,16 +27,6 @@ export abstract class SerializableError extends Error { constructor(message: string) { super(message); - // So it is serialized as well - this.makeMessageEnumerable(); - } - - private makeMessageEnumerable() { - Object.defineProperty(this, 'message', { - value: this.message, - enumerable: true, // This makes the message property enumerable - writable: true, - configurable: true, - }); + makeSerializable(this); } } diff --git a/packages/@n8n/task-runner/src/js-task-runner/js-task-runner.ts b/packages/@n8n/task-runner/src/js-task-runner/js-task-runner.ts index 5bf2e06f26..cf53ba425c 100644 --- a/packages/@n8n/task-runner/src/js-task-runner/js-task-runner.ts +++ b/packages/@n8n/task-runner/src/js-task-runner/js-task-runner.ts @@ -27,6 +27,7 @@ import { type Task, TaskRunner } from '@/task-runner'; import { isErrorLike } from './errors/error-like'; import { ExecutionError } from './errors/execution-error'; +import { makeSerializable } from './errors/serializable-error'; import type { RequireResolver } from './require-resolver'; import { createRequireResolver } from './require-resolver'; import { validateRunForAllItemsOutput, validateRunForEachItemOutput } from './result-validation'; @@ -181,6 +182,17 @@ export class JsTaskRunner extends TaskRunner { module: {}, console: customConsole, + // Exposed Node.js globals in vm2 + Buffer, + Function, + eval, + setTimeout, + setInterval, + setImmediate, + clearTimeout, + clearInterval, + clearImmediate, + items: inputItems, ...dataProxy, ...this.buildRpcCallObject(taskId), @@ -188,7 +200,7 @@ export class JsTaskRunner extends TaskRunner { try { const result = (await runInNewContext( - `module.exports = async function VmCodeWrapper() {${settings.code}\n}()`, + `globalThis.global = globalThis; module.exports = async function VmCodeWrapper() {${settings.code}\n}()`, context, )) as TaskResultData['result']; @@ -312,7 +324,7 @@ export class JsTaskRunner extends TaskRunner { private toExecutionErrorIfNeeded(error: unknown): Error { if (error instanceof Error) { - return error; + return makeSerializable(error); } if (isErrorLike(error)) { diff --git a/packages/cli/BREAKING-CHANGES.md b/packages/cli/BREAKING-CHANGES.md index bdb1ff5890..cf29f6d85f 100644 --- a/packages/cli/BREAKING-CHANGES.md +++ b/packages/cli/BREAKING-CHANGES.md @@ -2,6 +2,16 @@ This list shows all the versions which include breaking changes and how to upgrade. +# 1.65.0 + +### What changed? + +Queue polling via the env var `QUEUE_RECOVERY_INTERVAL` has been removed. + +### When is action necessary? + +If you have set the env var `QUEUE_RECOVERY_INTERVAL`, so you can remove it as it no longer has any effect. + # 1.63.0 ### What changed? diff --git a/packages/cli/bin/n8n b/packages/cli/bin/n8n index c4b593ccc9..c3355767af 100755 --- a/packages/cli/bin/n8n +++ b/packages/cli/bin/n8n @@ -18,21 +18,20 @@ if (process.argv.length === 2) { process.argv.push('start'); } -const nodeVersion = process.versions.node; -const { major, gte } = require('semver'); - -const MINIMUM_SUPPORTED_NODE_VERSION = '18.17.0'; -const ENFORCE_MIN_NODE_VERSION = process.env.E2E_TESTS !== 'true'; - -if ( - (ENFORCE_MIN_NODE_VERSION && !gte(nodeVersion, MINIMUM_SUPPORTED_NODE_VERSION)) || - ![18, 20, 22].includes(major(nodeVersion)) -) { - console.log(` - Your Node.js version ${nodeVersion} is currently not supported by n8n. - Please use Node.js v${MINIMUM_SUPPORTED_NODE_VERSION} (recommended), v20, or v22 instead! 
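The `js-task-runner.ts` hunk above passes `Buffer`, the timer functions, and `Function` into the sandbox context explicitly because a fresh `vm` context starts without any Node.js globals. A rough sketch of the mechanism using plain `node:vm` (the real runner additionally wires in data proxies, RPC helpers, and result validation):

```ts
// Minimal sketch: a new vm context only sees the globals you hand it.
import { runInNewContext } from 'node:vm';

// Without `Buffer` in this object, the sandboxed code would throw "Buffer is not defined".
const context = { Buffer, setTimeout, module: {} as { exports?: unknown } };

runInNewContext(
  // Same wrapper shape as the runner uses: alias `global`, then evaluate the user code.
  'globalThis.global = globalThis; module.exports = Buffer.from("test-buffer").toString()',
  context,
);

console.log(context.module.exports); // -> 'test-buffer'
```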
- `); - process.exit(1); +const ENFORCE_NODE_VERSION_RANGE = process.env.E2E_TESTS !== 'true'; +if (ENFORCE_NODE_VERSION_RANGE) { + const satisfies = require('semver/functions/satisfies'); + const nodeVersion = process.versions.node; + const { + engines: { node: supportedNodeVersions }, + } = require('../package.json'); + if (!satisfies(nodeVersion, supportedNodeVersions)) { + console.error(` +Your Node.js version ${nodeVersion} is currently not supported by n8n. +Please use a Node.js version that satisfies the following version range: ${supportedNodeVersions} +`); + process.exit(1); + } } // Disable nodejs custom inspection across the app diff --git a/packages/cli/package.json b/packages/cli/package.json index d7d92d96bc..6cdd3eba88 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -1,6 +1,6 @@ { "name": "n8n", - "version": "1.63.0", + "version": "1.64.0", "description": "n8n Workflow Automation Tool", "main": "dist/index", "types": "dist/index.d.ts", @@ -42,7 +42,7 @@ "workflow" ], "engines": { - "node": ">=18.10" + "node": ">=18.17 <= 22" }, "files": [ "bin", diff --git a/packages/cli/src/__tests__/license.test.ts b/packages/cli/src/__tests__/license.test.ts index 67a92b95cd..70aa80347a 100644 --- a/packages/cli/src/__tests__/license.test.ts +++ b/packages/cli/src/__tests__/license.test.ts @@ -1,3 +1,4 @@ +import type { GlobalConfig } from '@n8n/config'; import { LicenseManager } from '@n8n_io/license-sdk'; import { mock } from 'jest-mock-extended'; import type { InstanceSettings } from 'n8n-core'; @@ -31,7 +32,8 @@ describe('License', () => { }); beforeEach(async () => { - license = new License(mockLogger(), instanceSettings, mock(), mock(), mock()); + const globalConfig = mock({ multiMainSetup: { enabled: false } }); + license = new License(mockLogger(), instanceSettings, mock(), mock(), mock(), globalConfig); await license.init(); }); @@ -64,6 +66,7 @@ describe('License', () => { mock(), mock(), mock(), + mock(), ); await license.init(); expect(LicenseManager).toHaveBeenCalledWith( @@ -197,9 +200,9 @@ describe('License', () => { describe('in single-main setup', () => { describe('with `license.autoRenewEnabled` enabled', () => { it('should enable renewal', async () => { - config.set('multiMainSetup.enabled', false); + const globalConfig = mock({ multiMainSetup: { enabled: false } }); - await new License(mockLogger(), mock(), mock(), mock(), mock()).init(); + await new License(mockLogger(), mock(), mock(), mock(), mock(), globalConfig).init(); expect(LicenseManager).toHaveBeenCalledWith( expect.objectContaining({ autoRenewEnabled: true, renewOnInit: true }), @@ -211,7 +214,7 @@ describe('License', () => { it('should disable renewal', async () => { config.set('license.autoRenewEnabled', false); - await new License(mockLogger(), mock(), mock(), mock(), mock()).init(); + await new License(mockLogger(), mock(), mock(), mock(), mock(), mock()).init(); expect(LicenseManager).toHaveBeenCalledWith( expect.objectContaining({ autoRenewEnabled: false, renewOnInit: false }), @@ -225,11 +228,11 @@ describe('License', () => { test.each(['unset', 'leader', 'follower'])( 'if %s status, should disable removal', async (status) => { - config.set('multiMainSetup.enabled', true); + const globalConfig = mock({ multiMainSetup: { enabled: true } }); config.set('multiMainSetup.instanceType', status); config.set('license.autoRenewEnabled', false); - await new License(mockLogger(), mock(), mock(), mock(), mock()).init(); + await new License(mockLogger(), mock(), mock(), mock(), mock(), 
globalConfig).init(); expect(LicenseManager).toHaveBeenCalledWith( expect.objectContaining({ autoRenewEnabled: false, renewOnInit: false }), @@ -240,11 +243,11 @@ describe('License', () => { describe('with `license.autoRenewEnabled` enabled', () => { test.each(['unset', 'follower'])('if %s status, should disable removal', async (status) => { - config.set('multiMainSetup.enabled', true); + const globalConfig = mock({ multiMainSetup: { enabled: true } }); config.set('multiMainSetup.instanceType', status); config.set('license.autoRenewEnabled', false); - await new License(mockLogger(), mock(), mock(), mock(), mock()).init(); + await new License(mockLogger(), mock(), mock(), mock(), mock(), globalConfig).init(); expect(LicenseManager).toHaveBeenCalledWith( expect.objectContaining({ autoRenewEnabled: false, renewOnInit: false }), @@ -252,10 +255,10 @@ describe('License', () => { }); it('if leader status, should enable renewal', async () => { - config.set('multiMainSetup.enabled', true); + const globalConfig = mock({ multiMainSetup: { enabled: true } }); config.set('multiMainSetup.instanceType', 'leader'); - await new License(mockLogger(), mock(), mock(), mock(), mock()).init(); + await new License(mockLogger(), mock(), mock(), mock(), mock(), globalConfig).init(); expect(LicenseManager).toHaveBeenCalledWith( expect.objectContaining({ autoRenewEnabled: true, renewOnInit: true }), @@ -267,7 +270,7 @@ describe('License', () => { describe('reinit', () => { it('should reinitialize license manager', async () => { - const license = new License(mockLogger(), mock(), mock(), mock(), mock()); + const license = new License(mockLogger(), mock(), mock(), mock(), mock(), mock()); await license.init(); const initSpy = jest.spyOn(license, 'init'); diff --git a/packages/cli/src/__tests__/wait-tracker.test.ts b/packages/cli/src/__tests__/wait-tracker.test.ts index e51cd88ccb..66c26f00c6 100644 --- a/packages/cli/src/__tests__/wait-tracker.test.ts +++ b/packages/cli/src/__tests__/wait-tracker.test.ts @@ -3,7 +3,7 @@ import type { InstanceSettings } from 'n8n-core'; import type { ExecutionRepository } from '@/databases/repositories/execution.repository'; import type { IExecutionResponse } from '@/interfaces'; -import type { MultiMainSetup } from '@/services/orchestration/main/multi-main-setup.ee'; +import type { MultiMainSetup } from '@/scaling/multi-main-setup.ee'; import { OrchestrationService } from '@/services/orchestration.service'; import { WaitTracker } from '@/wait-tracker'; import { mockLogger } from '@test/mocking'; @@ -13,7 +13,7 @@ jest.useFakeTimers(); describe('WaitTracker', () => { const executionRepository = mock(); const multiMainSetup = mock(); - const orchestrationService = new OrchestrationService(mock(), mock(), multiMainSetup); + const orchestrationService = new OrchestrationService(mock(), multiMainSetup, mock()); const instanceSettings = mock({ isLeader: true }); const execution = mock({ diff --git a/packages/cli/src/abstract-server.ts b/packages/cli/src/abstract-server.ts index 4456470b86..f440b2879a 100644 --- a/packages/cli/src/abstract-server.ts +++ b/packages/cli/src/abstract-server.ts @@ -15,9 +15,9 @@ import { ExternalHooks } from '@/external-hooks'; import { Logger } from '@/logging/logger.service'; import { rawBodyReader, bodyParser, corsMiddleware } from '@/middlewares'; import { send, sendErrorResponse } from '@/response-helper'; -import { WaitingForms } from '@/waiting-forms'; import { LiveWebhooks } from '@/webhooks/live-webhooks'; import { TestWebhooks } from 
'@/webhooks/test-webhooks'; +import { WaitingForms } from '@/webhooks/waiting-forms'; import { WaitingWebhooks } from '@/webhooks/waiting-webhooks'; import { createWebhookHandlerFor } from '@/webhooks/webhook-request-handler'; diff --git a/packages/cli/src/active-workflow-manager.ts b/packages/cli/src/active-workflow-manager.ts index 4127909e49..189c446b65 100644 --- a/packages/cli/src/active-workflow-manager.ts +++ b/packages/cli/src/active-workflow-manager.ts @@ -48,6 +48,7 @@ import { WorkflowExecutionService } from '@/workflows/workflow-execution.service import { WorkflowStaticDataService } from '@/workflows/workflow-static-data.service'; import { ExecutionService } from './executions/execution.service'; +import { Publisher } from './scaling/pubsub/publisher.service'; interface QueuedActivation { activationMode: WorkflowActivateMode; @@ -75,6 +76,7 @@ export class ActiveWorkflowManager { private readonly activeWorkflowsService: ActiveWorkflowsService, private readonly workflowExecutionService: WorkflowExecutionService, private readonly instanceSettings: InstanceSettings, + private readonly publisher: Publisher, ) {} async init() { @@ -517,8 +519,9 @@ export class ActiveWorkflowManager { { shouldPublish } = { shouldPublish: true }, ) { if (this.orchestrationService.isMultiMainSetupEnabled && shouldPublish) { - await this.orchestrationService.publish('add-webhooks-triggers-and-pollers', { - workflowId, + void this.publisher.publishCommand({ + command: 'add-webhooks-triggers-and-pollers', + payload: { workflowId }, }); return; @@ -526,8 +529,8 @@ export class ActiveWorkflowManager { let workflow: Workflow; - const shouldAddWebhooks = this.orchestrationService.shouldAddWebhooks(activationMode); - const shouldAddTriggersAndPollers = this.orchestrationService.shouldAddTriggersAndPollers(); + const shouldAddWebhooks = this.shouldAddWebhooks(activationMode); + const shouldAddTriggersAndPollers = this.shouldAddTriggersAndPollers(); const shouldDisplayActivationMessage = (shouldAddWebhooks || shouldAddTriggersAndPollers) && @@ -717,7 +720,10 @@ export class ActiveWorkflowManager { ); } - await this.orchestrationService.publish('remove-triggers-and-pollers', { workflowId }); + void this.publisher.publishCommand({ + command: 'remove-triggers-and-pollers', + payload: { workflowId }, + }); return; } @@ -810,4 +816,29 @@ export class ActiveWorkflowManager { async removeActivationError(workflowId: string) { await this.activationErrorsService.deregister(workflowId); } + + /** + * Whether this instance may add webhooks to the `webhook_entity` table. + */ + shouldAddWebhooks(activationMode: WorkflowActivateMode) { + // Always try to populate the webhook entity table as well as register the webhooks + // to prevent issues with users upgrading from a version < 1.15, where the webhook entity + // was cleared on shutdown to anything past 1.28.0, where we stopped populating it on init, + // causing all webhooks to break + if (activationMode === 'init') return true; + + if (activationMode === 'leadershipChange') return false; + + return this.instanceSettings.isLeader; // 'update' or 'activate' + } + + /** + * Whether this instance may add triggers and pollers to memory. + * + * In both single- and multi-main setup, only the leader is allowed to manage + * triggers and pollers in memory, to ensure they are not duplicated. 
+ */ + shouldAddTriggersAndPollers() { + return this.instanceSettings.isLeader; + } } diff --git a/packages/cli/src/commands/base-command.ts b/packages/cli/src/commands/base-command.ts index 303b3cae3e..64ce401257 100644 --- a/packages/cli/src/commands/base-command.ts +++ b/packages/cli/src/commands/base-command.ts @@ -55,7 +55,8 @@ export abstract class BaseCommand extends Command { /** * How long to wait for graceful shutdown before force killing the process. */ - protected gracefulShutdownTimeoutInS = config.getEnv('generic.gracefulShutdownTimeout'); + protected gracefulShutdownTimeoutInS = + Container.get(GlobalConfig).generic.gracefulShutdownTimeout; /** Whether to init community packages (if enabled) */ protected needsCommunityPackages = false; diff --git a/packages/cli/src/commands/start.ts b/packages/cli/src/commands/start.ts index b46ef52ea4..7865739eec 100644 --- a/packages/cli/src/commands/start.ts +++ b/packages/cli/src/commands/start.ts @@ -1,5 +1,6 @@ /* eslint-disable @typescript-eslint/no-unsafe-call */ /* eslint-disable @typescript-eslint/no-unsafe-member-access */ +import { GlobalConfig } from '@n8n/config'; import { Flags } from '@oclif/core'; import glob from 'fast-glob'; import { createReadStream, createWriteStream, existsSync } from 'fs'; @@ -240,7 +241,7 @@ export class Start extends BaseCommand { } if ( - config.getEnv('multiMainSetup.enabled') && + Container.get(GlobalConfig).multiMainSetup.enabled && !Container.get(License).isMultipleMainInstancesLicensed() ) { throw new FeatureNotLicensedError(LICENSE_FEATURES.MULTIPLE_MAIN_INSTANCES); diff --git a/packages/cli/src/commands/webhook.ts b/packages/cli/src/commands/webhook.ts index d9d2f011fb..77ec770aa0 100644 --- a/packages/cli/src/commands/webhook.ts +++ b/packages/cli/src/commands/webhook.ts @@ -83,7 +83,7 @@ export class Webhook extends BaseCommand { } async run() { - if (config.getEnv('multiMainSetup.enabled')) { + if (this.globalConfig.multiMainSetup.enabled) { throw new ApplicationError( 'Webhook process cannot be started when multi-main setup is enabled.', ); diff --git a/packages/cli/src/config/index.ts b/packages/cli/src/config/index.ts index 0c799aa8a5..c9e34355ba 100644 --- a/packages/cli/src/config/index.ts +++ b/packages/cli/src/config/index.ts @@ -122,7 +122,7 @@ if (executionProcess === 'own') { } setGlobalState({ - defaultTimezone: config.getEnv('generic.timezone'), + defaultTimezone: Container.get(GlobalConfig).generic.timezone, }); // eslint-disable-next-line import/no-default-export diff --git a/packages/cli/src/config/schema.ts b/packages/cli/src/config/schema.ts index d2bb5297d4..e0e322210e 100644 --- a/packages/cli/src/config/schema.ts +++ b/packages/cli/src/config/schema.ts @@ -162,33 +162,6 @@ export const schema = { }, }, - generic: { - // The timezone to use. Is important for nodes like "Cron" which start the - // workflow automatically at a specified time. This setting can also be - // overwritten on a per workflow basis in the workflow settings in the - // editor. 
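The convict schema entries removed around this point (`generic.*` and, further down, `multiMainSetup.*`) are part of the migration to the typed `GlobalConfig` from `@n8n/config`, as the `base-command.ts`, `start.ts`, `webhook.ts`, and `config/index.ts` hunks above show. A hedged sketch of the resulting access pattern, with the `GlobalConfig` shape assumed only from the usages in this diff:

```ts
import { GlobalConfig } from '@n8n/config';
import { Container, Service } from 'typedi';

@Service()
class ExampleService {
  // Preferred: let the DI container inject the typed config object...
  constructor(private readonly globalConfig: GlobalConfig) {}

  gracefulShutdownTimeout() {
    // ...and read nested, typed properties instead of config.getEnv('generic.gracefulShutdownTimeout').
    return this.globalConfig.generic.gracefulShutdownTimeout;
  }
}

// Outside DI-managed classes, query the container directly, as config/index.ts does above:
const defaultTimezone = Container.get(GlobalConfig).generic.timezone;
```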
- timezone: { - doc: 'The timezone to use', - format: '*', - default: 'America/New_York', - env: 'GENERIC_TIMEZONE', - }, - - releaseChannel: { - doc: 'N8N release channel', - format: ['stable', 'beta', 'nightly', 'dev'] as const, - default: 'dev', - env: 'N8N_RELEASE_TYPE', - }, - - gracefulShutdownTimeout: { - doc: 'How long should n8n process wait for components to shut down before exiting the process (seconds)', - format: Number, - default: 30, - env: 'N8N_GRACEFUL_SHUTDOWN_TIMEOUT', - }, - }, - secure_cookie: { doc: 'This sets the `Secure` flag on n8n auth cookie', format: Boolean, @@ -564,27 +537,6 @@ export const schema = { }, }, - multiMainSetup: { - enabled: { - doc: 'Whether to enable multi-main setup for queue mode (license required)', - format: Boolean, - default: false, - env: 'N8N_MULTI_MAIN_SETUP_ENABLED', - }, - ttl: { - doc: 'Time to live (in seconds) for leader key in multi-main setup', - format: Number, - default: 10, - env: 'N8N_MULTI_MAIN_SETUP_KEY_TTL', - }, - interval: { - doc: 'Interval (in seconds) for leader check in multi-main setup', - format: Number, - default: 3, - env: 'N8N_MULTI_MAIN_SETUP_CHECK_INTERVAL', - }, - }, - proxy_hops: { format: Number, default: 0, diff --git a/packages/cli/src/controllers/__tests__/api-keys.controller.test.ts b/packages/cli/src/controllers/__tests__/api-keys.controller.test.ts index 81025fb2ca..3f34fc1d2c 100644 --- a/packages/cli/src/controllers/__tests__/api-keys.controller.test.ts +++ b/packages/cli/src/controllers/__tests__/api-keys.controller.test.ts @@ -1,66 +1,88 @@ import { mock } from 'jest-mock-extended'; -import { randomString } from 'n8n-workflow'; import { Container } from 'typedi'; import type { ApiKey } from '@/databases/entities/api-key'; import type { User } from '@/databases/entities/user'; -import { ApiKeyRepository } from '@/databases/repositories/api-key.repository'; +import { EventService } from '@/events/event.service'; import type { ApiKeysRequest, AuthenticatedRequest } from '@/requests'; -import { API_KEY_PREFIX } from '@/services/public-api-key.service'; +import { PublicApiKeyService } from '@/services/public-api-key.service'; import { mockInstance } from '@test/mocking'; import { ApiKeysController } from '../api-keys.controller'; describe('ApiKeysController', () => { - const apiKeysRepository = mockInstance(ApiKeyRepository); + const publicApiKeyService = mockInstance(PublicApiKeyService); + const eventService = mockInstance(EventService); const controller = Container.get(ApiKeysController); let req: AuthenticatedRequest; beforeAll(() => { - req = mock({ user: mock({ id: '123' }) }); + req = { user: { id: '123' } } as AuthenticatedRequest; }); describe('createAPIKey', () => { it('should create and save an API key', async () => { + // Arrange + const apiKeyData = { id: '123', userId: '123', label: 'My API Key', - apiKey: `${API_KEY_PREFIX}${randomString(42)}`, + apiKey: 'apiKey********', createdAt: new Date(), } as ApiKey; - apiKeysRepository.upsert.mockImplementation(); + const req = mock({ user: mock({ id: '123' }) }); - apiKeysRepository.findOneByOrFail.mockResolvedValue(apiKeyData); + publicApiKeyService.createPublicApiKeyForUser.mockResolvedValue(apiKeyData); + + // Act const newApiKey = await controller.createAPIKey(req); - expect(apiKeysRepository.upsert).toHaveBeenCalled(); + // Assert + + expect(publicApiKeyService.createPublicApiKeyForUser).toHaveBeenCalled(); expect(apiKeyData).toEqual(newApiKey); + expect(eventService.emit).toHaveBeenCalledWith( + 'public-api-key-created', + 
expect.objectContaining({ user: req.user, publicApi: false }), + ); }); }); describe('getAPIKeys', () => { it('should return the users api keys redacted', async () => { + // Arrange + const apiKeyData = { id: '123', userId: '123', label: 'My API Key', - apiKey: `${API_KEY_PREFIX}${randomString(42)}`, + apiKey: 'apiKey***', createdAt: new Date(), + updatedAt: new Date(), } as ApiKey; - apiKeysRepository.findBy.mockResolvedValue([apiKeyData]); + publicApiKeyService.getRedactedApiKeysForUser.mockResolvedValue([apiKeyData]); + + // Act const apiKeys = await controller.getAPIKeys(req); - expect(apiKeys[0].apiKey).not.toEqual(apiKeyData.apiKey); - expect(apiKeysRepository.findBy).toHaveBeenCalledWith({ userId: req.user.id }); + + // Assert + + expect(apiKeys).toEqual([apiKeyData]); + expect(publicApiKeyService.getRedactedApiKeysForUser).toHaveBeenCalledWith( + expect.objectContaining({ id: req.user.id }), + ); }); }); describe('deleteAPIKey', () => { it('should delete the API key', async () => { + // Arrange + const user = mock({ id: '123', password: 'password', @@ -68,12 +90,22 @@ describe('ApiKeysController', () => { role: 'global:member', mfaEnabled: false, }); + const req = mock({ user, params: { id: user.id } }); + + // Act + await controller.deleteAPIKey(req); - expect(apiKeysRepository.delete).toHaveBeenCalledWith({ - userId: req.user.id, - id: req.params.id, - }); + + publicApiKeyService.deleteApiKeyForUser.mockResolvedValue(); + + // Assert + + expect(publicApiKeyService.deleteApiKeyForUser).toHaveBeenCalledWith(user, user.id); + expect(eventService.emit).toHaveBeenCalledWith( + 'public-api-key-deleted', + expect.objectContaining({ user, publicApi: false }), + ); }); }); }); diff --git a/packages/cli/src/controllers/mfa.controller.ts b/packages/cli/src/controllers/mfa.controller.ts index f0af103265..694765761c 100644 --- a/packages/cli/src/controllers/mfa.controller.ts +++ b/packages/cli/src/controllers/mfa.controller.ts @@ -1,11 +1,21 @@ import { Get, Post, RestController } from '@/decorators'; import { BadRequestError } from '@/errors/response-errors/bad-request.error'; +import { ExternalHooks } from '@/external-hooks'; import { MfaService } from '@/mfa/mfa.service'; import { AuthenticatedRequest, MFA } from '@/requests'; @RestController('/mfa') export class MFAController { - constructor(private mfaService: MfaService) {} + constructor( + private mfaService: MfaService, + private externalHooks: ExternalHooks, + ) {} + + @Post('/can-enable') + async canEnableMFA(req: AuthenticatedRequest) { + await this.externalHooks.run('mfa.beforeSetup', [req.user]); + return; + } @Get('/qr') async getQRCode(req: AuthenticatedRequest) { @@ -52,6 +62,8 @@ export class MFAController { const { token = null } = req.body; const { id, mfaEnabled } = req.user; + await this.externalHooks.run('mfa.beforeSetup', [req.user]); + const { decryptedSecret: secret, decryptedRecoveryCodes: recoveryCodes } = await this.mfaService.getSecretAndRecoveryCodes(id); diff --git a/packages/cli/src/controllers/orchestration.controller.ts b/packages/cli/src/controllers/orchestration.controller.ts index db1d690a3e..14d38cfa43 100644 --- a/packages/cli/src/controllers/orchestration.controller.ts +++ b/packages/cli/src/controllers/orchestration.controller.ts @@ -1,31 +1,23 @@ import { Post, RestController, GlobalScope } from '@/decorators'; import { License } from '@/license'; -import { OrchestrationRequest } from '@/requests'; -import { OrchestrationService } from '@/services/orchestration.service'; +import { Publisher } from 
'@/scaling/pubsub/publisher.service'; @RestController('/orchestration') export class OrchestrationController { constructor( - private readonly orchestrationService: OrchestrationService, private readonly licenseService: License, + private readonly publisher: Publisher, ) {} /** - * These endpoints do not return anything, they just trigger the message to + * This endpoint does not return anything, it just triggers the message to * the workers to respond on Redis with their status. */ - @GlobalScope('orchestration:read') - @Post('/worker/status/:id') - async getWorkersStatus(req: OrchestrationRequest.Get) { - if (!this.licenseService.isWorkerViewLicensed()) return; - const id = req.params.id; - return await this.orchestrationService.getWorkerStatus(id); - } - @GlobalScope('orchestration:read') @Post('/worker/status') async getWorkersStatusAll() { if (!this.licenseService.isWorkerViewLicensed()) return; - return await this.orchestrationService.getWorkerStatus(); + + return await this.publisher.publishCommand({ command: 'get-worker-status' }); } } diff --git a/packages/cli/src/databases/migrations/common/1724753530828-CreateExecutionAnnotationTables.ts b/packages/cli/src/databases/migrations/common/1724753530828-CreateExecutionAnnotationTables.ts index 94c5e5ff68..58cd3cc3f1 100644 --- a/packages/cli/src/databases/migrations/common/1724753530828-CreateExecutionAnnotationTables.ts +++ b/packages/cli/src/databases/migrations/common/1724753530828-CreateExecutionAnnotationTables.ts @@ -25,7 +25,10 @@ export class CreateAnnotationTables1724753530828 implements ReversibleMigration .withIndexOn('name', true).withTimestamps; await createTable(annotationTagMappingsTableName) - .withColumns(column('annotationId').int.notNull, column('tagId').varchar(24).notNull) + .withColumns( + column('annotationId').int.notNull.primary, + column('tagId').varchar(24).notNull.primary, + ) .withForeignKey('annotationId', { tableName: annotationsTableName, columnName: 'id', diff --git a/packages/cli/src/databases/migrations/common/1728659839644-AddMissingPrimaryKeyOnAnnotationTagMapping.ts b/packages/cli/src/databases/migrations/common/1728659839644-AddMissingPrimaryKeyOnAnnotationTagMapping.ts new file mode 100644 index 0000000000..51735d660a --- /dev/null +++ b/packages/cli/src/databases/migrations/common/1728659839644-AddMissingPrimaryKeyOnAnnotationTagMapping.ts @@ -0,0 +1,23 @@ +import assert from 'node:assert'; + +import type { IrreversibleMigration, MigrationContext } from '@/databases/types'; + +export class AddMissingPrimaryKeyOnAnnotationTagMapping1728659839644 + implements IrreversibleMigration +{ + async up({ queryRunner, tablePrefix }: MigrationContext) { + // Check if the primary key already exists + const table = await queryRunner.getTable(`${tablePrefix}execution_annotation_tags`); + + assert(table, 'execution_annotation_tags table not found'); + + const hasPrimaryKey = table.primaryColumns.length > 0; + + if (!hasPrimaryKey) { + await queryRunner.createPrimaryKey(`${tablePrefix}execution_annotation_tags`, [ + 'annotationId', + 'tagId', + ]); + } + } +} diff --git a/packages/cli/src/databases/migrations/mysqldb/index.ts b/packages/cli/src/databases/migrations/mysqldb/index.ts index 1dcca1e592..d117a6c472 100644 --- a/packages/cli/src/databases/migrations/mysqldb/index.ts +++ b/packages/cli/src/databases/migrations/mysqldb/index.ts @@ -66,6 +66,7 @@ import { CreateAnnotationTables1724753530828 } from '../common/1724753530828-Cre import { AddApiKeysTable1724951148974 } from 
'../common/1724951148974-AddApiKeysTable'; import { CreateProcessedDataTable1726606152711 } from '../common/1726606152711-CreateProcessedDataTable'; import { SeparateExecutionCreationFromStart1727427440136 } from '../common/1727427440136-SeparateExecutionCreationFromStart'; +import { AddMissingPrimaryKeyOnAnnotationTagMapping1728659839644 } from '../common/1728659839644-AddMissingPrimaryKeyOnAnnotationTagMapping'; export const mysqlMigrations: Migration[] = [ InitialMigration1588157391238, @@ -134,4 +135,5 @@ export const mysqlMigrations: Migration[] = [ AddApiKeysTable1724951148974, SeparateExecutionCreationFromStart1727427440136, CreateProcessedDataTable1726606152711, + AddMissingPrimaryKeyOnAnnotationTagMapping1728659839644, ]; diff --git a/packages/cli/src/databases/migrations/postgresdb/index.ts b/packages/cli/src/databases/migrations/postgresdb/index.ts index eb0e2bd946..b55ce32750 100644 --- a/packages/cli/src/databases/migrations/postgresdb/index.ts +++ b/packages/cli/src/databases/migrations/postgresdb/index.ts @@ -66,6 +66,7 @@ import { CreateAnnotationTables1724753530828 } from '../common/1724753530828-Cre import { AddApiKeysTable1724951148974 } from '../common/1724951148974-AddApiKeysTable'; import { CreateProcessedDataTable1726606152711 } from '../common/1726606152711-CreateProcessedDataTable'; import { SeparateExecutionCreationFromStart1727427440136 } from '../common/1727427440136-SeparateExecutionCreationFromStart'; +import { AddMissingPrimaryKeyOnAnnotationTagMapping1728659839644 } from '../common/1728659839644-AddMissingPrimaryKeyOnAnnotationTagMapping'; export const postgresMigrations: Migration[] = [ InitialMigration1587669153312, @@ -134,4 +135,5 @@ export const postgresMigrations: Migration[] = [ AddApiKeysTable1724951148974, SeparateExecutionCreationFromStart1727427440136, CreateProcessedDataTable1726606152711, + AddMissingPrimaryKeyOnAnnotationTagMapping1728659839644, ]; diff --git a/packages/cli/src/databases/migrations/sqlite/1728659839644-AddMissingPrimaryKeyOnAnnotationTagMapping.ts b/packages/cli/src/databases/migrations/sqlite/1728659839644-AddMissingPrimaryKeyOnAnnotationTagMapping.ts new file mode 100644 index 0000000000..1a2900b7a6 --- /dev/null +++ b/packages/cli/src/databases/migrations/sqlite/1728659839644-AddMissingPrimaryKeyOnAnnotationTagMapping.ts @@ -0,0 +1,72 @@ +import assert from 'node:assert'; + +import type { IrreversibleMigration, MigrationContext } from '@/databases/types'; + +const annotationsTableName = 'execution_annotations'; +const annotationTagsTableName = 'annotation_tag_entity'; +const annotationTagMappingsTableName = 'execution_annotation_tags'; + +export class AddMissingPrimaryKeyOnAnnotationTagMapping1728659839644 + implements IrreversibleMigration +{ + async up({ + queryRunner, + tablePrefix, + schemaBuilder: { createTable, column, dropIndex }, + }: MigrationContext) { + // Check if the primary key already exists + const table = await queryRunner.getTable(`${tablePrefix}execution_annotation_tags`); + + assert(table, 'execution_annotation_tags table not found'); + + const hasPrimaryKey = table.primaryColumns.length > 0; + + // Do nothing if the primary key already exists + if (hasPrimaryKey) { + return; + } + + // SQLite doesn't support adding a primary key to an existing table + // So we have to do the following steps: + + // 1. 
Rename the existing table + await queryRunner.query( + `ALTER TABLE ${tablePrefix}${annotationTagMappingsTableName} RENAME TO ${tablePrefix}${annotationTagMappingsTableName}_tmp;`, + ); + + // 1.1 Drop the existing indices + await dropIndex(`${annotationTagMappingsTableName}_tmp`, ['tagId'], { + customIndexName: 'IDX_a3697779b366e131b2bbdae297', + }); + await dropIndex(`${annotationTagMappingsTableName}_tmp`, ['annotationId'], { + customIndexName: 'IDX_c1519757391996eb06064f0e7c', + }); + + // 2. Create a new table with the desired structure + await createTable(annotationTagMappingsTableName) + .withColumns( + column('annotationId').int.notNull.primary, + column('tagId').varchar(24).notNull.primary, + ) + .withForeignKey('annotationId', { + tableName: annotationsTableName, + columnName: 'id', + onDelete: 'CASCADE', + }) + .withIndexOn('tagId') + .withIndexOn('annotationId') + .withForeignKey('tagId', { + tableName: annotationTagsTableName, + columnName: 'id', + onDelete: 'CASCADE', + }); + + // 3. Copy data from the old table to the new one + await queryRunner.query( + `INSERT INTO ${tablePrefix}${annotationTagMappingsTableName} SELECT * FROM ${tablePrefix}${annotationTagMappingsTableName}_tmp;`, + ); + + // 4. Drop the old table + await queryRunner.dropTable(`${tablePrefix}${annotationTagMappingsTableName}_tmp`, true); + } +} diff --git a/packages/cli/src/databases/migrations/sqlite/index.ts b/packages/cli/src/databases/migrations/sqlite/index.ts index 797b26752c..c2c75cbb84 100644 --- a/packages/cli/src/databases/migrations/sqlite/index.ts +++ b/packages/cli/src/databases/migrations/sqlite/index.ts @@ -38,6 +38,7 @@ import { ExecutionSoftDelete1693491613982 } from './1693491613982-ExecutionSoftD import { DropRoleMapping1705429061930 } from './1705429061930-DropRoleMapping'; import { AddActivatedAtUserSetting1717498465931 } from './1717498465931-AddActivatedAtUserSetting'; import { AddApiKeysTable1724951148974 } from './1724951148974-AddApiKeysTable'; +import { AddMissingPrimaryKeyOnAnnotationTagMapping1728659839644 } from './1728659839644-AddMissingPrimaryKeyOnAnnotationTagMapping'; import { UniqueWorkflowNames1620821879465 } from '../common/1620821879465-UniqueWorkflowNames'; import { UpdateWorkflowCredentials1630330987096 } from '../common/1630330987096-UpdateWorkflowCredentials'; import { AddNodeIds1658930531669 } from '../common/1658930531669-AddNodeIds'; @@ -128,6 +129,7 @@ const sqliteMigrations: Migration[] = [ AddApiKeysTable1724951148974, SeparateExecutionCreationFromStart1727427440136, CreateProcessedDataTable1726606152711, + AddMissingPrimaryKeyOnAnnotationTagMapping1728659839644, ]; export { sqliteMigrations }; diff --git a/packages/cli/src/error-reporting.ts b/packages/cli/src/error-reporting.ts index e429bdbd30..d90229130f 100644 --- a/packages/cli/src/error-reporting.ts +++ b/packages/cli/src/error-reporting.ts @@ -3,6 +3,7 @@ import { GlobalConfig } from '@n8n/config'; import { QueryFailedError } from '@n8n/typeorm'; import { AxiosError } from 'axios'; import { createHash } from 'crypto'; +import { InstanceSettings } from 'n8n-core'; import { ErrorReporterProxy, ApplicationError } from 'n8n-workflow'; import Container from 'typedi'; @@ -30,7 +31,7 @@ export const initErrorHandling = async () => { DEPLOYMENT_NAME: serverName, } = process.env; - const { init, captureException } = await import('@sentry/node'); + const { init, captureException, setTag } = await import('@sentry/node'); const { RewriteFrames } = await import('@sentry/integrations'); const { Integrations } = 
await import('@sentry/node'); @@ -95,6 +96,8 @@ export const initErrorHandling = async () => { }, }); + setTag('server_type', Container.get(InstanceSettings).instanceType); + ErrorReporterProxy.init({ report: (error, options) => captureException(error, options), }); diff --git a/packages/cli/src/eventbus/message-event-bus/message-event-bus.ts b/packages/cli/src/eventbus/message-event-bus/message-event-bus.ts index 0f622c2317..3cf5a5a5d0 100644 --- a/packages/cli/src/eventbus/message-event-bus/message-event-bus.ts +++ b/packages/cli/src/eventbus/message-event-bus/message-event-bus.ts @@ -14,7 +14,7 @@ import { ExecutionRepository } from '@/databases/repositories/execution.reposito import { WorkflowRepository } from '@/databases/repositories/workflow.repository'; import { License } from '@/license'; import { Logger } from '@/logging/logger.service'; -import { OrchestrationService } from '@/services/orchestration.service'; +import { Publisher } from '@/scaling/pubsub/publisher.service'; import { ExecutionRecoveryService } from '../../executions/execution-recovery.service'; import type { EventMessageTypes } from '../event-message-classes/'; @@ -70,7 +70,7 @@ export class MessageEventBus extends EventEmitter { private readonly executionRepository: ExecutionRepository, private readonly eventDestinationsRepository: EventDestinationsRepository, private readonly workflowRepository: WorkflowRepository, - private readonly orchestrationService: OrchestrationService, + private readonly publisher: Publisher, private readonly recoveryService: ExecutionRecoveryService, private readonly license: License, private readonly globalConfig: GlobalConfig, @@ -210,7 +210,7 @@ export class MessageEventBus extends EventEmitter { this.destinations[destination.getId()] = destination; this.destinations[destination.getId()].startListening(); if (notifyWorkers) { - await this.orchestrationService.publish('restart-event-bus'); + void this.publisher.publishCommand({ command: 'restart-event-bus' }); } return destination; } @@ -236,7 +236,7 @@ export class MessageEventBus extends EventEmitter { delete this.destinations[id]; } if (notifyWorkers) { - await this.orchestrationService.publish('restart-event-bus'); + void this.publisher.publishCommand({ command: 'restart-event-bus' }); } return result; } diff --git a/packages/cli/src/events/relays/telemetry.event-relay.ts b/packages/cli/src/events/relays/telemetry.event-relay.ts index 4cf0690eec..9e00e2e055 100644 --- a/packages/cli/src/events/relays/telemetry.event-relay.ts +++ b/packages/cli/src/events/relays/telemetry.event-relay.ts @@ -780,7 +780,7 @@ export class TelemetryEventRelay extends EventRelay { license_plan_name: this.license.getPlanName(), license_tenant_id: config.getEnv('license.tenantId'), binary_data_s3: isS3Available && isS3Selected && isS3Licensed, - multi_main_setup_enabled: config.getEnv('multiMainSetup.enabled'), + multi_main_setup_enabled: this.globalConfig.multiMainSetup.enabled, metrics: { metrics_enabled: this.globalConfig.endpoints.metrics.enable, metrics_category_default: this.globalConfig.endpoints.metrics.includeDefaultMetrics, diff --git a/packages/cli/src/external-secrets/__tests__/external-secrets-manager.ee.test.ts b/packages/cli/src/external-secrets/__tests__/external-secrets-manager.ee.test.ts index 97547ecf13..b1a87271f9 100644 --- a/packages/cli/src/external-secrets/__tests__/external-secrets-manager.ee.test.ts +++ b/packages/cli/src/external-secrets/__tests__/external-secrets-manager.ee.test.ts @@ -55,6 +55,7 @@ describe('External Secrets 
Manager', () => { providersMock, cipher, mock(), + mock(), ); }); diff --git a/packages/cli/src/external-secrets/external-secrets-manager.ee.ts b/packages/cli/src/external-secrets/external-secrets-manager.ee.ts index e175f2969c..ec7c3ed0cf 100644 --- a/packages/cli/src/external-secrets/external-secrets-manager.ee.ts +++ b/packages/cli/src/external-secrets/external-secrets-manager.ee.ts @@ -1,6 +1,6 @@ import { Cipher } from 'n8n-core'; import { jsonParse, type IDataObject, ApplicationError } from 'n8n-workflow'; -import Container, { Service } from 'typedi'; +import { Service } from 'typedi'; import { SettingsRepository } from '@/databases/repositories/settings.repository'; import { EventService } from '@/events/event.service'; @@ -11,7 +11,7 @@ import type { } from '@/interfaces'; import { License } from '@/license'; import { Logger } from '@/logging/logger.service'; -import { OrchestrationService } from '@/services/orchestration.service'; +import { Publisher } from '@/scaling/pubsub/publisher.service'; import { EXTERNAL_SECRETS_INITIAL_BACKOFF, EXTERNAL_SECRETS_MAX_BACKOFF } from './constants'; import { updateIntervalTime } from './external-secrets-helper.ee'; @@ -38,6 +38,7 @@ export class ExternalSecretsManager { private readonly secretsProviders: ExternalSecretsProviders, private readonly cipher: Cipher, private readonly eventService: EventService, + private readonly publisher: Publisher, ) {} async init(): Promise { @@ -78,8 +79,8 @@ export class ExternalSecretsManager { } } - async broadcastReloadExternalSecretsProviders() { - await Container.get(OrchestrationService).publish('reload-external-secrets-providers'); + broadcastReloadExternalSecretsProviders() { + void this.publisher.publishCommand({ command: 'reload-external-secrets-providers' }); } private decryptSecretsSettings(value: string): ExternalSecretsSettings { @@ -280,7 +281,7 @@ export class ExternalSecretsManager { await this.saveAndSetSettings(settings, this.settingsRepo); this.cachedSettings = settings; await this.reloadProvider(provider); - await this.broadcastReloadExternalSecretsProviders(); + this.broadcastReloadExternalSecretsProviders(); void this.trackProviderSave(provider, isNewProvider, userId); } @@ -300,7 +301,7 @@ export class ExternalSecretsManager { this.cachedSettings = settings; await this.reloadProvider(provider); await this.updateSecrets(); - await this.broadcastReloadExternalSecretsProviders(); + this.broadcastReloadExternalSecretsProviders(); } private async trackProviderSave(vaultType: string, isNew: boolean, userId?: string) { @@ -380,7 +381,7 @@ export class ExternalSecretsManager { } try { await this.providers[provider].update(); - await this.broadcastReloadExternalSecretsProviders(); + this.broadcastReloadExternalSecretsProviders(); return true; } catch { return false; diff --git a/packages/cli/src/external-secrets/providers/azure-key-vault/azure-key-vault.ts b/packages/cli/src/external-secrets/providers/azure-key-vault/azure-key-vault.ts index c87c5e07d0..e753f0abbf 100644 --- a/packages/cli/src/external-secrets/providers/azure-key-vault/azure-key-vault.ts +++ b/packages/cli/src/external-secrets/providers/azure-key-vault/azure-key-vault.ts @@ -1,5 +1,4 @@ -import { ClientSecretCredential } from '@azure/identity'; -import { SecretClient } from '@azure/keyvault-secrets'; +import type { SecretClient } from '@azure/keyvault-secrets'; import type { INodeProperties } from 'n8n-workflow'; import { DOCS_HELP_NOTICE, EXTERNAL_SECRETS_NAME_REGEX } from '@/external-secrets/constants'; @@ -72,6 +71,9 @@ 
export class AzureKeyVault implements SecretsProvider { async connect() { const { vaultName, tenantId, clientId, clientSecret } = this.settings; + const { ClientSecretCredential } = await import('@azure/identity'); + const { SecretClient } = await import('@azure/keyvault-secrets'); + try { const credential = new ClientSecretCredential(tenantId, clientId, clientSecret); this.client = new SecretClient(`https://${vaultName}.vault.azure.net/`, credential); diff --git a/packages/cli/src/external-secrets/providers/gcp-secrets-manager/gcp-secrets-manager.ts b/packages/cli/src/external-secrets/providers/gcp-secrets-manager/gcp-secrets-manager.ts index c562139105..e6bcd11209 100644 --- a/packages/cli/src/external-secrets/providers/gcp-secrets-manager/gcp-secrets-manager.ts +++ b/packages/cli/src/external-secrets/providers/gcp-secrets-manager/gcp-secrets-manager.ts @@ -1,4 +1,4 @@ -import { SecretManagerServiceClient as GcpClient } from '@google-cloud/secret-manager'; +import type { SecretManagerServiceClient as GcpClient } from '@google-cloud/secret-manager'; import { jsonParse, type INodeProperties } from 'n8n-workflow'; import { DOCS_HELP_NOTICE, EXTERNAL_SECRETS_NAME_REGEX } from '@/external-secrets/constants'; @@ -45,6 +45,8 @@ export class GcpSecretsManager implements SecretsProvider { async connect() { const { projectId, privateKey, clientEmail } = this.settings; + const { SecretManagerServiceClient: GcpClient } = await import('@google-cloud/secret-manager'); + try { this.client = new GcpClient({ credentials: { client_email: clientEmail, private_key: privateKey }, diff --git a/packages/cli/src/license.ts b/packages/cli/src/license.ts index da7ab80313..36fb520e23 100644 --- a/packages/cli/src/license.ts +++ b/packages/cli/src/license.ts @@ -1,3 +1,4 @@ +import { GlobalConfig } from '@n8n/config'; import type { TEntitlement, TFeatures, TLicenseBlock } from '@n8n_io/license-sdk'; import { LicenseManager } from '@n8n_io/license-sdk'; import { InstanceSettings, ObjectStoreService } from 'n8n-core'; @@ -37,6 +38,7 @@ export class License { private readonly orchestrationService: OrchestrationService, private readonly settingsRepository: SettingsRepository, private readonly licenseMetricsService: LicenseMetricsService, + private readonly globalConfig: GlobalConfig, ) { this.logger = this.logger.withScope('license'); } @@ -54,7 +56,7 @@ export class License { * On becoming leader or follower, each will enable or disable renewal, respectively. * This ensures the mains do not cause a 429 (too many requests) on license init. 
*/ - if (config.getEnv('multiMainSetup.enabled')) { + if (this.globalConfig.multiMainSetup.enabled) { return autoRenewEnabled && this.instanceSettings.isLeader; } @@ -136,7 +138,7 @@ export class License { async onFeatureChange(_features: TFeatures): Promise { this.logger.debug('License feature change detected', _features); - if (config.getEnv('executions.mode') === 'queue' && config.getEnv('multiMainSetup.enabled')) { + if (config.getEnv('executions.mode') === 'queue' && this.globalConfig.multiMainSetup.enabled) { const isMultiMainLicensed = _features[LICENSE_FEATURES.MULTIPLE_MAIN_INSTANCES] as | boolean | undefined; diff --git a/packages/cli/src/permissions/global-roles.ts b/packages/cli/src/permissions/global-roles.ts index 6315c3c617..7ea1b575da 100644 --- a/packages/cli/src/permissions/global-roles.ts +++ b/packages/cli/src/permissions/global-roles.ts @@ -15,6 +15,7 @@ export const GLOBAL_OWNER_SCOPES: Scope[] = [ 'credential:list', 'credential:share', 'credential:move', + 'community:register', 'communityPackage:install', 'communityPackage:uninstall', 'communityPackage:update', diff --git a/packages/cli/src/public-api/index.ts b/packages/cli/src/public-api/index.ts index 1264f57496..92b3602828 100644 --- a/packages/cli/src/public-api/index.ts +++ b/packages/cli/src/public-api/index.ts @@ -3,16 +3,13 @@ import type { Router } from 'express'; import express from 'express'; import type { HttpError } from 'express-openapi-validator/dist/framework/types'; import fs from 'fs/promises'; -import type { OpenAPIV3 } from 'openapi-types'; import path from 'path'; import type { JsonObject } from 'swagger-ui-express'; import { Container } from 'typedi'; import validator from 'validator'; import YAML from 'yamljs'; -import { EventService } from '@/events/event.service'; import { License } from '@/license'; -import type { AuthenticatedRequest } from '@/requests'; import { PublicApiKeyService } from '@/services/public-api-key.service'; import { UrlService } from '@/services/url.service'; @@ -85,28 +82,7 @@ async function createApiRouter( }, validateSecurity: { handlers: { - ApiKeyAuth: async ( - req: AuthenticatedRequest, - _scopes: unknown, - schema: OpenAPIV3.ApiKeySecurityScheme, - ): Promise => { - const providedApiKey = req.headers[schema.name.toLowerCase()] as string; - - const user = await Container.get(PublicApiKeyService).getUserForApiKey(providedApiKey); - - if (!user) return false; - - Container.get(EventService).emit('public-api-invoked', { - userId: user.id, - path: req.path, - method: req.method, - apiVersion: version, - }); - - req.user = user; - - return true; - }, + ApiKeyAuth: Container.get(PublicApiKeyService).getAuthMiddleware(version), }, }, }), diff --git a/packages/cli/src/push/__tests__/index.test.ts b/packages/cli/src/push/__tests__/index.test.ts index 6230c63397..03457926b1 100644 --- a/packages/cli/src/push/__tests__/index.test.ts +++ b/packages/cli/src/push/__tests__/index.test.ts @@ -20,7 +20,7 @@ describe('Push', () => { test('should validate pushRef on requests for websocket backend', () => { config.set('push.backend', 'websocket'); - const push = new Push(mock()); + const push = new Push(mock(), mock()); const ws = mock(); const request = mock({ user, ws }); request.query = { pushRef: '' }; @@ -33,7 +33,7 @@ describe('Push', () => { test('should validate pushRef on requests for SSE backend', () => { config.set('push.backend', 'sse'); - const push = new Push(mock()); + const push = new Push(mock(), mock()); const request = mock({ user, ws: undefined }); request.query = 
{ pushRef: '' }; expect(() => push.handleRequest(request, mock())).toThrow(BadRequestError); diff --git a/packages/cli/src/push/index.ts b/packages/cli/src/push/index.ts index 232864968d..bfbfb43a51 100644 --- a/packages/cli/src/push/index.ts +++ b/packages/cli/src/push/index.ts @@ -12,6 +12,7 @@ import config from '@/config'; import type { User } from '@/databases/entities/user'; import { OnShutdown } from '@/decorators/on-shutdown'; import { BadRequestError } from '@/errors/response-errors/bad-request.error'; +import { Publisher } from '@/scaling/pubsub/publisher.service'; import { OrchestrationService } from '@/services/orchestration.service'; import { TypedEmitter } from '@/typed-emitter'; @@ -39,7 +40,10 @@ export class Push extends TypedEmitter { private backend = useWebSockets ? Container.get(WebSocketPush) : Container.get(SSEPush); - constructor(private readonly orchestrationService: OrchestrationService) { + constructor( + private readonly orchestrationService: OrchestrationService, + private readonly publisher: Publisher, + ) { super(); if (useWebSockets) this.backend.on('message', (msg) => this.emit('message', msg)); @@ -89,8 +93,10 @@ export class Push extends TypedEmitter { * relay the former's execution lifecycle events to the creator's frontend. */ if (this.orchestrationService.isMultiMainSetupEnabled && !this.backend.hasPushRef(pushRef)) { - const payload = { type, args: data, pushRef }; - void this.orchestrationService.publish('relay-execution-lifecycle-event', payload); + void this.publisher.publishCommand({ + command: 'relay-execution-lifecycle-event', + payload: { type, args: data, pushRef }, + }); return; } diff --git a/packages/cli/src/requests.ts b/packages/cli/src/requests.ts index e25a244f5f..ffc04925a3 100644 --- a/packages/cli/src/requests.ts +++ b/packages/cli/src/requests.ts @@ -478,15 +478,6 @@ export declare namespace ExternalSecretsRequest { type UpdateProvider = AuthenticatedRequest<{ provider: string }>; } -// ---------------------------------- -// /orchestration -// ---------------------------------- -// -export declare namespace OrchestrationRequest { - type GetAll = AuthenticatedRequest; - type Get = AuthenticatedRequest<{ id: string }, {}, {}, {}>; -} - // ---------------------------------- // /workflow-history // ---------------------------------- diff --git a/packages/cli/src/runners/task-runner-process.ts b/packages/cli/src/runners/task-runner-process.ts index 857d581127..a3bc118387 100644 --- a/packages/cli/src/runners/task-runner-process.ts +++ b/packages/cli/src/runners/task-runner-process.ts @@ -23,10 +23,14 @@ export class TaskRunnerProcess { return this.process?.pid; } + /** Promise that resolves when the process has exited */ + public get runPromise() { + return this._runPromise; + } + private process: ChildProcess | null = null; - /** Promise that resolves after the process has exited */ - private runPromise: Promise | null = null; + private _runPromise: Promise | null = null; private isShuttingDown = false; @@ -97,7 +101,7 @@ export class TaskRunnerProcess { } else { this.killNode(); } - await this.runPromise; + await this._runPromise; this.isShuttingDown = false; } @@ -128,7 +132,7 @@ export class TaskRunnerProcess { } private monitorProcess(taskRunnerProcess: ChildProcess) { - this.runPromise = new Promise((resolve) => { + this._runPromise = new Promise((resolve) => { taskRunnerProcess.on('exit', (code) => { this.onProcessExit(code, resolve); }); diff --git a/packages/cli/src/scaling/constants.ts b/packages/cli/src/scaling/constants.ts 
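The `Push` change above repeats a pattern that runs through this diff (`ActiveWorkflowManager`, `MessageEventBus`, `ExternalSecretsManager`): instead of awaiting `orchestrationService.publish(...)`, services now take the `Publisher` and fire a `publishCommand` without awaiting it. A sketch of the shape, with the command and payload taken from the push hunk above (the surrounding class is illustrative only):

```ts
import { Service } from 'typedi';

import { Publisher } from '@/scaling/pubsub/publisher.service';

@Service()
class ExampleRelay {
  constructor(private readonly publisher: Publisher) {}

  relay(type: string, args: unknown, pushRef: string) {
    // Fire-and-forget: `void` makes the intentional non-awaiting explicit.
    void this.publisher.publishCommand({
      command: 'relay-execution-lifecycle-event',
      payload: { type, args, pushRef },
    });
  }
}
```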
index 348f156896..e56596e4a0 100644 --- a/packages/cli/src/scaling/constants.ts +++ b/packages/cli/src/scaling/constants.ts @@ -20,4 +20,7 @@ export const SELF_SEND_COMMANDS = new Set([ * Commands that should not be debounced when received, e.g. during webhook handling in * multi-main setup. */ -export const IMMEDIATE_COMMANDS = new Set(['relay-execution-lifecycle-event']); +export const IMMEDIATE_COMMANDS = new Set([ + 'add-webhooks-triggers-and-pollers', + 'relay-execution-lifecycle-event', +]); diff --git a/packages/cli/src/scaling/job-processor.ts b/packages/cli/src/scaling/job-processor.ts index 1322beac27..7c189baf0d 100644 --- a/packages/cli/src/scaling/job-processor.ts +++ b/packages/cli/src/scaling/job-processor.ts @@ -1,7 +1,12 @@ import type { RunningJobSummary } from '@n8n/api-types'; import { InstanceSettings, WorkflowExecute } from 'n8n-core'; -import { BINARY_ENCODING, ApplicationError, Workflow } from 'n8n-workflow'; import type { ExecutionStatus, IExecuteResponsePromiseData, IRun } from 'n8n-workflow'; +import { + BINARY_ENCODING, + ApplicationError, + Workflow, + ErrorReporterProxy as ErrorReporter, +} from 'n8n-workflow'; import type PCancelable from 'p-cancelable'; import { Service } from 'typedi'; @@ -143,6 +148,7 @@ export class JobProcessor { workflowExecute = new WorkflowExecute(additionalData, execution.mode, execution.data); workflowRun = workflowExecute.processRunExecutionData(workflow); } else { + ErrorReporter.info(`Worker found execution ${executionId} without data`); // Execute all nodes // Can execute without webhook so go on workflowExecute = new WorkflowExecute(additionalData, execution.mode); diff --git a/packages/cli/src/services/orchestration/main/multi-main-setup.ee.ts b/packages/cli/src/scaling/multi-main-setup.ee.ts similarity index 78% rename from packages/cli/src/services/orchestration/main/multi-main-setup.ee.ts rename to packages/cli/src/scaling/multi-main-setup.ee.ts index 034a214765..76c964fc4f 100644 --- a/packages/cli/src/services/orchestration/main/multi-main-setup.ee.ts +++ b/packages/cli/src/scaling/multi-main-setup.ee.ts @@ -1,3 +1,4 @@ +import { GlobalConfig } from '@n8n/config'; import { InstanceSettings } from 'n8n-core'; import { Service } from 'typedi'; @@ -9,10 +10,22 @@ import { RedisClientService } from '@/services/redis-client.service'; import { TypedEmitter } from '@/typed-emitter'; type MultiMainEvents = { + /** + * Emitted when this instance loses leadership. In response, its various + * services will stop triggers, pollers, pruning, wait-tracking, license + * renewal, queue recovery, etc. + */ 'leader-stepdown': never; + + /** + * Emitted when this instance gains leadership. In response, its various + * services will start triggers, pollers, pruning, wait-tracking, license + * renewal, queue recovery, etc. + */ 'leader-takeover': never; }; +/** Designates leader and followers when running multiple main processes. 
*/ @Service() export class MultiMainSetup extends TypedEmitter { constructor( @@ -20,13 +33,15 @@ export class MultiMainSetup extends TypedEmitter { private readonly instanceSettings: InstanceSettings, private readonly publisher: Publisher, private readonly redisClientService: RedisClientService, + private readonly globalConfig: GlobalConfig, ) { super(); + this.logger = this.logger.withScope('scaling'); } private leaderKey: string; - private readonly leaderKeyTtl = config.getEnv('multiMainSetup.ttl'); + private readonly leaderKeyTtl = this.globalConfig.multiMainSetup.ttl; private leaderCheckInterval: NodeJS.Timer | undefined; @@ -39,7 +54,7 @@ export class MultiMainSetup extends TypedEmitter { this.leaderCheckInterval = setInterval(async () => { await this.checkLeader(); - }, config.getEnv('multiMainSetup.interval') * TIME.SECOND); + }, this.globalConfig.multiMainSetup.interval * TIME.SECOND); } async shutdown() { @@ -69,7 +84,7 @@ export class MultiMainSetup extends TypedEmitter { if (this.instanceSettings.isLeader) { this.instanceSettings.markAsFollower(); - this.emit('leader-stepdown'); // lost leadership - stop triggers, pollers, pruning, wait-tracking, queue recovery + this.emit('leader-stepdown'); this.logger.warn('[Multi-main setup] Leader failed to renew leader key'); } @@ -84,9 +99,6 @@ export class MultiMainSetup extends TypedEmitter { this.instanceSettings.markAsFollower(); - /** - * Lost leadership - stop triggers, pollers, pruning, wait tracking, license renewal, queue recovery - */ this.emit('leader-stepdown'); await this.tryBecomeLeader(); @@ -106,9 +118,6 @@ export class MultiMainSetup extends TypedEmitter { await this.publisher.setExpiration(this.leaderKey, this.leaderKeyTtl); - /** - * Gained leadership - start triggers, pollers, pruning, wait-tracking, license renewal, queue recovery - */ this.emit('leader-takeover'); } else { this.instanceSettings.markAsFollower(); diff --git a/packages/cli/src/services/__tests__/orchestration.service.test.ts b/packages/cli/src/services/__tests__/orchestration.service.test.ts index 0169462891..a8e72c49bf 100644 --- a/packages/cli/src/services/__tests__/orchestration.service.test.ts +++ b/packages/cli/src/services/__tests__/orchestration.service.test.ts @@ -1,7 +1,6 @@ import type Redis from 'ioredis'; import { mock } from 'jest-mock-extended'; import { InstanceSettings } from 'n8n-core'; -import type { WorkflowActivateMode } from 'n8n-workflow'; import Container from 'typedi'; import { ActiveWorkflowManager } from '@/active-workflow-manager'; @@ -45,35 +44,4 @@ describe('Orchestration Service', () => { // @ts-expect-error Private field expect(os.publisher).toBeDefined(); }); - - describe('shouldAddWebhooks', () => { - test('should return true for init', () => { - // We want to ensure that webhooks are populated on init - // more https://github.com/n8n-io/n8n/pull/8830 - const result = os.shouldAddWebhooks('init'); - expect(result).toBe(true); - }); - - test('should return false for leadershipChange', () => { - const result = os.shouldAddWebhooks('leadershipChange'); - expect(result).toBe(false); - }); - - test('should return true for update or activate when is leader', () => { - const modes = ['update', 'activate'] as WorkflowActivateMode[]; - for (const mode of modes) { - const result = os.shouldAddWebhooks(mode); - expect(result).toBe(true); - } - }); - - test('should return false for update or activate when not leader', () => { - instanceSettings.markAsFollower(); - const modes = ['update', 'activate'] as WorkflowActivateMode[]; - 
for (const mode of modes) { - const result = os.shouldAddWebhooks(mode); - expect(result).toBe(false); - } - }); - }); }); diff --git a/packages/cli/src/services/__tests__/public-api-key.service.test.ts b/packages/cli/src/services/__tests__/public-api-key.service.test.ts new file mode 100644 index 0000000000..7c60b62983 --- /dev/null +++ b/packages/cli/src/services/__tests__/public-api-key.service.test.ts @@ -0,0 +1,147 @@ +import { mock } from 'jest-mock-extended'; +import type { InstanceSettings } from 'n8n-core'; +import type { OpenAPIV3 } from 'openapi-types'; + +import { ApiKeyRepository } from '@/databases/repositories/api-key.repository'; +import { UserRepository } from '@/databases/repositories/user.repository'; +import { getConnection } from '@/db'; +import type { EventService } from '@/events/event.service'; +import type { AuthenticatedRequest } from '@/requests'; +import { createOwnerWithApiKey } from '@test-integration/db/users'; +import * as testDb from '@test-integration/test-db'; + +import { JwtService } from '../jwt.service'; +import { PublicApiKeyService } from '../public-api-key.service'; + +const mockReqWith = (apiKey: string, path: string, method: string) => { + return mock({ + path, + method, + headers: { + 'x-n8n-api-key': apiKey, + }, + }); +}; + +const instanceSettings = mock({ encryptionKey: 'test-key' }); + +const eventService = mock(); + +const securitySchema = mock({ + name: 'X-N8N-API-KEY', +}); + +const jwtService = new JwtService(instanceSettings); + +let userRepository: UserRepository; +let apiKeyRepository: ApiKeyRepository; + +describe('PublicApiKeyService', () => { + beforeEach(async () => { + await testDb.truncate(['User']); + jest.clearAllMocks(); + }); + + beforeAll(async () => { + await testDb.init(); + userRepository = new UserRepository(getConnection()); + apiKeyRepository = new ApiKeyRepository(getConnection()); + }); + + afterAll(async () => { + await testDb.terminate(); + }); + + describe('getAuthMiddleware', () => { + it('should return false if api key is invalid', async () => { + //Arrange + + const apiKey = 'invalid'; + const path = '/test'; + const method = 'GET'; + const apiVersion = 'v1'; + + const publicApiKeyService = new PublicApiKeyService( + apiKeyRepository, + userRepository, + jwtService, + eventService, + ); + + const middleware = publicApiKeyService.getAuthMiddleware(apiVersion); + + //Act + + const response = await middleware(mockReqWith(apiKey, path, method), {}, securitySchema); + + //Assert + + expect(response).toBe(false); + }); + + it('should return false if valid api key is not in database', async () => { + //Arrange + + const apiKey = jwtService.sign({ sub: '123' }); + const path = '/test'; + const method = 'GET'; + const apiVersion = 'v1'; + + const publicApiKeyService = new PublicApiKeyService( + apiKeyRepository, + userRepository, + jwtService, + eventService, + ); + + const middleware = publicApiKeyService.getAuthMiddleware(apiVersion); + + //Act + + const response = await middleware(mockReqWith(apiKey, path, method), {}, securitySchema); + + //Assert + + expect(response).toBe(false); + }); + + it('should return true if valid api key exist in the database', async () => { + //Arrange + + const path = '/test'; + const method = 'GET'; + const apiVersion = 'v1'; + + const publicApiKeyService = new PublicApiKeyService( + apiKeyRepository, + userRepository, + jwtService, + eventService, + ); + + const owner = await createOwnerWithApiKey(); + + const [{ apiKey }] = owner.apiKeys; + + const middleware = 
publicApiKeyService.getAuthMiddleware(apiVersion); + + //Act + + const response = await middleware(mockReqWith(apiKey, path, method), {}, securitySchema); + + //Assert + + expect(response).toBe(true); + expect(eventService.emit).toHaveBeenCalledTimes(1); + expect(eventService.emit).toHaveBeenCalledWith( + 'public-api-invoked', + expect.objectContaining({ + userId: owner.id, + path, + method, + apiVersion: 'v1', + }), + ); + }); + }); +}); diff --git a/packages/cli/src/services/community-packages.service.ts b/packages/cli/src/services/community-packages.service.ts index b157119cf2..4906a6ef33 100644 --- a/packages/cli/src/services/community-packages.service.ts +++ b/packages/cli/src/services/community-packages.service.ts @@ -23,10 +23,9 @@ import type { CommunityPackages } from '@/interfaces'; import { License } from '@/license'; import { LoadNodesAndCredentials } from '@/load-nodes-and-credentials'; import { Logger } from '@/logging/logger.service'; +import { Publisher } from '@/scaling/pubsub/publisher.service'; import { toError } from '@/utils'; -import { OrchestrationService } from './orchestration.service'; - const DEFAULT_REGISTRY = 'https://registry.npmjs.org'; const { @@ -60,7 +59,7 @@ export class CommunityPackagesService { private readonly logger: Logger, private readonly installedPackageRepository: InstalledPackagesRepository, private readonly loadNodesAndCredentials: LoadNodesAndCredentials, - private readonly orchestrationService: OrchestrationService, + private readonly publisher: Publisher, private readonly license: License, private readonly globalConfig: GlobalConfig, ) {} @@ -322,7 +321,10 @@ export class CommunityPackagesService { async removePackage(packageName: string, installedPackage: InstalledPackages): Promise { await this.removeNpmPackage(packageName); await this.removePackageFromDatabase(installedPackage); - await this.orchestrationService.publish('community-package-uninstall', { packageName }); + void this.publisher.publishCommand({ + command: 'community-package-uninstall', + payload: { packageName }, + }); } private getNpmRegistry() { @@ -368,10 +370,10 @@ export class CommunityPackagesService { await this.removePackageFromDatabase(options.installedPackage); } const installedPackage = await this.persistInstalledPackage(loader); - await this.orchestrationService.publish( - isUpdate ? 'community-package-update' : 'community-package-install', - { packageName, packageVersion }, - ); + void this.publisher.publishCommand({ + command: isUpdate ? 
'community-package-update' : 'community-package-install', + payload: { packageName, packageVersion }, + }); await this.loadNodesAndCredentials.postProcessLoaders(); this.logger.info(`Community package installed: ${packageName}`); return installedPackage; diff --git a/packages/cli/src/services/frontend.service.ts b/packages/cli/src/services/frontend.service.ts index a83158e96e..ef3ed5a5f9 100644 --- a/packages/cli/src/services/frontend.service.ts +++ b/packages/cli/src/services/frontend.service.ts @@ -96,7 +96,7 @@ export class FrontendService { executionTimeout: config.getEnv('executions.timeout'), maxExecutionTimeout: config.getEnv('executions.maxTimeout'), workflowCallerPolicyDefaultOption: this.globalConfig.workflows.callerPolicyDefaultOption, - timezone: config.getEnv('generic.timezone'), + timezone: this.globalConfig.generic.timezone, urlBaseWebhook: this.urlService.getWebhookBaseUrl(), urlBaseEditor: instanceBaseUrl, binaryDataMode: config.getEnv('binaryDataManager.mode'), @@ -106,7 +106,7 @@ export class FrontendService { authCookie: { secure: config.getEnv('secure_cookie'), }, - releaseChannel: config.getEnv('generic.releaseChannel'), + releaseChannel: this.globalConfig.generic.releaseChannel, oauthCallbackUrls: { oauth1: `${instanceBaseUrl}/${restEndpoint}/oauth1-credential/callback`, oauth2: `${instanceBaseUrl}/${restEndpoint}/oauth2-credential/callback`, diff --git a/packages/cli/src/services/orchestration.service.ts b/packages/cli/src/services/orchestration.service.ts index 64dbd0ddae..225badbf18 100644 --- a/packages/cli/src/services/orchestration.service.ts +++ b/packages/cli/src/services/orchestration.service.ts @@ -1,21 +1,19 @@ +import { GlobalConfig } from '@n8n/config'; import { InstanceSettings } from 'n8n-core'; -import type { WorkflowActivateMode } from 'n8n-workflow'; import Container, { Service } from 'typedi'; import config from '@/config'; -import type { PubSubCommandMap } from '@/events/maps/pub-sub.event-map'; -import { Logger } from '@/logging/logger.service'; import type { Publisher } from '@/scaling/pubsub/publisher.service'; import type { Subscriber } from '@/scaling/pubsub/subscriber.service'; -import { MultiMainSetup } from './orchestration/main/multi-main-setup.ee'; +import { MultiMainSetup } from '../scaling/multi-main-setup.ee'; @Service() export class OrchestrationService { constructor( - private readonly logger: Logger, readonly instanceSettings: InstanceSettings, readonly multiMainSetup: MultiMainSetup, + readonly globalConfig: GlobalConfig, ) {} private publisher: Publisher; @@ -33,7 +31,7 @@ export class OrchestrationService { get isMultiMainSetupEnabled() { return ( config.getEnv('executions.mode') === 'queue' && - config.getEnv('multiMainSetup.enabled') && + this.globalConfig.multiMainSetup.enabled && this.instanceSettings.instanceType === 'main' && this.isMultiMainSetupLicensed ); @@ -78,68 +76,4 @@ export class OrchestrationService { this.isInitialized = false; } - - // ---------------------------------- - // pubsub - // ---------------------------------- - - async publish( - commandKey: CommandKey, - payload?: PubSubCommandMap[CommandKey], - ) { - if (!this.sanityCheck()) return; - - this.logger.debug( - `[Instance ID ${this.instanceSettings.hostId}] Publishing command "${commandKey}"`, - payload, - ); - - await this.publisher.publishCommand({ command: commandKey, payload }); - } - - // ---------------------------------- - // workers status - // ---------------------------------- - - async getWorkerStatus(id?: string) { - if 
(!this.sanityCheck()) return; - - const command = 'get-worker-status'; - - this.logger.debug(`Sending "${command}" to command channel`); - - await this.publisher.publishCommand({ - command, - targets: id ? [id] : undefined, - }); - } - - // ---------------------------------- - // activations - // ---------------------------------- - - /** - * Whether this instance may add webhooks to the `webhook_entity` table. - */ - shouldAddWebhooks(activationMode: WorkflowActivateMode) { - // Always try to populate the webhook entity table as well as register the webhooks - // to prevent issues with users upgrading from a version < 1.15, where the webhook entity - // was cleared on shutdown to anything past 1.28.0, where we stopped populating it on init, - // causing all webhooks to break - if (activationMode === 'init') return true; - - if (activationMode === 'leadershipChange') return false; - - return this.instanceSettings.isLeader; // 'update' or 'activate' - } - - /** - * Whether this instance may add triggers and pollers to memory. - * - * In both single- and multi-main setup, only the leader is allowed to manage - * triggers and pollers in memory, to ensure they are not duplicated. - */ - shouldAddTriggersAndPollers() { - return this.instanceSettings.isLeader; - } } diff --git a/packages/cli/src/services/pruning.service.ts b/packages/cli/src/services/pruning.service.ts index e9ceab5434..0859dddd39 100644 --- a/packages/cli/src/services/pruning.service.ts +++ b/packages/cli/src/services/pruning.service.ts @@ -1,3 +1,4 @@ +import { GlobalConfig } from '@n8n/config'; import { BinaryDataService, InstanceSettings } from 'n8n-core'; import { jsonStringify } from 'n8n-workflow'; import { Service } from 'typedi'; @@ -31,6 +32,7 @@ export class PruningService { private readonly executionRepository: ExecutionRepository, private readonly binaryDataService: BinaryDataService, private readonly orchestrationService: OrchestrationService, + private readonly globalConfig: GlobalConfig, ) {} /** @@ -54,7 +56,7 @@ export class PruningService { return false; } - if (config.getEnv('multiMainSetup.enabled') && instanceType === 'main' && isFollower) { + if (this.globalConfig.multiMainSetup.enabled && instanceType === 'main' && isFollower) { return false; } diff --git a/packages/cli/src/services/public-api-key.service.ts b/packages/cli/src/services/public-api-key.service.ts index e689f3c019..bca3cd0d62 100644 --- a/packages/cli/src/services/public-api-key.service.ts +++ b/packages/cli/src/services/public-api-key.service.ts @@ -1,16 +1,28 @@ -import { randomBytes } from 'node:crypto'; -import Container, { Service } from 'typedi'; +import type { OpenAPIV3 } from 'openapi-types'; +import { Service } from 'typedi'; import { ApiKey } from '@/databases/entities/api-key'; import type { User } from '@/databases/entities/user'; import { ApiKeyRepository } from '@/databases/repositories/api-key.repository'; import { UserRepository } from '@/databases/repositories/user.repository'; +import { EventService } from '@/events/event.service'; +import type { AuthenticatedRequest } from '@/requests'; -export const API_KEY_PREFIX = 'n8n_api_'; +import { JwtService } from './jwt.service'; + +const API_KEY_AUDIENCE = 'public-api'; +const API_KEY_ISSUER = 'n8n'; +const REDACT_API_KEY_REVEAL_COUNT = 15; +const REDACT_API_KEY_MAX_LENGTH = 80; @Service() export class PublicApiKeyService { - constructor(private readonly apiKeyRepository: ApiKeyRepository) {} + constructor( + private readonly apiKeyRepository: ApiKeyRepository, + private 
readonly userRepository: UserRepository, + private readonly jwtService: JwtService, + private readonly eventService: EventService, + ) {} /** * Creates a new public API key for the specified user. @@ -18,7 +30,7 @@ export class PublicApiKeyService { * @returns A promise that resolves to the newly created API key. */ async createPublicApiKeyForUser(user: User) { - const apiKey = this.createApiKeyString(); + const apiKey = this.generateApiKey(user); await this.apiKeyRepository.upsert( this.apiKeyRepository.create({ userId: user.id, @@ -48,8 +60,8 @@ export class PublicApiKeyService { await this.apiKeyRepository.delete({ userId: user.id, id: apiKeyId }); } - async getUserForApiKey(apiKey: string) { - return await Container.get(UserRepository) + private async getUserForApiKey(apiKey: string) { + return await this.userRepository .createQueryBuilder('user') .innerJoin(ApiKey, 'apiKey', 'apiKey.userId = user.id') .where('apiKey.apiKey = :apiKey', { apiKey }) @@ -68,13 +80,39 @@ export class PublicApiKeyService { * ``` */ redactApiKey(apiKey: string) { - const keepLength = 5; - return ( - API_KEY_PREFIX + - apiKey.slice(API_KEY_PREFIX.length, API_KEY_PREFIX.length + keepLength) + - '*'.repeat(apiKey.length - API_KEY_PREFIX.length - keepLength) - ); + const visiblePart = apiKey.slice(0, REDACT_API_KEY_REVEAL_COUNT); + const redactedPart = '*'.repeat(apiKey.length - REDACT_API_KEY_REVEAL_COUNT); + + const completeRedactedApiKey = visiblePart + redactedPart; + + return completeRedactedApiKey.slice(0, REDACT_API_KEY_MAX_LENGTH); } - createApiKeyString = () => `${API_KEY_PREFIX}${randomBytes(40).toString('hex')}`; + getAuthMiddleware(version: string) { + return async ( + req: AuthenticatedRequest, + _scopes: unknown, + schema: OpenAPIV3.ApiKeySecurityScheme, + ): Promise => { + const providedApiKey = req.headers[schema.name.toLowerCase()] as string; + + const user = await this.getUserForApiKey(providedApiKey); + + if (!user) return false; + + this.eventService.emit('public-api-invoked', { + userId: user.id, + path: req.path, + method: req.method, + apiVersion: version, + }); + + req.user = user; + + return true; + }; + } + + private generateApiKey = (user: User) => + this.jwtService.sign({ sub: user.id, iss: API_KEY_ISSUER, aud: API_KEY_AUDIENCE }); } diff --git a/packages/cli/src/waiting-forms.ts b/packages/cli/src/waiting-forms.ts deleted file mode 100644 index 6f57e1d8fd..0000000000 --- a/packages/cli/src/waiting-forms.ts +++ /dev/null @@ -1,19 +0,0 @@ -import { Service } from 'typedi'; - -import type { IExecutionResponse } from '@/interfaces'; -import { WaitingWebhooks } from '@/webhooks/waiting-webhooks'; - -@Service() -export class WaitingForms extends WaitingWebhooks { - protected override includeForms = true; - - protected override logReceivedWebhook(method: string, executionId: string) { - this.logger.debug(`Received waiting-form "${method}" for execution "${executionId}"`); - } - - protected disableNode(execution: IExecutionResponse, method?: string) { - if (method === 'POST') { - execution.data.executionData!.nodeExecutionStack[0].node.disabled = true; - } - } -} diff --git a/packages/cli/src/webhooks/__tests__/test-webhooks.test.ts b/packages/cli/src/webhooks/__tests__/test-webhooks.test.ts index d9228bcb0d..3f8972ad9a 100644 --- a/packages/cli/src/webhooks/__tests__/test-webhooks.test.ts +++ b/packages/cli/src/webhooks/__tests__/test-webhooks.test.ts @@ -39,7 +39,7 @@ let testWebhooks: TestWebhooks; describe('TestWebhooks', () => { beforeAll(() => { - testWebhooks = new 
TestWebhooks(mock(), mock(), registrations, mock()); + testWebhooks = new TestWebhooks(mock(), mock(), registrations, mock(), mock()); jest.useFakeTimers(); }); diff --git a/packages/cli/src/webhooks/test-webhooks.ts b/packages/cli/src/webhooks/test-webhooks.ts index 21511d4843..bf2fa6c9d8 100644 --- a/packages/cli/src/webhooks/test-webhooks.ts +++ b/packages/cli/src/webhooks/test-webhooks.ts @@ -16,6 +16,7 @@ import { WorkflowMissingIdError } from '@/errors/workflow-missing-id.error'; import type { IWorkflowDb } from '@/interfaces'; import { NodeTypes } from '@/node-types'; import { Push } from '@/push'; +import { Publisher } from '@/scaling/pubsub/publisher.service'; import { OrchestrationService } from '@/services/orchestration.service'; import { removeTrailingSlash } from '@/utils'; import type { TestWebhookRegistration } from '@/webhooks/test-webhook-registrations.service'; @@ -41,6 +42,7 @@ export class TestWebhooks implements IWebhookManager { private readonly nodeTypes: NodeTypes, private readonly registrations: TestWebhookRegistrationsService, private readonly orchestrationService: OrchestrationService, + private readonly publisher: Publisher, ) {} private timeouts: { [webhookKey: string]: NodeJS.Timeout } = {}; @@ -156,8 +158,10 @@ export class TestWebhooks implements IWebhookManager { pushRef && !this.push.getBackend().hasPushRef(pushRef) ) { - const payload = { webhookKey: key, workflowEntity, pushRef }; - void this.orchestrationService.publish('clear-test-webhooks', payload); + void this.publisher.publishCommand({ + command: 'clear-test-webhooks', + payload: { webhookKey: key, workflowEntity, pushRef }, + }); return; } diff --git a/packages/cli/src/webhooks/waiting-forms.ts b/packages/cli/src/webhooks/waiting-forms.ts new file mode 100644 index 0000000000..72963f21da --- /dev/null +++ b/packages/cli/src/webhooks/waiting-forms.ts @@ -0,0 +1,139 @@ +import axios from 'axios'; +import type express from 'express'; +import { FORM_NODE_TYPE, sleep, Workflow } from 'n8n-workflow'; +import { Service } from 'typedi'; + +import { ConflictError } from '@/errors/response-errors/conflict.error'; +import { NotFoundError } from '@/errors/response-errors/not-found.error'; +import type { IExecutionResponse } from '@/interfaces'; +import { WaitingWebhooks } from '@/webhooks/waiting-webhooks'; + +import type { IWebhookResponseCallbackData, WaitingWebhookRequest } from './webhook.types'; + +@Service() +export class WaitingForms extends WaitingWebhooks { + protected override includeForms = true; + + protected override logReceivedWebhook(method: string, executionId: string) { + this.logger.debug(`Received waiting-form "${method}" for execution "${executionId}"`); + } + + protected disableNode(execution: IExecutionResponse, method?: string) { + if (method === 'POST') { + execution.data.executionData!.nodeExecutionStack[0].node.disabled = true; + } + } + + private getWorkflow(execution: IExecutionResponse) { + const { workflowData } = execution; + return new Workflow({ + id: workflowData.id, + name: workflowData.name, + nodes: workflowData.nodes, + connections: workflowData.connections, + active: workflowData.active, + nodeTypes: this.nodeTypes, + staticData: workflowData.staticData, + settings: workflowData.settings, + }); + } + + async executeWebhook( + req: WaitingWebhookRequest, + res: express.Response, + ): Promise { + const { path: executionId, suffix } = req.params; + + this.logReceivedWebhook(req.method, executionId); + + // Reset request parameters + req.params = {} as 
WaitingWebhookRequest['params']; + + const execution = await this.getExecution(executionId); + + if (!execution) { + throw new NotFoundError(`The execution "${executionId}" does not exist.`); + } + + if (execution.data.resultData.error) { + throw new ConflictError(`The execution "${executionId}" has finished with error.`); + } + + if (execution.status === 'running') { + if (this.includeForms && req.method === 'GET') { + await sleep(1000); + + const url = `${req.protocol}://${req.get('host')}${req.originalUrl}`; + const page = await axios({ url }); + + if (page) { + res.send(` + + `); + } + + return { + noWebhookResponse: true, + }; + } + throw new ConflictError(`The execution "${executionId}" is running already.`); + } + + let completionPage; + if (execution.finished) { + const workflow = this.getWorkflow(execution); + + const parentNodes = workflow.getParentNodes( + execution.data.resultData.lastNodeExecuted as string, + ); + + const lastNodeExecuted = execution.data.resultData.lastNodeExecuted as string; + const lastNode = workflow.nodes[lastNodeExecuted]; + + if ( + !lastNode.disabled && + lastNode.type === FORM_NODE_TYPE && + lastNode.parameters.operation === 'completion' + ) { + completionPage = lastNodeExecuted; + } else { + completionPage = Object.keys(workflow.nodes).find((nodeName) => { + const node = workflow.nodes[nodeName]; + return ( + parentNodes.includes(nodeName) && + !node.disabled && + node.type === FORM_NODE_TYPE && + node.parameters.operation === 'completion' + ); + }); + } + + if (!completionPage) { + res.render('form-trigger-completion', { + title: 'Form Submitted', + message: 'Your response has been recorded', + formTitle: 'Form Submitted', + }); + + return { + noWebhookResponse: true, + }; + } + } + + const targetNode = completionPage || (execution.data.resultData.lastNodeExecuted as string); + + return await this.getWebhookExecutionData({ + execution, + req, + res, + lastNodeExecuted: targetNode, + executionId, + suffix, + }); + } +} diff --git a/packages/cli/src/webhooks/waiting-webhooks.ts b/packages/cli/src/webhooks/waiting-webhooks.ts index e644c065f3..a14563eea2 100644 --- a/packages/cli/src/webhooks/waiting-webhooks.ts +++ b/packages/cli/src/webhooks/waiting-webhooks.ts @@ -1,9 +1,11 @@ import type express from 'express'; import { + FORM_NODE_TYPE, type INodes, type IWorkflowBase, NodeHelpers, SEND_AND_WAIT_OPERATION, + WAIT_NODE_TYPE, Workflow, } from 'n8n-workflow'; import { Service } from 'typedi'; @@ -34,7 +36,7 @@ export class WaitingWebhooks implements IWebhookManager { constructor( protected readonly logger: Logger, - private readonly nodeTypes: NodeTypes, + protected readonly nodeTypes: NodeTypes, private readonly executionRepository: ExecutionRepository, ) {} @@ -58,7 +60,7 @@ export class WaitingWebhooks implements IWebhookManager { ); } - private getWorkflow(workflowData: IWorkflowBase) { + private createWorkflow(workflowData: IWorkflowBase) { return new Workflow({ id: workflowData.id, name: workflowData.name, @@ -71,6 +73,13 @@ export class WaitingWebhooks implements IWebhookManager { }); } + protected async getExecution(executionId: string) { + return await this.executionRepository.findSingleExecution(executionId, { + includeData: true, + unflattenData: true, + }); + } + async executeWebhook( req: WaitingWebhookRequest, res: express.Response, @@ -82,17 +91,14 @@ export class WaitingWebhooks implements IWebhookManager { // Reset request parameters req.params = {} as WaitingWebhookRequest['params']; - const execution = await 
this.executionRepository.findSingleExecution(executionId, { - includeData: true, - unflattenData: true, - }); + const execution = await this.getExecution(executionId); if (!execution) { - throw new NotFoundError(`The execution "${executionId} does not exist.`); + throw new NotFoundError(`The execution "${executionId}" does not exist.`); } if (execution.status === 'running') { - throw new ConflictError(`The execution "${executionId} is running already.`); + throw new ConflictError(`The execution "${executionId}" is running already.`); } if (execution.data?.resultData?.error) { @@ -101,7 +107,7 @@ export class WaitingWebhooks implements IWebhookManager { if (execution.finished) { const { workflowData } = execution; - const { nodes } = this.getWorkflow(workflowData); + const { nodes } = this.createWorkflow(workflowData); if (this.isSendAndWaitRequest(nodes, suffix)) { res.render('send-and-wait-no-action-required', { isTestWebhook: false }); return { noWebhookResponse: true }; @@ -112,6 +118,31 @@ export class WaitingWebhooks implements IWebhookManager { const lastNodeExecuted = execution.data.resultData.lastNodeExecuted as string; + return await this.getWebhookExecutionData({ + execution, + req, + res, + lastNodeExecuted, + executionId, + suffix, + }); + } + + protected async getWebhookExecutionData({ + execution, + req, + res, + lastNodeExecuted, + executionId, + suffix, + }: { + execution: IExecutionResponse; + req: WaitingWebhookRequest; + res: express.Response; + lastNodeExecuted: string; + executionId: string; + suffix?: string; + }): Promise { // Set the node as disabled so that the data does not get executed again as it would result // in starting the wait all over again this.disableNode(execution, req.method); @@ -123,7 +154,7 @@ export class WaitingWebhooks implements IWebhookManager { execution.data.resultData.runData[lastNodeExecuted].pop(); const { workflowData } = execution; - const workflow = this.getWorkflow(workflowData); + const workflow = this.createWorkflow(workflowData); const workflowStartNode = workflow.getNode(lastNodeExecuted); if (workflowStartNode === null) { @@ -146,11 +177,26 @@ export class WaitingWebhooks implements IWebhookManager { if (webhookData === undefined) { // If no data got found it means that the execution can not be started via a webhook. // Return 404 because we do not want to give any data if the execution exists or not. 
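The lines added below change what this branch does for a waiting execution: if the execution has no error, is still `waiting`, and the last-executed node has a child Form or Wait node, the request is swallowed (`noWebhookResponse`) instead of answered with a 404. A standalone sketch of that decision, using simplified stand-in types and a hypothetical helper name rather than the actual n8n interfaces:

// Sketch only: simplified stand-ins for n8n's execution and node types.
interface UnmatchedRequestContext {
	status: 'waiting' | 'running' | 'success' | 'error' | 'crashed';
	hasError: boolean;
	/** Node types of the children of the node the execution stopped on. */
	childNodeTypes: string[];
}

// Assumed constant values; in n8n these come from FORM_NODE_TYPE / WAIT_NODE_TYPE in n8n-workflow.
const FORM_NODE_TYPE = 'n8n-nodes-base.form';
const WAIT_NODE_TYPE = 'n8n-nodes-base.wait';

/** Decide how to answer a request that matches no webhook of a waiting execution. */
function resolveUnmatchedRequest(ctx: UnmatchedRequestContext): 'no-webhook-response' | 'not-found' {
	const mayStillReachForm =
		!ctx.hasError &&
		ctx.status === 'waiting' &&
		ctx.childNodeTypes.some((type) => type === FORM_NODE_TYPE || type === WAIT_NODE_TYPE);

	// Swallow the request while a downstream form can still be served; otherwise 404.
	return mayStillReachForm ? 'no-webhook-response' : 'not-found';
}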
+ const errorMessage = `The workflow for execution "${executionId}" does not contain a waiting webhook with a matching path/method.`; + if (this.isSendAndWaitRequest(workflow.nodes, suffix)) { res.render('send-and-wait-no-action-required', { isTestWebhook: false }); return { noWebhookResponse: true }; + } else if (!execution.data.resultData.error && execution.status === 'waiting') { + const childNodes = workflow.getChildNodes( + execution.data.resultData.lastNodeExecuted as string, + ); + const hasChildForms = childNodes.some( + (node) => + workflow.nodes[node].type === FORM_NODE_TYPE || + workflow.nodes[node].type === WAIT_NODE_TYPE, + ); + if (hasChildForms) { + return { noWebhookResponse: true }; + } else { + throw new NotFoundError(errorMessage); + } } else { - const errorMessage = `The workflow for execution "${executionId}" does not contain a waiting webhook with a matching path/method.`; throw new NotFoundError(errorMessage); } } diff --git a/packages/cli/src/webhooks/webhook-helpers.ts b/packages/cli/src/webhooks/webhook-helpers.ts index 5ff770acfb..027c965e4e 100644 --- a/packages/cli/src/webhooks/webhook-helpers.ts +++ b/packages/cli/src/webhooks/webhook-helpers.ts @@ -34,7 +34,10 @@ import { BINARY_ENCODING, createDeferredPromise, ErrorReporterProxy as ErrorReporter, + ErrorReporterProxy, + FORM_NODE_TYPE, NodeHelpers, + NodeOperationError, } from 'n8n-workflow'; import { finished } from 'stream/promises'; import { Container } from 'typedi'; @@ -120,7 +123,7 @@ export async function executeWebhook( ); if (nodeType === undefined) { const errorMessage = `The type of the webhook node "${workflowStartNode.name}" is not known`; - responseCallback(new Error(errorMessage), {}); + responseCallback(new ApplicationError(errorMessage), {}); throw new InternalServerError(errorMessage); } @@ -143,14 +146,37 @@ export async function executeWebhook( } // Get the responseMode - const responseMode = workflow.expression.getSimpleParameterValue( - workflowStartNode, - webhookData.webhookDescription.responseMode, - executionMode, - additionalKeys, - undefined, - 'onReceived', - ) as WebhookResponseMode; + let responseMode; + + // if this is n8n FormTrigger node, check if there is a Form node in child nodes, + // if so, set 'responseMode' to 'formPage' to redirect to URL of that Form later + if (nodeType.description.name === 'formTrigger') { + const connectedNodes = workflow.getChildNodes(workflowStartNode.name); + let hasNextPage = false; + for (const nodeName of connectedNodes) { + const node = workflow.nodes[nodeName]; + if (node.type === FORM_NODE_TYPE && !node.disabled) { + hasNextPage = true; + break; + } + } + + if (hasNextPage) { + responseMode = 'formPage'; + } + } + + if (!responseMode) { + responseMode = workflow.expression.getSimpleParameterValue( + workflowStartNode, + webhookData.webhookDescription.responseMode, + executionMode, + additionalKeys, + undefined, + 'onReceived', + ) as WebhookResponseMode; + } + const responseCode = workflow.expression.getSimpleParameterValue( workflowStartNode, webhookData.webhookDescription.responseCode as string, @@ -169,12 +195,12 @@ export async function executeWebhook( 'firstEntryJson', ); - if (!['onReceived', 'lastNode', 'responseNode'].includes(responseMode)) { + if (!['onReceived', 'lastNode', 'responseNode', 'formPage'].includes(responseMode)) { // If the mode is not known we error. 
Is probably best like that instead of using // the default that people know as early as possible (probably already testing phase) // that something does not resolve properly. const errorMessage = `The response mode '${responseMode}' is not valid!`; - responseCallback(new Error(errorMessage), {}); + responseCallback(new ApplicationError(errorMessage), {}); throw new InternalServerError(errorMessage); } @@ -242,8 +268,26 @@ export async function executeWebhook( }); } catch (err) { // Send error response to webhook caller - const errorMessage = 'Workflow Webhook Error: Workflow could not be started!'; - responseCallback(new Error(errorMessage), {}); + const webhookType = ['formTrigger', 'form'].includes(nodeType.description.name) + ? 'Form' + : 'Webhook'; + let errorMessage = `Workflow ${webhookType} Error: Workflow could not be started!`; + + // if workflow started manually, show an actual error message + if (err instanceof NodeOperationError && err.type === 'manual-form-test') { + errorMessage = err.message; + } + + ErrorReporterProxy.error(err, { + extra: { + nodeName: workflowStartNode.name, + nodeType: workflowStartNode.type, + nodeVersion: workflowStartNode.typeVersion, + workflowId: workflow.id, + }, + }); + + responseCallback(new ApplicationError(errorMessage), {}); didSendResponse = true; // Add error to execution data that it can be logged and send to Editor-UI @@ -487,6 +531,12 @@ export async function executeWebhook( responsePromise, ); + if (responseMode === 'formPage' && !didSendResponse) { + res.redirect(`${additionalData.formWaitingBaseUrl}/${executionId}`); + process.nextTick(() => res.end()); + didSendResponse = true; + } + Container.get(Logger).debug( `Started execution of workflow "${workflow.name}" from webhook with execution ID ${executionId}`, { executionId }, @@ -562,7 +612,7 @@ export async function executeWebhook( // Return the JSON data of the first entry if (returnData.data!.main[0]![0] === undefined) { - responseCallback(new Error('No item to return got found'), {}); + responseCallback(new ApplicationError('No item to return got found'), {}); didSendResponse = true; return undefined; } @@ -616,13 +666,13 @@ export async function executeWebhook( data = returnData.data!.main[0]![0]; if (data === undefined) { - responseCallback(new Error('No item was found to return'), {}); + responseCallback(new ApplicationError('No item was found to return'), {}); didSendResponse = true; return undefined; } if (data.binary === undefined) { - responseCallback(new Error('No binary data was found to return'), {}); + responseCallback(new ApplicationError('No binary data was found to return'), {}); didSendResponse = true; return undefined; } @@ -637,7 +687,10 @@ export async function executeWebhook( ); if (responseBinaryPropertyName === undefined && !didSendResponse) { - responseCallback(new Error("No 'responseBinaryPropertyName' is set"), {}); + responseCallback( + new ApplicationError("No 'responseBinaryPropertyName' is set"), + {}, + ); didSendResponse = true; } @@ -646,7 +699,7 @@ export async function executeWebhook( ]; if (binaryData === undefined && !didSendResponse) { responseCallback( - new Error( + new ApplicationError( `The binary property '${responseBinaryPropertyName}' which should be returned does not exist`, ), {}, diff --git a/packages/cli/src/workflow-runner.ts b/packages/cli/src/workflow-runner.ts index 0f1f37b71d..4dd5e08714 100644 --- a/packages/cli/src/workflow-runner.ts +++ b/packages/cli/src/workflow-runner.ts @@ -2,7 +2,6 @@ /* eslint-disable 
@typescript-eslint/no-unsafe-member-access */ /* eslint-disable @typescript-eslint/no-shadow */ /* eslint-disable @typescript-eslint/no-unsafe-assignment */ -import { GlobalConfig } from '@n8n/config'; import { InstanceSettings, WorkflowExecute } from 'n8n-core'; import type { ExecutionError, @@ -15,6 +14,7 @@ import type { IWorkflowExecutionDataProcess, } from 'n8n-workflow'; import { + ApplicationError, ErrorReporterProxy as ErrorReporter, ExecutionCancelledError, Workflow, @@ -29,7 +29,7 @@ import { ExternalHooks } from '@/external-hooks'; import { Logger } from '@/logging/logger.service'; import { NodeTypes } from '@/node-types'; import type { ScalingService } from '@/scaling/scaling.service'; -import type { Job, JobData, JobResult } from '@/scaling/scaling.types'; +import type { Job, JobData } from '@/scaling/scaling.types'; import { PermissionChecker } from '@/user-management/permission-checker'; import * as WorkflowExecuteAdditionalData from '@/workflow-execute-additional-data'; import * as WorkflowHelpers from '@/workflow-helpers'; @@ -381,6 +381,17 @@ export class WorkflowRunner { let job: Job; let hooks: WorkflowHooks; try { + // check to help diagnose PAY-2100 + if ( + data.executionData?.executionData?.nodeExecutionStack?.length === 0 && + config.getEnv('deployment.type') === 'internal' + ) { + await this.executionRepository.setRunning(executionId); // set `startedAt` so we display it correctly in UI + throw new ApplicationError('Execution to enqueue has empty node execution stack', { + extra: { executionData: data.executionData }, + }); + } + job = await this.scalingService.addJob(jobData, { priority: realtime ? 50 : 100 }); hooks = WorkflowExecuteAdditionalData.getWorkflowHooksWorkerMain( @@ -427,54 +438,8 @@ export class WorkflowRunner { reject(error); }); - const jobData: Promise = job.finished(); - - const { queueRecoveryInterval } = Container.get(GlobalConfig).queue.bull; - - const racingPromises: Array> = [jobData]; - - let clearWatchdogInterval; - if (queueRecoveryInterval > 0) { - /** *********************************************** - * Long explanation about what this solves: * - * This only happens in a very specific scenario * - * when Redis crashes and recovers shortly * - * but during this time, some execution(s) * - * finished. The end result is that the main * - * process will wait indefinitely and never * - * get a response. This adds an active polling to* - * the queue that allows us to identify that the * - * execution finished and get information from * - * the database. * - ************************************************ */ - let watchDogInterval: NodeJS.Timeout | undefined; - - const watchDog: Promise = new Promise((res) => { - watchDogInterval = setInterval(async () => { - const currentJob = await this.scalingService.getJob(job.id); - // When null means job is finished (not found in queue) - if (currentJob === null) { - // Mimic worker's success message - res({ success: true }); - } - }, queueRecoveryInterval * 1000); - }); - - racingPromises.push(watchDog); - - clearWatchdogInterval = () => { - if (watchDogInterval) { - clearInterval(watchDogInterval); - watchDogInterval = undefined; - } - }; - } - try { - await Promise.race(racingPromises); - if (clearWatchdogInterval !== undefined) { - clearWatchdogInterval(); - } + await job.finished(); } catch (error) { // We use "getWorkflowHooksWorkerExecuter" as "getWorkflowHooksWorkerMain" does not contain the // "workflowExecuteAfter" which we require. 
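The hunk above removes the queue-recovery watchdog and has the runner await `job.finished()` directly. Stripped of n8n specifics, the removed pattern raced the job's own completion promise against a periodic poll of the queue, roughly as in this illustrative sketch (placeholder names and a minimal queue interface, not the actual `ScalingService` API):

// Illustrative reconstruction of the removed watchdog; names are placeholders.
interface QueueLike {
	/** Resolves to null once the job is no longer found in the queue. */
	getJob(jobId: string): Promise<unknown | null>;
}

async function waitForJobWithWatchdog(
	queue: QueueLike,
	jobId: string,
	finished: Promise<void>, // the queue library's own "job finished" promise
	recoveryIntervalSeconds: number,
): Promise<void> {
	if (recoveryIntervalSeconds <= 0) {
		// No watchdog configured: trust the queue's promise. This is also all the new code does.
		await finished;
		return;
	}

	let timer: NodeJS.Timeout | undefined;

	// If Redis briefly crashes and recovers while an execution finishes, the
	// "finished" promise may never resolve, so poll the queue as a fallback.
	const watchdog = new Promise<void>((resolve) => {
		timer = setInterval(async () => {
			const job = await queue.getJob(jobId);
			if (job === null) resolve(); // job gone from the queue: treat as finished
		}, recoveryIntervalSeconds * 1000);
	});

	try {
		await Promise.race([finished, watchdog]);
	} finally {
		if (timer) clearInterval(timer);
	}
}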
@@ -485,9 +450,6 @@ export class WorkflowRunner { { retryOf: data.retryOf ? data.retryOf.toString() : undefined }, ); this.logger.error(`Problem with execution ${executionId}: ${error.message}. Aborting.`); - if (clearWatchdogInterval !== undefined) { - clearWatchdogInterval(); - } await this.processError(error, new Date(), data.executionMode, executionId, hooks); reject(error); diff --git a/packages/cli/templates/form-trigger-completion.handlebars b/packages/cli/templates/form-trigger-completion.handlebars new file mode 100644 index 0000000000..761d09937b --- /dev/null +++ b/packages/cli/templates/form-trigger-completion.handlebars @@ -0,0 +1,74 @@ + + + + + + + + + {{formTitle}} + + + + + +
+      {{title}}
+      {{message}}
+      {{#if appendAttribution}}
+      {{/if}}
\ No newline at end of file
diff --git a/packages/cli/templates/form-trigger.handlebars b/packages/cli/templates/form-trigger.handlebars
index 5493a76e7f..39627c0204 100644
--- a/packages/cli/templates/form-trigger.handlebars
+++ b/packages/cli/templates/form-trigger.handlebars
@@ -315,7 +315,7 @@
       {{#if testRun}}
-        This is test version of your form. Use it only for testing your Form Trigger.
+        This is test version of your form
{{/if}} @@ -428,7 +428,7 @@ d='M304 48a48 48 0 1 0 -96 0 48 48 0 1 0 96 0zm0 416a48 48 0 1 0 -96 0 48 48 0 1 0 96 0zM48 304a48 48 0 1 0 0-96 48 48 0 1 0 0 96zm464-48a48 48 0 1 0 -96 0 48 48 0 1 0 96 0zM142.9 437A48 48 0 1 0 75 369.1 48 48 0 1 0 142.9 437zm0-294.2A48 48 0 1 0 75 75a48 48 0 1 0 67.9 67.9zM369.1 437A48 48 0 1 0 437 369.1 48 48 0 1 0 369.1 437z' /> - Submit form + {{ buttonLabel }} {{else}} @@ -719,6 +719,10 @@ } if (response.status === 200) { + if(response.redirected) { + window.location.replace(response.url); + return; + } const redirectUrl = document.getElementById("redirectUrl"); if (redirectUrl) { window.location.replace(redirectUrl.href); @@ -731,7 +735,7 @@ document.querySelector('#submitted-form').style.display = 'block'; document.querySelector('#submitted-header').textContent = 'Problem submitting response'; document.querySelector('#submitted-content').textContent = - 'An error occurred in the workflow handling this form'; + 'Please try again or contact support if the problem persists'; } return; @@ -747,6 +751,15 @@ .catch(function (error) { console.error('Error:', error); }); + + const interval = setInterval(function() { + const isSubmited = document.querySelector('#submitted-form').style.display; + if(isSubmited === 'block') { + clearInterval(interval); + return; + } + window.location.reload(); + }, 2000); } }); diff --git a/packages/cli/test/integration/active-workflow-manager.test.ts b/packages/cli/test/integration/active-workflow-manager.test.ts index d5d471ba60..8ea790ade7 100644 --- a/packages/cli/test/integration/active-workflow-manager.test.ts +++ b/packages/cli/test/integration/active-workflow-manager.test.ts @@ -1,4 +1,5 @@ import { mock } from 'jest-mock-extended'; +import type { InstanceSettings } from 'n8n-core'; import { NodeApiError, NodeOperationError, Workflow } from 'n8n-workflow'; import type { IWebhookData, WorkflowActivateMode } from 'n8n-workflow'; import { Container } from 'typedi'; @@ -278,3 +279,72 @@ describe('addWebhooks()', () => { expect(webhookService.storeWebhook).toHaveBeenCalledTimes(1); }); }); + +describe('shouldAddWebhooks', () => { + describe('if leader', () => { + const activeWorkflowManager = new ActiveWorkflowManager( + mock(), + mock(), + mock(), + mock(), + mock(), + mock(), + mock(), + mock(), + mock(), + mock(), + mock(), + mock(), + mock(), + mock({ isLeader: true, isFollower: false }), + mock(), + ); + + test('should return `true` for `init`', () => { + // ensure webhooks are populated on init: https://github.com/n8n-io/n8n/pull/8830 + const result = activeWorkflowManager.shouldAddWebhooks('init'); + expect(result).toBe(true); + }); + + test('should return `false` for `leadershipChange`', () => { + const result = activeWorkflowManager.shouldAddWebhooks('leadershipChange'); + expect(result).toBe(false); + }); + + test('should return `true` for `update` or `activate`', () => { + const modes = ['update', 'activate'] as WorkflowActivateMode[]; + for (const mode of modes) { + const result = activeWorkflowManager.shouldAddWebhooks(mode); + expect(result).toBe(true); + } + }); + }); + + describe('if follower', () => { + const activeWorkflowManager = new ActiveWorkflowManager( + mock(), + mock(), + mock(), + mock(), + mock(), + mock(), + mock(), + mock(), + mock(), + mock(), + mock(), + mock(), + mock(), + mock({ isLeader: false, isFollower: true }), + mock(), + ); + + test('should return `false` for `update` or `activate`', () => { + const modes = ['update', 'activate'] as WorkflowActivateMode[]; + for (const mode of modes) { + 
const result = activeWorkflowManager.shouldAddWebhooks(mode); + expect(result).toBe(false); + } + }); + }); +}); diff --git a/packages/cli/test/integration/collaboration/collaboration.service.test.ts b/packages/cli/test/integration/collaboration/collaboration.service.test.ts index a90424de87..df5f901f28 100644 --- a/packages/cli/test/integration/collaboration/collaboration.service.test.ts +++ b/packages/cli/test/integration/collaboration/collaboration.service.test.ts @@ -16,7 +16,7 @@ import { createWorkflow, shareWorkflowWithUsers } from '@test-integration/db/wor import * as testDb from '@test-integration/test-db'; describe('CollaborationService', () => { - mockInstance(Push, new Push(mock())); + mockInstance(Push, new Push(mock(), mock())); let pushService: Push; let collaborationService: CollaborationService; let owner: User; diff --git a/packages/cli/test/integration/debug.controller.test.ts b/packages/cli/test/integration/debug.controller.test.ts index 47695e59aa..8ab58bd1a0 100644 --- a/packages/cli/test/integration/debug.controller.test.ts +++ b/packages/cli/test/integration/debug.controller.test.ts @@ -5,7 +5,7 @@ import { ActiveWorkflowManager } from '@/active-workflow-manager'; import type { WorkflowEntity } from '@/databases/entities/workflow-entity'; import { WorkflowRepository } from '@/databases/repositories/workflow.repository'; import { generateNanoId } from '@/databases/utils/generators'; -import { MultiMainSetup } from '@/services/orchestration/main/multi-main-setup.ee'; +import { MultiMainSetup } from '@/scaling/multi-main-setup.ee'; import { createOwner } from './shared/db/users'; import { randomName } from './shared/random'; diff --git a/packages/cli/test/integration/eventbus.ee.test.ts b/packages/cli/test/integration/eventbus.ee.test.ts index 9b12cc53d5..c2b6a7f23c 100644 --- a/packages/cli/test/integration/eventbus.ee.test.ts +++ b/packages/cli/test/integration/eventbus.ee.test.ts @@ -22,6 +22,7 @@ import type { MessageEventBusDestinationSentry } from '@/eventbus/message-event- import type { MessageEventBusDestinationSyslog } from '@/eventbus/message-event-bus-destination/message-event-bus-destination-syslog.ee'; import type { MessageEventBusDestinationWebhook } from '@/eventbus/message-event-bus-destination/message-event-bus-destination-webhook.ee'; import { ExecutionRecoveryService } from '@/executions/execution-recovery.service'; +import { Publisher } from '@/scaling/pubsub/publisher.service'; import { createUser } from './shared/db/users'; import type { SuperAgentTest } from './shared/types'; @@ -34,6 +35,8 @@ const mockedAxios = axios as jest.Mocked; jest.mock('syslog-client'); const mockedSyslog = syslog as jest.Mocked; +mockInstance(Publisher); + let owner: User; let authOwnerAgent: SuperAgentTest; diff --git a/packages/cli/test/integration/execution.service.integration.test.ts b/packages/cli/test/integration/execution.service.integration.test.ts index 22d0d65754..4d7144cd4d 100644 --- a/packages/cli/test/integration/execution.service.integration.test.ts +++ b/packages/cli/test/integration/execution.service.integration.test.ts @@ -563,10 +563,10 @@ describe('ExecutionService', () => { { ...summaryShape, annotation: { - tags: [ + tags: expect.arrayContaining([ expect.objectContaining({ name: 'tag1' }), expect.objectContaining({ name: 'tag2' }), - ], + ]), vote: 'up', }, }, @@ -646,10 +646,10 @@ describe('ExecutionService', () => { { ...summaryShape, annotation: { - tags: [ + tags: expect.arrayContaining([ expect.objectContaining({ name: 'tag1' }), 
expect.objectContaining({ name: 'tag2' }), - ], + ]), vote: 'up', }, }, @@ -691,10 +691,10 @@ describe('ExecutionService', () => { { ...summaryShape, annotation: { - tags: [ + tags: expect.arrayContaining([ expect.objectContaining({ name: 'tag1' }), expect.objectContaining({ name: 'tag2' }), - ], + ]), vote: 'up', }, }, diff --git a/packages/cli/test/integration/external-secrets/external-secrets.api.test.ts b/packages/cli/test/integration/external-secrets/external-secrets.api.test.ts index b3560b9262..3418576be1 100644 --- a/packages/cli/test/integration/external-secrets/external-secrets.api.test.ts +++ b/packages/cli/test/integration/external-secrets/external-secrets.api.test.ts @@ -63,6 +63,7 @@ const resetManager = async () => { mockProvidersInstance, Container.get(Cipher), eventService, + mock(), ), ); diff --git a/packages/cli/test/integration/mfa/mfa.api.test.ts b/packages/cli/test/integration/mfa/mfa.api.test.ts index 0062f87e89..3f19632506 100644 --- a/packages/cli/test/integration/mfa/mfa.api.test.ts +++ b/packages/cli/test/integration/mfa/mfa.api.test.ts @@ -5,9 +5,12 @@ import { AuthService } from '@/auth/auth.service'; import config from '@/config'; import type { User } from '@/databases/entities/user'; import { AuthUserRepository } from '@/databases/repositories/auth-user.repository'; +import { BadRequestError } from '@/errors/response-errors/bad-request.error'; +import { ExternalHooks } from '@/external-hooks'; import { TOTPService } from '@/mfa/totp.service'; +import { mockInstance } from '@test/mocking'; -import { createUser, createUserWithMfaEnabled } from '../shared/db/users'; +import { createOwner, createUser, createUserWithMfaEnabled } from '../shared/db/users'; import { randomValidPassword, uniqueId } from '../shared/random'; import * as testDb from '../shared/test-db'; import * as utils from '../shared/utils'; @@ -16,6 +19,8 @@ jest.mock('@/telemetry'); let owner: User; +const externalHooks = mockInstance(ExternalHooks); + const testServer = utils.setupTestServer({ endpointGroups: ['mfa', 'auth', 'me', 'passwordReset'], }); @@ -23,7 +28,9 @@ const testServer = utils.setupTestServer({ beforeEach(async () => { await testDb.truncate(['User']); - owner = await createUser({ role: 'global:owner' }); + owner = await createOwner(); + + externalHooks.run.mockReset(); config.set('userManagement.disabled', false); }); @@ -131,6 +138,27 @@ describe('Enable MFA setup', () => { expect(user.mfaRecoveryCodes).toBeDefined(); expect(user.mfaSecret).toBeDefined(); }); + + test('POST /enable should not enable MFA if pre check fails', async () => { + // This test is to make sure owners verify their email before enabling MFA in cloud + + const response = await testServer.authAgentFor(owner).get('/mfa/qr').expect(200); + + const { secret } = response.body.data; + const token = new TOTPService().generateTOTP(secret); + + await testServer.authAgentFor(owner).post('/mfa/verify').send({ token }).expect(200); + + externalHooks.run.mockRejectedValue(new BadRequestError('Error message')); + + await testServer.authAgentFor(owner).post('/mfa/enable').send({ token }).expect(400); + + const user = await Container.get(AuthUserRepository).findOneOrFail({ + where: {}, + }); + + expect(user.mfaEnabled).toBe(false); + }); }); }); @@ -232,6 +260,28 @@ describe('Change password with MFA enabled', () => { }); }); +describe('MFA before enable checks', () => { + test('POST /can-enable should throw error if mfa.beforeSetup returns error', async () => { + externalHooks.run.mockRejectedValue(new 
BadRequestError('Error message')); + + await testServer.authAgentFor(owner).post('/mfa/can-enable').expect(400); + + expect(externalHooks.run).toHaveBeenCalledWith('mfa.beforeSetup', [ + expect.objectContaining(owner), + ]); + }); + + test('POST /can-enable should not throw error if mfa.beforeSetup does not exist', async () => { + externalHooks.run.mockResolvedValue(undefined); + + await testServer.authAgentFor(owner).post('/mfa/can-enable').expect(200); + + expect(externalHooks.run).toHaveBeenCalledWith('mfa.beforeSetup', [ + expect.objectContaining(owner), + ]); + }); +}); + describe('Login', () => { test('POST /login with email/password should succeed when mfa is disabled', async () => { const password = randomString(8); diff --git a/packages/cli/test/integration/pruning.service.test.ts b/packages/cli/test/integration/pruning.service.test.ts index c4d1957de0..af79640746 100644 --- a/packages/cli/test/integration/pruning.service.test.ts +++ b/packages/cli/test/integration/pruning.service.test.ts @@ -38,6 +38,7 @@ describe('softDeleteOnPruningCycle()', () => { Container.get(ExecutionRepository), mockInstance(BinaryDataService), mock(), + mock(), ); workflow = await createWorkflow(); diff --git a/packages/cli/test/integration/runners/task-runner-process.test.ts b/packages/cli/test/integration/runners/task-runner-process.test.ts index e623d5f371..4b35e270df 100644 --- a/packages/cli/test/integration/runners/task-runner-process.test.ts +++ b/packages/cli/test/integration/runners/task-runner-process.test.ts @@ -88,15 +88,15 @@ describe('TaskRunnerProcess', () => { // @ts-expect-error private property runnerProcess.process?.kill('SIGKILL'); - // Assert - // Wait until the runner is running again - await retryUntil(() => expect(runnerProcess.isRunning).toBeTruthy()); - expect(runnerProcess.pid).not.toBe(processId); + // Wait until the runner has exited + await runnerProcess.runPromise; + // Assert // Wait until the runner has connected again await retryUntil(() => expect(getNumConnectedRunners()).toBe(1)); expect(getNumConnectedRunners()).toBe(1); expect(getNumRegisteredRunners()).toBe(1); + expect(runnerProcess.pid).not.toBe(processId); }); it('should launch runner directly if not using a launcher', async () => { diff --git a/packages/cli/test/integration/shared/db/users.ts b/packages/cli/test/integration/shared/db/users.ts index 62f9f39a05..64c4d8ad85 100644 --- a/packages/cli/test/integration/shared/db/users.ts +++ b/packages/cli/test/integration/shared/db/users.ts @@ -1,17 +1,16 @@ import { hash } from 'bcryptjs'; -import { randomString } from 'n8n-workflow'; import Container from 'typedi'; import { AuthIdentity } from '@/databases/entities/auth-identity'; import { type GlobalRole, type User } from '@/databases/entities/user'; -import { ApiKeyRepository } from '@/databases/repositories/api-key.repository'; import { AuthIdentityRepository } from '@/databases/repositories/auth-identity.repository'; import { AuthUserRepository } from '@/databases/repositories/auth-user.repository'; import { UserRepository } from '@/databases/repositories/user.repository'; import { MfaService } from '@/mfa/mfa.service'; import { TOTPService } from '@/mfa/totp.service'; +import { PublicApiKeyService } from '@/services/public-api-key.service'; -import { randomApiKey, randomEmail, randomName, randomValidPassword } from '../random'; +import { randomEmail, randomName, randomValidPassword } from '../random'; // pre-computed bcrypt hash for the string 'password', using `await hash('password', 10)` const passwordHash = 
'$2a$10$njedH7S6V5898mj6p0Jr..IGY9Ms.qNwR7RbSzzX9yubJocKfvGGK'; @@ -81,17 +80,8 @@ export async function createUserWithMfaEnabled( }; } -const createApiKeyEntity = (user: User) => { - const apiKey = randomApiKey(); - return Container.get(ApiKeyRepository).create({ - userId: user.id, - label: randomString(10), - apiKey, - }); -}; - export const addApiKey = async (user: User) => { - return await Container.get(ApiKeyRepository).save(createApiKeyEntity(user)); + return await Container.get(PublicApiKeyService).createPublicApiKeyForUser(user); }; export async function createOwnerWithApiKey() { diff --git a/packages/cli/test/integration/shared/utils/index.ts b/packages/cli/test/integration/shared/utils/index.ts index 4d4a207f94..78de2c1b25 100644 --- a/packages/cli/test/integration/shared/utils/index.ts +++ b/packages/cli/test/integration/shared/utils/index.ts @@ -32,7 +32,6 @@ export { setupTestServer } from './test-server'; export async function initActiveWorkflowManager() { mockInstance(OrchestrationService, { isMultiMainSetupEnabled: false, - shouldAddWebhooks: jest.fn().mockReturnValue(true), }); mockInstance(Push); diff --git a/packages/cli/test/integration/webhooks.test.ts b/packages/cli/test/integration/webhooks.test.ts index 165822aa84..7d7b5105cb 100644 --- a/packages/cli/test/integration/webhooks.test.ts +++ b/packages/cli/test/integration/webhooks.test.ts @@ -5,9 +5,9 @@ import type SuperAgentTest from 'supertest/lib/agent'; import Container from 'typedi'; import { ExternalHooks } from '@/external-hooks'; -import { WaitingForms } from '@/waiting-forms'; import { LiveWebhooks } from '@/webhooks/live-webhooks'; import { TestWebhooks } from '@/webhooks/test-webhooks'; +import { WaitingForms } from '@/webhooks/waiting-forms'; import { WaitingWebhooks } from '@/webhooks/waiting-webhooks'; import { WebhookServer } from '@/webhooks/webhook-server'; import type { IWebhookResponseCallbackData } from '@/webhooks/webhook.types'; diff --git a/packages/core/package.json b/packages/core/package.json index aec9b34891..89b96d3c8c 100644 --- a/packages/core/package.json +++ b/packages/core/package.json @@ -1,6 +1,6 @@ { "name": "n8n-core", - "version": "1.63.0", + "version": "1.64.0", "description": "Core functionality of n8n", "main": "dist/index", "types": "dist/index.d.ts", diff --git a/packages/core/src/NodeExecuteFunctions.ts b/packages/core/src/NodeExecuteFunctions.ts index 6cbef1e1b8..232442be98 100644 --- a/packages/core/src/NodeExecuteFunctions.ts +++ b/packages/core/src/NodeExecuteFunctions.ts @@ -4436,6 +4436,36 @@ export function getExecuteWebhookFunctions( ); }, getMode: () => mode, + evaluateExpression: (expression: string, evaluateItemIndex?: number) => { + const itemIndex = evaluateItemIndex === undefined ? 
0 : evaluateItemIndex; + const runIndex = 0; + + let connectionInputData: INodeExecutionData[] = []; + let executionData: IExecuteData | undefined; + + if (runExecutionData?.executionData !== undefined) { + executionData = runExecutionData.executionData.nodeExecutionStack[0]; + + if (executionData !== undefined) { + connectionInputData = executionData.data.main[0]!; + } + } + + const additionalKeys = getAdditionalKeys(additionalData, mode, runExecutionData); + + return workflow.expression.resolveSimpleParameterValue( + `=${expression}`, + {}, + runExecutionData, + runIndex, + itemIndex, + node.name, + connectionInputData, + mode, + additionalKeys, + executionData, + ); + }, getNodeParameter: ( parameterName: string, fallbackValue?: any, diff --git a/packages/core/src/PartialExecutionUtils/DirectedGraph.ts b/packages/core/src/PartialExecutionUtils/DirectedGraph.ts index 606f624d02..6f8b43a660 100644 --- a/packages/core/src/PartialExecutionUtils/DirectedGraph.ts +++ b/packages/core/src/PartialExecutionUtils/DirectedGraph.ts @@ -286,6 +286,149 @@ export class DirectedGraph { ); } + /** + * Returns all strongly connected components. + * + * A strongly connected component is a set of nodes where it's possible to + * reach every node from every node. + * + * Strongly connected components are mutually exclusive in directed graphs, + * i.e. they cannot overlap. + * + * The smallest strongly connected component is a single node, since it can + * reach itself from itself by not following any edges. + * + * The algorithm implemented here is Tarjan's algorithm. + * + * Example: + * ┌─────┐ ┌─────┐ ┌─────┐ ┌─────┐ + * │node1├────►node2◄────┤node3├────►node5│ + * └─────┘ └──┬──┘ └──▲──┘ └▲───┬┘ + * │ │ │ │ + * ┌──▼──┐ │ ┌┴───▼┐ + * │node4├───────┘ │node6│ + * └─────┘ └─────┘ + * + * The strongly connected components are + * 1. node1 + * 2. node2, node4, node3 + * 3.
node5, node6 + * + * Further reading: + * https://en.wikipedia.org/wiki/Strongly_connected_component + * https://www.youtube.com/watch?v=wUgWX0nc4NY + */ + getStronglyConnectedComponents(): Array<Set<INode>> { + let id = 0; + const visited = new Set<INode>(); + const ids = new Map<INode, number>(); + const lowLinkValues = new Map<INode, number>(); + const stack: INode[] = []; + const stronglyConnectedComponents: Array<Set<INode>> = []; + + const followNode = (node: INode) => { + if (visited.has(node)) { + return; + } + + visited.add(node); + lowLinkValues.set(node, id); + ids.set(node, id); + id++; + stack.push(node); + + const directChildren = this.getDirectChildConnections(node).map((c) => c.to); + for (const child of directChildren) { + followNode(child); + + // if the child is still on the stack, take the minimum of the low link values + if (stack.includes(child)) { + const childLowLinkValue = lowLinkValues.get(child); + const ownLowLinkValue = lowLinkValues.get(node); + a.ok(childLowLinkValue !== undefined); + a.ok(ownLowLinkValue !== undefined); + const lowestLowLinkValue = Math.min(childLowLinkValue, ownLowLinkValue); + + lowLinkValues.set(node, lowestLowLinkValue); + } + } + + // after we visited all children, check if the low link value is the same as the + // node's id, which means we found a strongly connected component + const ownId = ids.get(node); + const ownLowLinkValue = lowLinkValues.get(node); + a.ok(ownId !== undefined); + a.ok(ownLowLinkValue !== undefined); + + if (ownId === ownLowLinkValue) { + // pop from the stack until the stack is empty or we find a node that + // has a different low link value + const scc: Set<INode> = new Set(); + let next = stack.at(-1); + + while (next && lowLinkValues.get(next) === ownId) { + stack.pop(); + scc.add(next); + next = stack.at(-1); + } + + if (scc.size > 0) { + stronglyConnectedComponents.push(scc); + } + } + }; + + for (const node of this.nodes.values()) { + followNode(node); + } + + return stronglyConnectedComponents; + } + + private depthFirstSearchRecursive( + from: INode, + fn: (node: INode) => boolean, + seen: Set<INode>, + ): INode | undefined { + if (seen.has(from)) { + return undefined; + } + seen.add(from); + + if (fn(from)) { + return from; + } + + for (const childConnection of this.getDirectChildConnections(from)) { + const found = this.depthFirstSearchRecursive(childConnection.to, fn, seen); + + if (found) { + return found; + } + } + + return undefined; + } + + /** + * Like `Array.prototype.find` but for directed graphs. + * + * Starting from, and including, the `from` node this calls the provided + * predicate function with every child node until the predicate function + * returns true. + * + * The search is depth first, meaning every branch is exhausted before the + * next branch is tried. + * + * The first node for which the predicate function returns true is returned. + * + * If the graph is exhausted and the predicate function never returned true, + * undefined is returned instead.
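 + * + * For example, given a `DirectedGraph` instance `graph` containing a node `trigger` (illustrative names, not taken from this change), the following finds the first node reachable from `trigger` whose name starts with 'Set': + * + *   graph.depthFirstSearch({ from: trigger, fn: (node) => node.name.startsWith('Set') });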
+ */ + depthFirstSearch({ from, fn }: { from: INode; fn: (node: INode) => boolean }): INode | undefined { + return this.depthFirstSearchRecursive(from, fn, new Set()); + } + toWorkflow(parameters: Omit): Workflow { return new Workflow({ ...parameters, diff --git a/packages/core/src/PartialExecutionUtils/__tests__/DirectedGraph.test.ts b/packages/core/src/PartialExecutionUtils/__tests__/DirectedGraph.test.ts index d6eedf416d..9530ed2217 100644 --- a/packages/core/src/PartialExecutionUtils/__tests__/DirectedGraph.test.ts +++ b/packages/core/src/PartialExecutionUtils/__tests__/DirectedGraph.test.ts @@ -9,6 +9,7 @@ // XX denotes that the node is disabled // PD denotes that the node has pinned data +import type { INode } from 'n8n-workflow'; import { NodeConnectionType } from 'n8n-workflow'; import { createNodeData, defaultWorkflowParameter } from './helpers'; @@ -89,6 +90,115 @@ describe('DirectedGraph', () => { }); }); + describe('getStronglyConnectedComponents', () => { + // ┌─────┐ ┌─────┐ ┌─────┐ + // │node1├───►│node2├───►│node4│ + // └─────┘ └──┬──┘ └─────┘ + // ▲ │ + // │ │ + // ┌──┴──┐ │ + // │node3│◄──────┘ + // └─────┘ + test('find strongly connected components', () => { + // ARRANGE + const node1 = createNodeData({ name: 'Node1' }); + const node2 = createNodeData({ name: 'Node2' }); + const node3 = createNodeData({ name: 'Node3' }); + const node4 = createNodeData({ name: 'Node4' }); + const graph = new DirectedGraph() + .addNodes(node1, node2, node3, node4) + .addConnections( + { from: node1, to: node2 }, + { from: node2, to: node3 }, + { from: node3, to: node1 }, + { from: node2, to: node4 }, + ); + + // ACT + const stronglyConnectedComponents = graph.getStronglyConnectedComponents(); + + // ASSERT + expect(stronglyConnectedComponents).toHaveLength(2); + expect(stronglyConnectedComponents).toContainEqual(new Set([node4])); + expect(stronglyConnectedComponents).toContainEqual(new Set([node3, node2, node1])); + }); + + // ┌────┐ + // ┌───────┐ │ ├─ + // │trigger├──┬──►loop│ + // └───────┘ │ │ ├────┐ + // │ └────┘ │ + // └─────────┐ │ + // ┌────┐ │ │ + // ┌───►node├─┘ │ + // │ └────┘ │ + // │ │ + // └─────────────┘ + test('find strongly connected components even if they use different output indexes', () => { + // ARRANGE + const trigger = createNodeData({ name: 'trigger' }); + const loop = createNodeData({ name: 'loop' }); + const node = createNodeData({ name: 'node' }); + const graph = new DirectedGraph() + .addNodes(trigger, loop, node) + .addConnections( + { from: trigger, to: loop }, + { from: loop, outputIndex: 1, to: node }, + { from: node, to: loop }, + ); + + // ACT + const stronglyConnectedComponents = graph.getStronglyConnectedComponents(); + + // ASSERT + expect(stronglyConnectedComponents).toHaveLength(2); + expect(stronglyConnectedComponents).toContainEqual(new Set([trigger])); + expect(stronglyConnectedComponents).toContainEqual(new Set([node, loop])); + }); + }); + + describe('depthFirstSearch', () => { + // ┌─────┐ ┌─────┐ ┌─────┐ ┌─────┐ ┌─────┐ + // │node0├───►│node1├───►│node2├───►│node4│───►│node5│ + // └─────┘ └─────┘ └──┬──┘ └─────┘ └─────┘ + // ▲ │ + // │ │ + // ┌──┴──┐ │ + // │node3│◄──────┘ + // └─────┘ + test('calls nodes in the correct order and stops when it found the node', () => { + // ARRANGE + const node0 = createNodeData({ name: 'Node0' }); + const node1 = createNodeData({ name: 'Node1' }); + const node2 = createNodeData({ name: 'Node2' }); + const node3 = createNodeData({ name: 'Node3' }); + const node4 = createNodeData({ name: 'Node4' }); + const 
node5 = createNodeData({ name: 'Node5' }); + const graph = new DirectedGraph() + .addNodes(node0, node1, node2, node3, node4, node5) + .addConnections( + { from: node0, to: node1 }, + { from: node1, to: node2 }, + { from: node2, to: node3 }, + { from: node3, to: node1 }, + { from: node2, to: node4 }, + { from: node4, to: node5 }, + ); + const fn = jest.fn().mockImplementation((node: INode) => node === node4); + + // ACT + const foundNode = graph.depthFirstSearch({ + from: node0, + fn, + }); + + // ASSERT + expect(foundNode).toBe(node4); + expect(fn).toHaveBeenCalledTimes(5); + expect(fn.mock.calls).toEqual([[node0], [node1], [node2], [node3], [node4]]); + }); + }); + describe('getParentConnections', () => { // ┌─────┐ ┌─────┐ ┌─────┐ ┌─────┐ // │node1├──►│node2├──►│node3│──►│node4│ diff --git a/packages/core/src/PartialExecutionUtils/__tests__/cleanRunData.test.ts b/packages/core/src/PartialExecutionUtils/__tests__/cleanRunData.test.ts index bf37ec7636..5daea46ef6 100644 --- a/packages/core/src/PartialExecutionUtils/__tests__/cleanRunData.test.ts +++ b/packages/core/src/PartialExecutionUtils/__tests__/cleanRunData.test.ts @@ -23,7 +23,7 @@ describe('cleanRunData', () => { }; // ACT - const newRunData = cleanRunData(runData, graph, [node1]); + const newRunData = cleanRunData(runData, graph, new Set([node1])); // ASSERT expect(newRunData).toEqual({}); @@ -47,7 +47,7 @@ describe('cleanRunData', () => { }; // ACT - const newRunData = cleanRunData(runData, graph, [node2]); + const newRunData = cleanRunData(runData, graph, new Set([node2])); // ASSERT expect(newRunData).toEqual({ [node1.name]: runData[node1.name] }); @@ -78,7 +78,7 @@ describe('cleanRunData', () => { }; // ACT - const newRunData = cleanRunData(runData, graph, [node2]); + const newRunData = cleanRunData(runData, graph, new Set([node2])); // ASSERT // TODO: Find out if this is a desirable result in milestone 2 diff --git a/packages/core/src/PartialExecutionUtils/__tests__/findStartNodes.test.ts b/packages/core/src/PartialExecutionUtils/__tests__/findStartNodes.test.ts index 57022d862c..ab33ccf8ed 100644 --- a/packages/core/src/PartialExecutionUtils/__tests__/findStartNodes.test.ts +++ b/packages/core/src/PartialExecutionUtils/__tests__/findStartNodes.test.ts @@ -48,8 +48,8 @@ describe('findStartNodes', () => { const startNodes = findStartNodes({ graph, trigger: node, destination: node }); - expect(startNodes).toHaveLength(1); - expect(startNodes[0]).toEqual(node); + expect(startNodes.size).toBe(1); + expect(startNodes).toContainEqual(node); }); // ►► @@ -67,8 +67,8 @@ describe('findStartNodes', () => { { const startNodes = findStartNodes({ graph, trigger, destination }); - expect(startNodes).toHaveLength(1); - expect(startNodes[0]).toEqual(trigger); + expect(startNodes.size).toBe(1); + expect(startNodes).toContainEqual(trigger); } // if the trigger has run data @@ -79,8 +79,8 @@ describe('findStartNodes', () => { const startNodes = findStartNodes({ graph, trigger, destination, runData }); - expect(startNodes).toHaveLength(1); - expect(startNodes[0]).toEqual(destination); + expect(startNodes.size).toBe(1); + expect(startNodes).toContainEqual(destination); } }); @@ -115,8 +115,8 @@ describe('findStartNodes', () => { const startNodes = findStartNodes({ graph, trigger, destination: node, runData }); // ASSERT - expect(startNodes).toHaveLength(1); - expect(startNodes[0]).toEqual(node); + expect(startNodes.size).toBe(1); + expect(startNodes).toContainEqual(node); }); // ┌─────┐ ┌─────┐ ►► @@ -156,9 +156,9 @@ describe('findStartNodes', 
() => { const startNodes = findStartNodes({ graph, trigger, destination: node4 }); // ASSERT - expect(startNodes).toHaveLength(1); + expect(startNodes.size).toBe(1); // no run data means the trigger is the start node - expect(startNodes[0]).toEqual(trigger); + expect(startNodes).toContainEqual(trigger); } { @@ -175,8 +175,8 @@ describe('findStartNodes', () => { const startNodes = findStartNodes({ graph, trigger, destination: node4, runData }); // ASSERT - expect(startNodes).toHaveLength(1); - expect(startNodes[0]).toEqual(node4); + expect(startNodes.size).toBe(1); + expect(startNodes).toContainEqual(node4); } }); @@ -211,8 +211,8 @@ describe('findStartNodes', () => { }); // ASSERT - expect(startNodes).toHaveLength(1); - expect(startNodes[0]).toEqual(node); + expect(startNodes.size).toBe(1); + expect(startNodes).toContainEqual(node); }); // ►► @@ -246,8 +246,8 @@ describe('findStartNodes', () => { }); // ASSERT - expect(startNodes).toHaveLength(1); - expect(startNodes[0]).toEqual(node); + expect(startNodes.size).toBe(1); + expect(startNodes).toContainEqual(node); }); // ►► @@ -286,8 +286,8 @@ describe('findStartNodes', () => { }); // ASSERT - expect(startNodes).toHaveLength(1); - expect(startNodes[0]).toEqual(node); + expect(startNodes.size).toBe(1); + expect(startNodes).toContainEqual(node); }); // ►► @@ -324,8 +324,8 @@ describe('findStartNodes', () => { }); // ASSERT - expect(startNodes).toHaveLength(1); - expect(startNodes[0]).toEqual(node3); + expect(startNodes.size).toBe(1); + expect(startNodes).toContainEqual(node3); }); // ►► @@ -360,8 +360,8 @@ describe('findStartNodes', () => { }); // ASSERT - expect(startNodes).toHaveLength(1); - expect(startNodes[0]).toEqual(node2); + expect(startNodes.size).toBe(1); + expect(startNodes).toContainEqual(node2); }); // ►► @@ -392,7 +392,7 @@ describe('findStartNodes', () => { const startNodes = findStartNodes({ graph, trigger, destination: node2, runData, pinData }); // ASSERT - expect(startNodes).toHaveLength(1); - expect(startNodes[0]).toEqual(node2); + expect(startNodes.size).toBe(1); + expect(startNodes).toContainEqual(node2); }); }); diff --git a/packages/core/src/PartialExecutionUtils/__tests__/handleCycles.test.ts b/packages/core/src/PartialExecutionUtils/__tests__/handleCycles.test.ts new file mode 100644 index 0000000000..def9fed0ff --- /dev/null +++ b/packages/core/src/PartialExecutionUtils/__tests__/handleCycles.test.ts @@ -0,0 +1,116 @@ +// NOTE: Diagrams in this file have been created with https://asciiflow.com/#/ +// If you update the tests, please update the diagrams as well. +// If you add a test, please create a new diagram. 
+// +// Map +// 0 means the output has no run data +// 1 means the output has run data +// ►► denotes the node that the user wants to execute to +// XX denotes that the node is disabled +// PD denotes that the node has pinned data + +import { createNodeData } from './helpers'; +import { DirectedGraph } from '../DirectedGraph'; +import { handleCycles } from '../handleCycles'; + +describe('handleCycles', () => { + // ┌────┐ ┌─────────┐ + //┌───────┐ │ ├──────────►afterLoop│ + //│trigger├────┬───►loop│ └─────────┘ + //└───────┘ │ │ ├─┐ ►► + // │ └────┘ │ ┌──────┐ + // │ └───►inLoop├────┐ + // │ └──────┘ │ + // │ │ + // └──────────────────────────┘ + test('if the start node is within a cycle it returns the start of the cycle as the new start node', () => { + // ARRANGE + const trigger = createNodeData({ name: 'trigger' }); + const loop = createNodeData({ name: 'loop' }); + const inLoop = createNodeData({ name: 'inLoop' }); + const afterLoop = createNodeData({ name: 'afterLoop' }); + const graph = new DirectedGraph() + .addNodes(trigger, loop, inLoop, afterLoop) + .addConnections( + { from: trigger, to: loop }, + { from: loop, outputIndex: 0, to: afterLoop }, + { from: loop, outputIndex: 1, to: inLoop }, + { from: inLoop, to: loop }, + ); + const startNodes = new Set([inLoop]); + + // ACT + const newStartNodes = handleCycles(graph, startNodes, trigger); + + // ASSERT + expect(newStartNodes.size).toBe(1); + expect(newStartNodes).toContainEqual(loop); + }); + + // ┌────┐ ┌─────────┐ + //┌───────┐ │ ├──────────►afterLoop│ + //│trigger├────┬───►loop│ └─────────┘ + //└───────┘ │ │ ├─┐ ►► + // │ └────┘ │ ┌──────┐ + // │ └───►inLoop├────┐ + // │ └──────┘ │ + // │ │ + // └──────────────────────────┘ + test('does not mutate `startNodes`', () => { + // ARRANGE + const trigger = createNodeData({ name: 'trigger' }); + const loop = createNodeData({ name: 'loop' }); + const inLoop = createNodeData({ name: 'inLoop' }); + const afterLoop = createNodeData({ name: 'afterLoop' }); + const graph = new DirectedGraph() + .addNodes(trigger, loop, inLoop, afterLoop) + .addConnections( + { from: trigger, to: loop }, + { from: loop, outputIndex: 0, to: afterLoop }, + { from: loop, outputIndex: 1, to: inLoop }, + { from: inLoop, to: loop }, + ); + const startNodes = new Set([inLoop]); + + // ACT + handleCycles(graph, startNodes, trigger); + + // ASSERT + expect(startNodes.size).toBe(1); + expect(startNodes).toContainEqual(inLoop); + }); + + // ►► + // ┌────┐ ┌─────────┐ + //┌───────┐ │ ├──────────►afterLoop│ + //│trigger├────┬───►loop│ └─────────┘ + //└───────┘ │ │ ├─┐ + // │ └────┘ │ ┌──────┐ + // │ └───►inLoop├────┐ + // │ └──────┘ │ + // │ │ + // └──────────────────────────┘ + test('if the start node is not within a cycle it returns the same node as the new start node', () => { + // ARRANGE + const trigger = createNodeData({ name: 'trigger' }); + const loop = createNodeData({ name: 'loop' }); + const inLoop = createNodeData({ name: 'inLoop' }); + const afterLoop = createNodeData({ name: 'afterLoop' }); + const graph = new DirectedGraph() + .addNodes(trigger, loop, inLoop, afterLoop) + .addConnections( + { from: trigger, to: loop }, + { from: loop, outputIndex: 0, to: afterLoop }, + { from: loop, outputIndex: 1, to: inLoop }, + { from: inLoop, to: loop }, + ); + const startNodes = new Set([afterLoop]); + + // ACT + const newStartNodes = handleCycles(graph, startNodes, trigger); + + // ASSERT + expect(newStartNodes.size).toBe(1); + expect(newStartNodes).toContainEqual(afterLoop); + }); +}); diff --git 
a/packages/core/src/PartialExecutionUtils/__tests__/recreateNodeExecutionStack.test.ts b/packages/core/src/PartialExecutionUtils/__tests__/recreateNodeExecutionStack.test.ts index a4bcac23a5..8bae766912 100644 --- a/packages/core/src/PartialExecutionUtils/__tests__/recreateNodeExecutionStack.test.ts +++ b/packages/core/src/PartialExecutionUtils/__tests__/recreateNodeExecutionStack.test.ts @@ -33,7 +33,7 @@ describe('recreateNodeExecutionStack', () => { .addConnections({ from: trigger, to: node }); const workflow = findSubgraph({ graph, destination: node, trigger }); - const startNodes = [node]; + const startNodes = new Set([node]); const runData: IRunData = { [trigger.name]: [toITaskData([{ data: { value: 1 } }])], }; @@ -87,7 +87,7 @@ describe('recreateNodeExecutionStack', () => { const workflow = new DirectedGraph() .addNodes(trigger, node) .addConnections({ from: trigger, to: node }); - const startNodes = [trigger]; + const startNodes = new Set([trigger]); const runData: IRunData = {}; const pinData: IPinData = {}; @@ -121,7 +121,7 @@ describe('recreateNodeExecutionStack', () => { const workflow = new DirectedGraph() .addNodes(trigger, node) .addConnections({ from: trigger, to: node }); - const startNodes = [node]; + const startNodes = new Set([node]); const runData: IRunData = {}; const pinData: IPinData = { [trigger.name]: [{ json: { value: 1 } }], @@ -169,7 +169,7 @@ describe('recreateNodeExecutionStack', () => { .addNodes(trigger, node1, node2) .addConnections({ from: trigger, to: node1 }, { from: node1, to: node2 }); - const startNodes = [node2]; + const startNodes = new Set([node2]); const runData: IRunData = { [trigger.name]: [toITaskData([{ data: { value: 1 } }])], }; @@ -204,7 +204,7 @@ describe('recreateNodeExecutionStack', () => { { from: node2, to: node3 }, ); - const startNodes = [node3]; + const startNodes = new Set([node3]); const runData: IRunData = { [trigger.name]: [toITaskData([{ data: { value: 1 } }])], [node1.name]: [toITaskData([{ data: { value: 1 } }])], @@ -287,7 +287,7 @@ describe('recreateNodeExecutionStack', () => { { from: node1, to: node3, inputIndex: 0 }, { from: node2, to: node3, inputIndex: 1 }, ); - const startNodes = [node3]; + const startNodes = new Set([node3]); const runData: IRunData = { [trigger.name]: [toITaskData([{ data: { value: 1 } }])], [node1.name]: [toITaskData([{ data: { value: 1 } }])], diff --git a/packages/core/src/PartialExecutionUtils/cleanRunData.ts b/packages/core/src/PartialExecutionUtils/cleanRunData.ts index 5d74a3575a..bcd60c423b 100644 --- a/packages/core/src/PartialExecutionUtils/cleanRunData.ts +++ b/packages/core/src/PartialExecutionUtils/cleanRunData.ts @@ -10,7 +10,7 @@ import type { DirectedGraph } from './DirectedGraph'; export function cleanRunData( runData: IRunData, graph: DirectedGraph, - startNodes: INode[], + startNodes: Set<INode>, ): IRunData { const newRunData: IRunData = { ...runData }; diff --git a/packages/core/src/PartialExecutionUtils/findCycles.ts b/packages/core/src/PartialExecutionUtils/findCycles.ts deleted file mode 100644 index 388518ae52..0000000000 --- a/packages/core/src/PartialExecutionUtils/findCycles.ts +++ /dev/null @@ -1,6 +0,0 @@ -import type { Workflow } from 'n8n-workflow'; - -export function findCycles(_workflow: Workflow) { - // TODO: implement depth first search or Tarjan's Algorithm - return []; -} diff --git a/packages/core/src/PartialExecutionUtils/findStartNodes.ts b/packages/core/src/PartialExecutionUtils/findStartNodes.ts index a6165f6564..5eb036bd88 100644 ---
a/packages/core/src/PartialExecutionUtils/findStartNodes.ts +++ b/packages/core/src/PartialExecutionUtils/findStartNodes.ts @@ -137,7 +137,7 @@ export function findStartNodes(options: { destination: INode; runData?: IRunData; pinData?: IPinData; -}): INode[] { +}): Set<INode> { const graph = options.graph; const trigger = options.trigger; const destination = options.destination; @@ -156,5 +156,5 @@ export function findStartNodes(options: { new Set(), ); - return [...startNodes]; + return startNodes; } diff --git a/packages/core/src/PartialExecutionUtils/handleCycles.ts b/packages/core/src/PartialExecutionUtils/handleCycles.ts new file mode 100644 index 0000000000..94a8ae8cbc --- /dev/null +++ b/packages/core/src/PartialExecutionUtils/handleCycles.ts @@ -0,0 +1,56 @@ +import type { INode } from 'n8n-workflow'; +import * as a from 'node:assert/strict'; + +import type { DirectedGraph } from './DirectedGraph'; + +/** + * Returns a new set of start nodes. + * + * For every start node this checks if it is part of a cycle and, if so, + * replaces the start node with the start of the cycle. + * + * This is useful because it prevents executing cycles partially, e.g. figuring + * out which run of the cycle has to be repeated. + */ +export function handleCycles( + graph: DirectedGraph, + startNodes: Set<INode>, + trigger: INode, +): Set<INode> { + // Strongly connected components can also be nodes that are not part of a + // cycle. They form a strongly connected component of size one. E.g. the trigger is + // always a strongly connected component by itself because it does not have + // any inputs and thus cannot build a cycle. + // + // We're not interested in them so we filter them out. + const cycles = graph.getStronglyConnectedComponents().filter((cycle) => cycle.size > 1); + const newStartNodes: Set<INode> = new Set(startNodes); + + // For each start node, check if the node is part of a cycle and if it is + // replace the start node with the start of the cycle.
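+ // If the graph contains no cycles there is nothing to replace.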
+ if (cycles.length === 0) { + return newStartNodes; + } + + for (const startNode of startNodes) { + for (const cycle of cycles) { + const isPartOfCycle = cycle.has(startNode); + if (isPartOfCycle) { + const firstNode = graph.depthFirstSearch({ + from: trigger, + fn: (node) => cycle.has(node), + }); + + a.ok( + firstNode, + "the trigger must be connected to the cycle, otherwise the cycle wouldn't be part of the subgraph", + ); + + newStartNodes.delete(startNode); + newStartNodes.add(firstNode); + } + } + } + + return newStartNodes; +} diff --git a/packages/core/src/PartialExecutionUtils/index.ts b/packages/core/src/PartialExecutionUtils/index.ts index 6a6f1a233a..cea8ded9b9 100644 --- a/packages/core/src/PartialExecutionUtils/index.ts +++ b/packages/core/src/PartialExecutionUtils/index.ts @@ -2,5 +2,6 @@ export { DirectedGraph } from './DirectedGraph'; export { findTriggerForPartialExecution } from './findTriggerForPartialExecution'; export { findStartNodes } from './findStartNodes'; export { findSubgraph } from './findSubgraph'; -export { findCycles } from './findCycles'; export { recreateNodeExecutionStack } from './recreateNodeExecutionStack'; +export { cleanRunData } from './cleanRunData'; +export { handleCycles } from './handleCycles'; diff --git a/packages/core/src/PartialExecutionUtils/recreateNodeExecutionStack.ts b/packages/core/src/PartialExecutionUtils/recreateNodeExecutionStack.ts index 4926becb79..534969f960 100644 --- a/packages/core/src/PartialExecutionUtils/recreateNodeExecutionStack.ts +++ b/packages/core/src/PartialExecutionUtils/recreateNodeExecutionStack.ts @@ -32,7 +32,7 @@ import { getSourceDataGroups } from './getSourceDataGroups'; */ export function recreateNodeExecutionStack( graph: DirectedGraph, - startNodes: INode[], + startNodes: Set<INode>, destinationNode: INode, runData: IRunData, pinData: IPinData, diff --git a/packages/core/src/WorkflowExecute.ts b/packages/core/src/WorkflowExecute.ts index ec5963a54b..1d9aee76c6 100644 --- a/packages/core/src/WorkflowExecute.ts +++ b/packages/core/src/WorkflowExecute.ts @@ -51,13 +51,13 @@ import PCancelable from 'p-cancelable'; import * as NodeExecuteFunctions from './NodeExecuteFunctions'; import { DirectedGraph, - findCycles, findStartNodes, findSubgraph, findTriggerForPartialExecution, + cleanRunData, + recreateNodeExecutionStack, + handleCycles, } from './PartialExecutionUtils'; -import { cleanRunData } from './PartialExecutionUtils/cleanRunData'; -import { recreateNodeExecutionStack } from './PartialExecutionUtils/recreateNodeExecutionStack'; export class WorkflowExecute { private status: ExecutionStatus = 'new'; @@ -352,15 +352,11 @@ export class WorkflowExecute { const filteredNodes = subgraph.getNodes(); // 3. Find the Start Nodes - const startNodes = findStartNodes({ graph: subgraph, trigger, destination, runData }); + let startNodes = findStartNodes({ graph: subgraph, trigger, destination, runData }); // 4. Detect Cycles - const cycles = findCycles(workflow); - // 5. Handle Cycles - if (cycles.length) { - // TODO: handle - } + startNodes = handleCycles(graph, startNodes, trigger); // 6.
Clean Run Data const newRunData: IRunData = cleanRunData(runData, graph, startNodes); diff --git a/packages/design-system/package.json b/packages/design-system/package.json index 36fcf31528..6b59977d56 100644 --- a/packages/design-system/package.json +++ b/packages/design-system/package.json @@ -1,6 +1,6 @@ { "name": "n8n-design-system", - "version": "1.53.0", + "version": "1.54.0", "main": "src/main.ts", "import": "src/main.ts", "scripts": { diff --git a/packages/design-system/src/components/N8nActionDropdown/ActionDropdown.vue b/packages/design-system/src/components/N8nActionDropdown/ActionDropdown.vue index 7284dea7dd..4b3f4d7df4 100644 --- a/packages/design-system/src/components/N8nActionDropdown/ActionDropdown.vue +++ b/packages/design-system/src/components/N8nActionDropdown/ActionDropdown.vue @@ -117,6 +117,9 @@ defineExpose({ open, close }); {{ item.label }} + + {{ item.badge }} + (), { tooltipPlacement: 'top', }); -const iconData = computed((): { icon: string; color: string } => { - switch (props.theme) { - case 'info': - return { - icon: 'info-circle', - color: '--color-text-light)', - }; - case 'info-light': - return { - icon: 'info-circle', - color: 'var(--color-foreground-dark)', - }; - case 'warning': - return { - icon: 'exclamation-triangle', - color: 'var(--color-warning)', - }; - case 'danger': - return { - icon: 'exclamation-triangle', - color: 'var(--color-danger)', - }; - case 'success': - return { - icon: 'check-circle', - color: 'var(--color-success)', - }; - default: - return { - icon: 'info-circle', - color: '--color-text-light)', - }; - } +const iconData = computed<{ icon: IconMap[keyof IconMap]; color: IconColor }>(() => { + return { + icon: ICON_MAP[props.theme], + color: props.theme === 'info' || props.theme === 'info-light' ? 'text-base' : props.theme, + } as const; }); @@ -69,14 +53,16 @@ const iconData = computed((): { icon: string; color: string } => { [$style.bold]: bold, }" > + - - + + - + diff --git a/packages/design-system/src/components/N8nInfoTip/__tests__/__snapshots__/InfoTip.spec.ts.snap b/packages/design-system/src/components/N8nInfoTip/__tests__/__snapshots__/InfoTip.spec.ts.snap index 4bbaaa8b5d..ef7db8dfe8 100644 --- a/packages/design-system/src/components/N8nInfoTip/__tests__/__snapshots__/InfoTip.spec.ts.snap +++ b/packages/design-system/src/components/N8nInfoTip/__tests__/__snapshots__/InfoTip.spec.ts.snap @@ -1,9 +1,16 @@ // Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html -exports[`N8nInfoTip > should render correctly as note 1`] = `"
Need help doing something?Open docs
"`; +exports[`N8nInfoTip > should render correctly as note 1`] = ` +"
+ Need help doing something?Open docs +
" +`; exports[`N8nInfoTip > should render correctly as tooltip 1`] = ` -"
+"
+
" diff --git a/packages/design-system/src/components/N8nMenuItem/MenuItem.vue b/packages/design-system/src/components/N8nMenuItem/MenuItem.vue index 03fdb7e182..e9a34c7e04 100644 --- a/packages/design-system/src/components/N8nMenuItem/MenuItem.vue +++ b/packages/design-system/src/components/N8nMenuItem/MenuItem.vue @@ -104,7 +104,7 @@ const isItemActive = (item: IMenuItem): boolean => { diff --git a/packages/design-system/src/composables/useDeviceSupport.ts b/packages/design-system/src/composables/useDeviceSupport.ts index 63d5549dcc..8f713ffd30 100644 --- a/packages/design-system/src/composables/useDeviceSupport.ts +++ b/packages/design-system/src/composables/useDeviceSupport.ts @@ -28,6 +28,7 @@ export function useDeviceSupport() { } return { + userAgent: userAgent.value, isTouchDevice: isTouchDevice.value, isMacOs: isMacOs.value, controlKeyCode: controlKeyCode.value, diff --git a/packages/design-system/src/types/action-dropdown.ts b/packages/design-system/src/types/action-dropdown.ts index 9ea96de909..87872e1411 100644 --- a/packages/design-system/src/types/action-dropdown.ts +++ b/packages/design-system/src/types/action-dropdown.ts @@ -3,6 +3,7 @@ import type { KeyboardShortcut } from 'n8n-design-system/types/keyboardshortcut' export interface ActionDropdownItem { id: string; label: string; + badge?: string; icon?: string; divided?: boolean; disabled?: boolean; diff --git a/packages/design-system/src/types/icon.ts b/packages/design-system/src/types/icon.ts index b16116e4c4..e2d7a36634 100644 --- a/packages/design-system/src/types/icon.ts +++ b/packages/design-system/src/types/icon.ts @@ -1,8 +1,9 @@ +import type { TextColor } from 'n8n-design-system/types/text'; + const ICON_SIZE = ['xsmall', 'small', 'medium', 'large'] as const; export type IconSize = (typeof ICON_SIZE)[number]; -const ICON_COLOR = ['primary', 'danger', 'success', 'warning', 'text-base'] as const; -export type IconColor = (typeof ICON_COLOR)[number]; +export type IconColor = TextColor; const ICON_ORIENTATION = ['horizontal', 'vertical'] as const; export type IconOrientation = (typeof ICON_ORIENTATION)[number]; diff --git a/packages/editor-ui/package.json b/packages/editor-ui/package.json index cdb9814101..69d52a8138 100644 --- a/packages/editor-ui/package.json +++ b/packages/editor-ui/package.json @@ -1,6 +1,6 @@ { "name": "n8n-editor-ui", - "version": "1.63.0", + "version": "1.64.0", "description": "Workflow Editor UI for n8n", "main": "index.js", "scripts": { @@ -14,7 +14,7 @@ "format:check": "biome ci . && prettier --check . 
--ignore-path ../../.prettierignore", "serve": "cross-env VUE_APP_URL_BASE_API=http://localhost:5678/ vite --host 0.0.0.0 --port 8080 dev", "test": "vitest run", - "test:dev": "vitest" + "test:dev": "vitest --silent=false" }, "dependencies": { "@codemirror/autocomplete": "^6.16.0", diff --git a/packages/editor-ui/src/api/cloudPlans.ts b/packages/editor-ui/src/api/cloudPlans.ts index f7daa4b0a1..821c1ce1a3 100644 --- a/packages/editor-ui/src/api/cloudPlans.ts +++ b/packages/editor-ui/src/api/cloudPlans.ts @@ -13,7 +13,7 @@ export async function getCloudUserInfo(context: IRestApiContext): Promise { +export async function sendConfirmationEmail(context: IRestApiContext): Promise { return await post(context.baseUrl, '/cloud/proxy/user/resend-confirmation-email'); } diff --git a/packages/editor-ui/src/api/mfa.ts b/packages/editor-ui/src/api/mfa.ts index 09cfb84df4..0cce31c96d 100644 --- a/packages/editor-ui/src/api/mfa.ts +++ b/packages/editor-ui/src/api/mfa.ts @@ -1,6 +1,10 @@ import type { IRestApiContext } from '@/Interface'; import { makeRestApiRequest } from '@/utils/apiUtils'; +export async function canEnableMFA(context: IRestApiContext) { + return await makeRestApiRequest(context, 'POST', '/mfa/can-enable'); +} + export async function getMfaQR( context: IRestApiContext, ): Promise<{ qrCode: string; secret: string; recoveryCodes: string[] }> { diff --git a/packages/editor-ui/src/components/ExpressionEditModal.vue b/packages/editor-ui/src/components/ExpressionEditModal.vue index 1f9f0b75fe..934876f75e 100644 --- a/packages/editor-ui/src/components/ExpressionEditModal.vue +++ b/packages/editor-ui/src/components/ExpressionEditModal.vue @@ -78,14 +78,14 @@ watch( void externalHooks.run('expressionEdit.dialogVisibleChanged', { dialogVisible: newValue, parameter: props.parameter, - value: props.modelValue, + value: props.modelValue.toString(), resolvedExpressionValue, }); if (!newValue) { const telemetryPayload = createExpressionTelemetryPayload( segments.value, - props.modelValue, + props.modelValue.toString(), workflowsStore.workflowId, ndvStore.pushRef, ndvStore.activeNode?.type ?? 
'', diff --git a/packages/editor-ui/src/components/JsonEditor/JsonEditor.vue b/packages/editor-ui/src/components/JsonEditor/JsonEditor.vue index 650aa8f48b..a24bf2373f 100644 --- a/packages/editor-ui/src/components/JsonEditor/JsonEditor.vue +++ b/packages/editor-ui/src/components/JsonEditor/JsonEditor.vue @@ -111,18 +111,18 @@ function destroyEditor() { diff --git a/packages/editor-ui/src/components/KeyboardShortcutTooltip.vue b/packages/editor-ui/src/components/KeyboardShortcutTooltip.vue index 1c006d4050..56625f004f 100644 --- a/packages/editor-ui/src/components/KeyboardShortcutTooltip.vue +++ b/packages/editor-ui/src/components/KeyboardShortcutTooltip.vue @@ -4,10 +4,10 @@ import type { Placement } from 'element-plus'; interface Props { label: string; - shortcut: KeyboardShortcut; + shortcut?: KeyboardShortcut; placement?: Placement; } -withDefaults(defineProps<Props>(), { placement: 'top' }); +withDefaults(defineProps<Props>(), { placement: 'top', shortcut: undefined }); @@ -203,6 +213,15 @@ onMounted(async () => { .collapsed { text-transform: uppercase; } + +.projectsLabel { + margin: 0 var(--spacing-xs) var(--spacing-s); + padding: 0 var(--spacing-s); + text-overflow: ellipsis; + overflow: hidden; + box-sizing: border-box; + color: var(--color-text-base); +} diff --git a/packages/editor-ui/src/components/Workers/WorkerCard.ee.vue b/packages/editor-ui/src/components/Workers/WorkerCard.ee.vue index c8c7c222c9..75096b7639 100644 --- a/packages/editor-ui/src/components/Workers/WorkerCard.ee.vue +++ b/packages/editor-ui/src/components/Workers/WorkerCard.ee.vue @@ -7,6 +7,7 @@ import { averageWorkerLoadFromLoadsAsString, memAsGb } from '../../utils/workerU import WorkerJobAccordion from './WorkerJobAccordion.ee.vue'; import WorkerNetAccordion from './WorkerNetAccordion.ee.vue'; import WorkerChartsAccordion from './WorkerChartsAccordion.ee.vue'; +import { sortByProperty } from '@/utils/sortUtils'; let interval: NodeJS.Timer; @@ -23,8 +24,8 @@ const worker = computed((): WorkerStatus | undefined => { return orchestrationStore.getWorkerStatus(props.workerId); }); -const sortedWorkerInterfaces = computed( - () => worker.value?.interfaces.toSorted((a, b) => a.family.localeCompare(b.family)) ?? [], +const sortedWorkerInterfaces = computed(() => + sortByProperty('family', worker.value?.interfaces.slice() ??
[]), ); function upTime(seconds: number): string { diff --git a/packages/editor-ui/src/components/__tests__/BannersStack.test.ts b/packages/editor-ui/src/components/__tests__/BannersStack.test.ts index 71955e8739..468a20232d 100644 --- a/packages/editor-ui/src/components/__tests__/BannersStack.test.ts +++ b/packages/editor-ui/src/components/__tests__/BannersStack.test.ts @@ -105,7 +105,7 @@ describe('BannerStack', () => { }, }), }); - const confirmEmailSpy = vi.spyOn(useUsersStore(), 'confirmEmail'); + const confirmEmailSpy = vi.spyOn(useUsersStore(), 'sendConfirmationEmail'); getByTestId('confirm-email-button').click(); await waitFor(() => expect(confirmEmailSpy).toHaveBeenCalled()); await waitFor(() => { @@ -125,9 +125,11 @@ describe('BannerStack', () => { }, }), }); - const confirmEmailSpy = vi.spyOn(useUsersStore(), 'confirmEmail').mockImplementation(() => { - throw new Error(ERROR_MESSAGE); - }); + const confirmEmailSpy = vi + .spyOn(useUsersStore(), 'sendConfirmationEmail') + .mockImplementation(() => { + throw new Error(ERROR_MESSAGE); + }); getByTestId('confirm-email-button').click(); await waitFor(() => expect(confirmEmailSpy).toHaveBeenCalled()); await waitFor(() => { diff --git a/packages/editor-ui/src/components/__tests__/RunData.test.ts b/packages/editor-ui/src/components/__tests__/RunData.test.ts index c9ea213e8c..042062a9c1 100644 --- a/packages/editor-ui/src/components/__tests__/RunData.test.ts +++ b/packages/editor-ui/src/components/__tests__/RunData.test.ts @@ -6,10 +6,11 @@ import RunData from '@/components/RunData.vue'; import { SET_NODE_TYPE, STORES, VIEWS } from '@/constants'; import { SETTINGS_STORE_DEFAULT_STATE } from '@/__tests__/utils'; import { createComponentRenderer } from '@/__tests__/render'; -import type { INodeUi, IRunDataDisplayMode } from '@/Interface'; +import type { INodeUi, IRunDataDisplayMode, NodePanelType } from '@/Interface'; import { useWorkflowsStore } from '@/stores/workflows.store'; import { setActivePinia } from 'pinia'; import { defaultNodeTypes } from '@/__tests__/mocks'; +import type { INodeExecutionData } from 'n8n-workflow'; const nodes = [ { @@ -23,6 +24,47 @@ const nodes = [ ] as INodeUi[]; describe('RunData', () => { + it("should render pin button in output panel disabled when there's binary data", () => { + const { getByTestId } = render( + [ + { + json: {}, + binary: { + data: { + fileName: 'test.xyz', + mimeType: 'application/octet-stream', + }, + }, + }, + ], + 'binary', + ); + + expect(getByTestId('ndv-pin-data')).toBeInTheDocument(); + expect(getByTestId('ndv-pin-data')).toHaveAttribute('disabled'); + }); + + it("should not render pin button in input panel when there's binary data", () => { + const { queryByTestId } = render( + [ + { + json: {}, + binary: { + data: { + fileName: 'test.xyz', + mimeType: 'application/octet-stream', + }, + }, + }, + ], + 'binary', + undefined, + 'input', + ); + + expect(queryByTestId('ndv-pin-data')).not.toBeInTheDocument(); + }); + it('should render data correctly even when "item.json" has another "json" key', async () => { const { getByText, getAllByTestId, getByTestId } = render( [ @@ -95,7 +137,69 @@ describe('RunData', () => { expect(getByTestId('ndv-binary-data_0')).toBeInTheDocument(); }); - const render = (outputData: unknown[], displayMode: IRunDataDisplayMode) => { + it('should not render pin data button when there is no output data', async () => { + const { queryByTestId } = render([], 'table'); + expect(queryByTestId('ndv-pin-data')).not.toBeInTheDocument(); + }); + + it('should disable 
pin data button when data is pinned', async () => { + const { getByTestId } = render([], 'table', [{ json: { name: 'Test' } }]); + const pinDataButton = getByTestId('ndv-pin-data'); + expect(pinDataButton).toBeDisabled(); + }); + + it('should enable pin data button when data is not pinned', async () => { + const { getByTestId } = render([{ json: { name: 'Test' } }], 'table'); + const pinDataButton = getByTestId('ndv-pin-data'); + expect(pinDataButton).toBeEnabled(); + }); + + it('should not render pagination on binary tab', async () => { + const { queryByTestId } = render( + Array.from({ length: 11 }).map((_, i) => ({ + json: { + data: { + id: i, + name: `Test ${i}`, + }, + }, + binary: { + data: { + a: 'b', + }, + }, + })), + 'binary', + ); + expect(queryByTestId('ndv-data-pagination')).not.toBeInTheDocument(); + }); + + it('should render pagination with binary data on non-binary tab', async () => { + const { getByTestId } = render( + Array.from({ length: 11 }).map((_, i) => ({ + json: { + data: { + id: i, + name: `Test ${i}`, + }, + }, + binary: { + data: { + a: 'b', + }, + }, + })), + 'json', + ); + expect(getByTestId('ndv-data-pagination')).toBeInTheDocument(); + }); + + const render = ( + outputData: unknown[], + displayMode: IRunDataDisplayMode, + pinnedData?: INodeExecutionData[], + paneType: NodePanelType = 'output', + ) => { const pinia = createTestingPinia({ initialState: { [STORES.SETTINGS]: { @@ -154,12 +258,18 @@ describe('RunData', () => { const workflowsStore = useWorkflowsStore(); vi.mocked(workflowsStore).getNodeByName.mockReturnValue(nodes[0]); + if (pinnedData) { + vi.mocked(workflowsStore).pinDataByNodeName.mockReturnValue(pinnedData); + } return createComponentRenderer(RunData, { props: { node: { name: 'Test Node', }, + workflow: { + nodes, + }, }, data() { return { @@ -168,6 +278,9 @@ describe('RunData', () => { }; }, global: { + stubs: { + RunDataPinButton: { template: '' }, + }, mocks: { $route: { name: VIEWS.WORKFLOW, @@ -183,7 +296,7 @@ describe('RunData', () => { }, nodes: [{ name: 'Test Node', indicies: [], depth: 1 }], runIndex: 0, - paneType: 'output', + paneType, isExecuting: false, mappingEnabled: true, distanceFromActive: 0, diff --git a/packages/editor-ui/src/components/banners/EmailConfirmationBanner.vue b/packages/editor-ui/src/components/banners/EmailConfirmationBanner.vue index 45b3d8c425..72d3f6e923 100644 --- a/packages/editor-ui/src/components/banners/EmailConfirmationBanner.vue +++ b/packages/editor-ui/src/components/banners/EmailConfirmationBanner.vue @@ -14,7 +14,7 @@ const userEmail = computed(() => { async function onConfirmEmailClick() { try { - await useUsersStore().confirmEmail(); + await useUsersStore().sendConfirmationEmail(); toast.showMessage({ type: 'success', title: locale.baseText('banners.confirmEmail.toast.success.heading'), diff --git a/packages/editor-ui/src/components/canvas/Canvas.spec.ts b/packages/editor-ui/src/components/canvas/Canvas.spec.ts index 5ab4c6e96e..8e53425532 100644 --- a/packages/editor-ui/src/components/canvas/Canvas.spec.ts +++ b/packages/editor-ui/src/components/canvas/Canvas.spec.ts @@ -207,4 +207,17 @@ describe('Canvas', () => { await waitFor(() => expect(getByTestId('canvas-minimap')).not.toBeVisible()); }); }); + + describe('background', () => { + it('should render default background', () => { + const { container } = renderComponent(); + expect(container.querySelector('#pattern-canvas')).toBeInTheDocument(); + }); + + it('should render striped background', () => { + const { container } = renderComponent({ 
props: { readOnly: true } }); + expect(container.querySelector('#pattern-canvas')).not.toBeInTheDocument(); + expect(container.querySelector('#diagonalHatch')).toBeInTheDocument(); + }); + }); }); diff --git a/packages/editor-ui/src/components/canvas/Canvas.vue b/packages/editor-ui/src/components/canvas/Canvas.vue index 64a9e5647a..424ac853e0 100644 --- a/packages/editor-ui/src/components/canvas/Canvas.vue +++ b/packages/editor-ui/src/components/canvas/Canvas.vue @@ -18,7 +18,17 @@ import { Background } from '@vue-flow/background'; import { MiniMap } from '@vue-flow/minimap'; import Node from './elements/nodes/CanvasNode.vue'; import Edge from './elements/edges/CanvasEdge.vue'; -import { computed, onMounted, onUnmounted, provide, ref, toRef, useCssModule, watch } from 'vue'; +import { + computed, + nextTick, + onMounted, + onUnmounted, + provide, + ref, + toRef, + useCssModule, + watch, +} from 'vue'; import type { EventBus } from 'n8n-design-system'; import { createEventBus } from 'n8n-design-system'; import { useContextMenu, type ContextMenuAction } from '@/composables/useContextMenu'; @@ -31,6 +41,8 @@ import { GRID_SIZE } from '@/utils/nodeViewUtils'; import { CanvasKey } from '@/constants'; import { onKeyDown, onKeyUp, useDebounceFn } from '@vueuse/core'; import CanvasArrowHeadMarker from './elements/edges/CanvasArrowHeadMarker.vue'; +import { CanvasNodeRenderType } from '@/types'; +import CanvasBackgroundStripedPattern from './elements/CanvasBackgroundStripedPattern.vue'; const $style = useCssModule(); @@ -80,6 +92,7 @@ const props = withDefaults( readOnly?: boolean; executing?: boolean; keyBindings?: boolean; + showBugReportingButton?: boolean; }>(), { id: 'canvas', @@ -108,6 +121,8 @@ const { nodes: graphNodes, onPaneReady, findNode, + onNodesInitialized, + viewport, } = useVueFlow({ id: props.id, deleteKeyCode: null }); const isPaneReady = ref(false); @@ -127,15 +142,19 @@ const disableKeyBindings = computed(() => !props.keyBindings); /** * @see https://developer.mozilla.org/en-US/docs/Web/API/UI_Events/Keyboard_event_key_values#whitespace_keys */ -const panningKeyCode = ' '; + const isPanningEnabled = ref(false); +const panningKeyCode = ' '; +const selectionKeyCode = ref(true); onKeyDown(panningKeyCode, () => { isPanningEnabled.value = true; + selectionKeyCode.value = null; }); onKeyUp(panningKeyCode, () => { isPanningEnabled.value = false; + selectionKeyCode.value = true; }); const keyMap = computed(() => ({ @@ -167,11 +186,23 @@ const keyMap = computed(() => ({ useKeybindings(keyMap, { disabled: disableKeyBindings }); +/** + * When the window is focused, the selection key code is lost. + * We trigger a value refresh to ensure that the selection key code is set correctly again. + * + * @issue https://linear.app/n8n/issue/N8N-7843/selection-keycode-gets-unset-when-changing-tabs + */ +function resetSelectionKeyCode() { + selectionKeyCode.value = null; + void nextTick(() => { + selectionKeyCode.value = true; + }); +} + /** * Nodes */ -const selectionKeyCode = computed(() => (isPanningEnabled.value ? 
null : true)); const lastSelectedNode = computed(() => selectedNodes.value[selectedNodes.value.length - 1]); const hasSelection = computed(() => selectedNodes.value.length > 0); const selectedNodeIds = computed(() => selectedNodes.value.map((node) => node.id)); @@ -467,11 +498,13 @@ function onMinimapMouseLeave() { onMounted(() => { props.eventBus.on('fitView', onFitView); props.eventBus.on('nodes:select', onSelectNodes); + window.addEventListener('focus', resetSelectionKeyCode); }); onUnmounted(() => { props.eventBus.off('fitView', onFitView); props.eventBus.off('nodes:select', onSelectNodes); + window.removeEventListener('focus', resetSelectionKeyCode); }); onPaneReady(async () => { @@ -479,6 +512,11 @@ onPaneReady(async () => { isPaneReady.value = true; }); +onNodesInitialized((nodes) => { + if (nodes.length !== 1 || nodes[0].data?.render.type !== CanvasNodeRenderType.AddNodes) return; + void onFitView(); +}); + watch(() => props.readOnly, setReadonly, { immediate: true, }); @@ -555,7 +593,11 @@ provide(CanvasKey, { - + + + { workflow, workflowObject, fallbackNodes, + showFallbackNodes: true, }, }); @@ -109,9 +110,8 @@ describe('WorkflowCanvas', () => { expect(container.querySelector(`[data-id="${fallbackNodes[0].id}"]`)).toBeInTheDocument(); }); - it('should not render fallback nodes when non-sticky nodes are present', async () => { - const nonStickyNodes = [createTestNode({ id: '1', name: 'Non-Sticky Node 1' })]; - const stickyNodes = [createTestNode({ id: '2', name: 'Sticky Node', type: STICKY_NODE_TYPE })]; + it('should not render fallback nodes when showFallbackNodes is false', async () => { + const nodes = [createTestNode({ id: '1', name: 'Non-Sticky Node 1' })]; const fallbackNodes = [ createTestNode({ id: CanvasNodeRenderType.AddNodes, @@ -123,7 +123,7 @@ describe('WorkflowCanvas', () => { const workflow = createTestWorkflow({ id: '1', name: 'Test Workflow', - nodes: [...nonStickyNodes, ...stickyNodes], + nodes, connections: {}, }); @@ -134,13 +134,13 @@ describe('WorkflowCanvas', () => { workflow, workflowObject, fallbackNodes, + showFallbackNodes: false, }, }); - await waitFor(() => expect(container.querySelectorAll('.vue-flow__node')).toHaveLength(2)); + await waitFor(() => expect(container.querySelectorAll('.vue-flow__node')).toHaveLength(1)); - expect(container.querySelector(`[data-id="${nonStickyNodes[0].id}"]`)).toBeInTheDocument(); - expect(container.querySelector(`[data-id="${stickyNodes[0].id}"]`)).toBeInTheDocument(); + expect(container.querySelector(`[data-id="${nodes[0].id}"]`)).toBeInTheDocument(); expect(container.querySelector(`[data-id="${fallbackNodes[0].id}"]`)).not.toBeInTheDocument(); }); }); diff --git a/packages/editor-ui/src/components/canvas/WorkflowCanvas.vue b/packages/editor-ui/src/components/canvas/WorkflowCanvas.vue index 319084c1f2..72dc04cbcd 100644 --- a/packages/editor-ui/src/components/canvas/WorkflowCanvas.vue +++ b/packages/editor-ui/src/components/canvas/WorkflowCanvas.vue @@ -7,7 +7,6 @@ import { useCanvasMapping } from '@/composables/useCanvasMapping'; import type { EventBus } from 'n8n-design-system'; import { createEventBus } from 'n8n-design-system'; import type { CanvasEventBusEvents } from '@/types'; -import { STICKY_NODE_TYPE } from '@/constants'; defineOptions({ inheritAttrs: false, @@ -19,14 +18,17 @@ const props = withDefaults( workflow: IWorkflowDb; workflowObject: Workflow; fallbackNodes?: IWorkflowDb['nodes']; + showFallbackNodes?: boolean; eventBus?: EventBus; readOnly?: boolean; executing?: boolean; + showBugReportingButton?: 
boolean; }>(), { id: 'canvas', eventBus: () => createEventBus(), fallbackNodes: () => [], + showFallbackNodes: true, }, ); @@ -36,11 +38,9 @@ const workflow = toRef(props, 'workflow'); const workflowObject = toRef(props, 'workflowObject'); const nodes = computed(() => { - const stickyNoteNodes = props.workflow.nodes.filter((node) => node.type === STICKY_NODE_TYPE); - - return props.workflow.nodes.length > stickyNoteNodes.length - ? props.workflow.nodes - : [...props.fallbackNodes, ...stickyNoteNodes]; + return props.showFallbackNodes + ? [...props.workflow.nodes, ...props.fallbackNodes] + : props.workflow.nodes; }); const connections = computed(() => props.workflow.connections); @@ -52,12 +52,14 @@ const { nodes: mappedNodes, connections: mappedConnections } = useCanvasMapping(