diff --git a/CHANGELOG.md b/CHANGELOG.md index e7d455c95d..9c49ea673f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,43 @@ +# [1.72.0](https://github.com/n8n-io/n8n/compare/n8n@1.71.0...n8n@1.72.0) (2024-12-11) + + +### Bug Fixes + +* Allow disabling MFA with recovery codes ([#12014](https://github.com/n8n-io/n8n/issues/12014)) ([95d56fe](https://github.com/n8n-io/n8n/commit/95d56fee8d0168b75fca6dcf41702d2f10c930a8)) +* Chat triggers don't work with the new partial execution flow ([#11952](https://github.com/n8n-io/n8n/issues/11952)) ([2b6a72f](https://github.com/n8n-io/n8n/commit/2b6a72f1289c01145edf2b88e5027d2b9b2ed624)) +* **core:** Execute nodes after loops correctly with the new partial execution flow ([#11978](https://github.com/n8n-io/n8n/issues/11978)) ([891dd7f](https://github.com/n8n-io/n8n/commit/891dd7f995c78a2355a049b7ced981a5f6b1c40c)) +* **core:** Fix support for multiple invocation of AI tools ([#12141](https://github.com/n8n-io/n8n/issues/12141)) ([c572c06](https://github.com/n8n-io/n8n/commit/c572c0648ca5b644b222157b3cabac9c05704a84)) +* **core:** Make sure task runner exits ([#12123](https://github.com/n8n-io/n8n/issues/12123)) ([c5effca](https://github.com/n8n-io/n8n/commit/c5effca7d47a713f157eea21d7892002e9ab7283)) +* **core:** Remove run data of nodes unrelated to the current partial execution ([#12099](https://github.com/n8n-io/n8n/issues/12099)) ([c4e4d37](https://github.com/n8n-io/n8n/commit/c4e4d37a8785d1a4bcd376cb1c49b82a80aa4391)) +* **core:** Return homeProject when filtering workflows by project id ([#12077](https://github.com/n8n-io/n8n/issues/12077)) ([efafeed](https://github.com/n8n-io/n8n/commit/efafeed33482100a23fa0163a53b9ce93cd6b2c3)) +* **editor:** Don't reset all Parameter Inputs when switched to read-only ([#12063](https://github.com/n8n-io/n8n/issues/12063)) ([706702d](https://github.com/n8n-io/n8n/commit/706702dff8da3c2e949e2c98dd5b34b299a1f17c)) +* **editor:** Fix canvas panning using `Control` + `Left Mouse Button` on Windows ([#12104](https://github.com/n8n-io/n8n/issues/12104)) ([43009b6](https://github.com/n8n-io/n8n/commit/43009b6aa820f24b9e6f519e7a45592aa21db03e)) +* **editor:** Fix Nodeview.v2 reinitialise based on route changes ([#12062](https://github.com/n8n-io/n8n/issues/12062)) ([b1f8663](https://github.com/n8n-io/n8n/commit/b1f866326574974eb2936e6b02771346e83e7137)) +* **editor:** Fix svg background pattern rendering on safari ([#12079](https://github.com/n8n-io/n8n/issues/12079)) ([596f221](https://github.com/n8n-io/n8n/commit/596f22103c01e14063ebb2388c4dabf4714d37c6)) +* **editor:** Fix switching from v2 to v1 ([#12050](https://github.com/n8n-io/n8n/issues/12050)) ([5c76de3](https://github.com/n8n-io/n8n/commit/5c76de324c2e25b0d8b74cdab79f04aa616d8c4f)) +* **editor:** Improvements to the commit modal ([#12031](https://github.com/n8n-io/n8n/issues/12031)) ([4fe1952](https://github.com/n8n-io/n8n/commit/4fe1952e2fb3379d95da42a7bb531851af6d0094)) +* **editor:** Load node types in demo and preview modes ([#12048](https://github.com/n8n-io/n8n/issues/12048)) ([4ac5f95](https://github.com/n8n-io/n8n/commit/4ac5f9527bbec382a65ed3f1d9c41d6948c154e3)) +* **editor:** Polyfill crypto.randomUUID ([#12052](https://github.com/n8n-io/n8n/issues/12052)) ([0537524](https://github.com/n8n-io/n8n/commit/0537524c3e45d7633415c7a9175a3857ad52cd58)) +* **editor:** Redirect Settings to the proper sub page depending on the instance type (cloud or not) ([#12053](https://github.com/n8n-io/n8n/issues/12053)) 
([a16d006](https://github.com/n8n-io/n8n/commit/a16d006f893cac927d674fa447b08c1205b67c54)) +* **editor:** Render sanitized HTML content in toast messages ([#12139](https://github.com/n8n-io/n8n/issues/12139)) ([0468945](https://github.com/n8n-io/n8n/commit/0468945c99f083577c4cc71f671b4b950f6aeb86)) +* **editor:** Universal button snags ([#11974](https://github.com/n8n-io/n8n/issues/11974)) ([956b11a](https://github.com/n8n-io/n8n/commit/956b11a560528336a74be40f722fa05bf3cca94d)) +* **editor:** Update concurrency UI considering different types of instances ([#12068](https://github.com/n8n-io/n8n/issues/12068)) ([fa572bb](https://github.com/n8n-io/n8n/commit/fa572bbca4397b1cc42668530497444630ed17eb)) +* **FTP Node:** Fix issue with creating folders on rename ([#9340](https://github.com/n8n-io/n8n/issues/9340)) ([eb7d593](https://github.com/n8n-io/n8n/commit/eb7d5934ef8bc6e999d6de4c0b8025ce175df5dd)) +* **n8n Form Node:** Completion page display if EXECUTIONS_DATA_SAVE_ON_SUCCESS=none ([#11869](https://github.com/n8n-io/n8n/issues/11869)) ([f4c2523](https://github.com/n8n-io/n8n/commit/f4c252341985fe03927a2fd5d60ba846ec3dfc77)) +* **OpenAI Node:** Allow updating assistant files ([#12042](https://github.com/n8n-io/n8n/issues/12042)) ([7b20f8a](https://github.com/n8n-io/n8n/commit/7b20f8aaa8befd19dbad0af3bf1b881342c1fca5)) + + +### Features + +* **AI Transform Node:** Reduce payload size ([#11965](https://github.com/n8n-io/n8n/issues/11965)) ([d8ca8de](https://github.com/n8n-io/n8n/commit/d8ca8de13a4cbb856696873bdb56c66b12a5b027)) +* **core:** Add option to filter for empty variables ([#12112](https://github.com/n8n-io/n8n/issues/12112)) ([a63f0e8](https://github.com/n8n-io/n8n/commit/a63f0e878e21da9924451e2679939209b34b6583)) +* **core:** Cancel runner task on timeout in external mode ([#12101](https://github.com/n8n-io/n8n/issues/12101)) ([addb4fa](https://github.com/n8n-io/n8n/commit/addb4fa352c88d856e463bb2b7001173c4fd6a7d)) +* **core:** Parent workflows should wait for sub-workflows to finish ([#11985](https://github.com/n8n-io/n8n/issues/11985)) ([60b3dcc](https://github.com/n8n-io/n8n/commit/60b3dccf9317da6f3013be35a78ce21d0416ad80)) +* **editor:** Implementing the `Easy AI Workflow` experiment ([#12043](https://github.com/n8n-io/n8n/issues/12043)) ([67ed1d2](https://github.com/n8n-io/n8n/commit/67ed1d2c3c2e69d5a96daf7de2795c02f5d8f15b)) +* **Redis Node:** Add support for continue on fail / error output branch ([#11714](https://github.com/n8n-io/n8n/issues/11714)) ([ed35958](https://github.com/n8n-io/n8n/commit/ed359586c88a7662f4d94d58c5a87cf91d027ab9)) + + + # [1.71.0](https://github.com/n8n-io/n8n/compare/n8n@1.70.0...n8n@1.71.0) (2024-12-04) diff --git a/README.md b/README.md index d51ac596ca..c41a5e5ac8 100644 --- a/README.md +++ b/README.md @@ -1,104 +1,67 @@ -![n8n.io - Workflow Automation](https://user-images.githubusercontent.com/65276001/173571060-9f2f6d7b-bac0-43b6-bdb2-001da9694058.png) +![Banner image](https://user-images.githubusercontent.com/10284570/173569848-c624317f-42b1-45a6-ab09-f0ea3c247648.png) -# n8n - Workflow automation tool +# n8n - Secure Workflow Automation for Technical Teams -n8n is an extendable workflow automation tool. With a [fair-code](https://faircode.io) distribution model, n8n -will always have visible source code, be available to self-host, and allow you to add your own custom -functions, logic and apps. n8n's node-based approach makes it highly versatile, enabling you to connect -anything to everything. 
+n8n is a workflow automation platform that gives technical teams the flexibility of code with the speed of no-code. With 400+ integrations, native AI capabilities, and a fair-code license, n8n lets you build powerful automations while maintaining full control over your data and deployments. -![n8n.io - Screenshot](https://raw.githubusercontent.com/n8n-io/n8n/master/assets/n8n-screenshot.png) +![n8n.io - Screenshot](https://raw.githubusercontent.com/n8n-io/n8n/master/assets/n8n-screenshot-readme.png) -## Demo +## Key Capabilities -[:tv: A short video (< 5 min)](https://www.youtube.com/watch?v=1MwSoB0gnM4) that goes over key concepts of -creating workflows in n8n. +- **Code When You Need It**: Write JavaScript/Python, add npm packages, or use the visual interface +- **AI-Native Platform**: Build AI agent workflows based on LangChain with your own data and models +- **Full Control**: Self-host with our fair-code license or use our [cloud offering](https://app.n8n.cloud/login) +- **Enterprise-Ready**: Advanced permissions, SSO, and air-gapped deployments +- **Active Community**: 400+ integrations and 900+ ready-to-use [templates](https://n8n.io/workflows) -## Available integrations +## Quick Start -n8n has 200+ different nodes to automate workflows. The list can be found on: -[https://n8n.io/integrations](https://n8n.io/integrations) - -## Documentation - -The official n8n documentation can be found on our [documentation website](https://docs.n8n.io) - -Additional information and example workflows on the [n8n.io website](https://n8n.io) - -The release notes can be found [here](https://docs.n8n.io/release-notes/) and the list of breaking -changes [here](https://github.com/n8n-io/n8n/blob/master/packages/cli/BREAKING-CHANGES.md). - -## Usage - -- :books: Learn - [how to **use** it from the command line](https://docs.n8n.io/reference/cli-commands/) -- :whale: Learn - [how to run n8n in **Docker**](https://docs.n8n.io/hosting/installation/docker/) - -## Start - -You can try n8n without installing it using npx. You must have [Node.js](https://nodejs.org/en/) installed. -From the terminal, run: +Try n8n instantly with [npx](https://docs.n8n.io/hosting/installation/npm/) (requires [Node.js](https://nodejs.org/en/)): `npx n8n` -This command will download everything that is needed to start n8n. You can then access n8n and start building workflows by opening [http://localhost:5678](http://localhost:5678). +Or deploy with [Docker](https://docs.n8n.io/hosting/installation/docker/): -## n8n cloud +`docker run -it --rm --name n8n -p 5678:5678 docker.n8n.io/n8n-io/n8n` -Sign-up for an [n8n cloud](https://www.n8n.io/cloud/) account. +Access the editor at http://localhost:5678 -While n8n cloud and n8n are the same in terms of features, n8n cloud provides certain conveniences such as: +## Resources -- Not having to set up and maintain your n8n instance -- Managed OAuth for authentication -- Easily upgrading to the newer n8n versions - -## Build with LangChain and AI in n8n (beta) - -With n8n's LangChain nodes you can build AI-powered functionality within your workflows. The LangChain nodes are configurable, meaning you can choose your preferred agent, LLM, memory, and so on. Alongside the LangChain nodes, you can connect any n8n node as normal: this means you can integrate your LangChain logic with other data sources and services. - -Learn more in the [documentation](https://docs.n8n.io/langchain/). 
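The `docker run` command in the Quick Start above starts a throwaway container, so workflows and credentials are lost when it exits. A minimal sketch of a persistent setup, assuming the image's default data directory is `/home/node/.n8n` (the named volume here is illustrative, not part of this PR):

```sh
# Illustrative only: persist n8n data across container restarts.
# Assumes /home/node/.n8n is the image's data directory; adjust if your setup differs.
docker volume create n8n_data

docker run -it --rm \
  --name n8n \
  -p 5678:5678 \
  -v n8n_data:/home/node/.n8n \
  docker.n8n.io/n8n-io/n8n
```

The editor is then still reachable at http://localhost:5678, with data kept in the `n8n_data` volume between runs.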
- -- [LangChain nodes package](https://www.npmjs.com/package/@n8n/n8n-nodes-langchain) -- [Chatbot package](https://www.npmjs.com/package/@n8n/chat) +- 📚 [Documentation](https://docs.n8n.io) +- 🔧 [400+ Integrations](https://n8n.io/integrations) +- 💡 [Example Workflows](https://n8n.io/workflows) +- 🤖 [AI & LangChain Guide](https://docs.n8n.io/langchain/) +- 👥 [Community Forum](https://community.n8n.io) +- 📖 [Community Tutorials](https://community.n8n.io/c/tutorials/28) ## Support -If you have problems or questions go to our forum, we will then try to help you asap: - -[https://community.n8n.io](https://community.n8n.io) - -## Jobs - -If you are interested in working for n8n and so shape the future of the project check out our -[job posts](https://apply.workable.com/n8n/) - -## What does n8n mean and how do you pronounce it? - -**Short answer:** It means "nodemation" and it is pronounced as n-eight-n. - -**Long answer:** "I get that question quite often (more often than I expected) so I decided it is probably -best to answer it here. While looking for a good name for the project with a free domain I realized very -quickly that all the good ones I could think of were already taken. So, in the end, I chose nodemation. -'node-' in the sense that it uses a Node-View and that it uses Node.js and '-mation' for 'automation' which is -what the project is supposed to help with. However, I did not like how long the name was and I could not -imagine writing something that long every time in the CLI. That is when I then ended up on 'n8n'." - **Jan -Oberhauser, Founder and CEO, n8n.io** - -## Development setup - -Have you found a bug :bug: ? Or maybe you have a nice feature :sparkles: to contribute ? The -[CONTRIBUTING guide](https://github.com/n8n-io/n8n/blob/master/CONTRIBUTING.md) will help you get your -development environment ready in minutes. +Need help? Our community forum is the place to get support and connect with other users: +[community.n8n.io](https://community.n8n.io) ## License -n8n is [fair-code](https://faircode.io) distributed under the -[**Sustainable Use License**](https://github.com/n8n-io/n8n/blob/master/LICENSE.md) and the -[**n8n Enterprise License**](https://github.com/n8n-io/n8n/blob/master/LICENSE_EE.md). +n8n is [fair-code](https://faircode.io) distributed under the [Sustainable Use License](https://github.com/n8n-io/n8n/blob/master/LICENSE.md) and [n8n Enterprise License](https://github.com/n8n-io/n8n/blob/master/LICENSE_EE.md). -Proprietary licenses are available for enterprise customers. [Get in touch](mailto:license@n8n.io) +- **Source Available**: Always visible source code +- **Self-Hostable**: Deploy anywhere +- **Extensible**: Add your own nodes and functionality -Additional information about the license model can be found in the -[docs](https://docs.n8n.io/reference/license/). +[Enterprise licenses](mailto:license@n8n.io) available for additional features and support. + +Additional information about the license model can be found in the [docs](https://docs.n8n.io/reference/license/). + +## Contributing + +Found a bug 🐛 or have a feature idea ✨? Check our [Contributing Guide](https://github.com/n8n-io/n8n/blob/master/CONTRIBUTING.md) to get started. + +## Join the Team + +Want to shape the future of automation? Check out our [job posts](https://n8n.io/careers) and join our team! + +## What does n8n mean? + +**Short answer:** It means "nodemation" and is pronounced as n-eight-n. 
+ +**Long answer:** "I get that question quite often (more often than I expected) so I decided it is probably best to answer it here. While looking for a good name for the project with a free domain I realized very quickly that all the good ones I could think of were already taken. So, in the end, I chose nodemation. 'node-' in the sense that it uses a Node-View and that it uses Node.js and '-mation' for 'automation' which is what the project is supposed to help with. However, I did not like how long the name was and I could not imagine writing something that long every time in the CLI. That is when I then ended up on 'n8n'." - **Jan Oberhauser, Founder and CEO, n8n.io** diff --git a/assets/n8n-screenshot-readme.png b/assets/n8n-screenshot-readme.png new file mode 100644 index 0000000000..d6b5faef71 Binary files /dev/null and b/assets/n8n-screenshot-readme.png differ diff --git a/cypress/e2e/11-inline-expression-editor.cy.ts b/cypress/e2e/11-inline-expression-editor.cy.ts index 945c62821b..a762135a65 100644 --- a/cypress/e2e/11-inline-expression-editor.cy.ts +++ b/cypress/e2e/11-inline-expression-editor.cy.ts @@ -129,7 +129,7 @@ describe('Inline expression editor', () => { // Run workflow ndv.actions.close(); - WorkflowPage.actions.executeNode('No Operation'); + WorkflowPage.actions.executeNode('No Operation', { anchor: 'topLeft' }); WorkflowPage.actions.openNode('Hacker News'); WorkflowPage.actions.openInlineExpressionEditor(); diff --git a/cypress/e2e/13-pinning.cy.ts b/cypress/e2e/13-pinning.cy.ts index 4f48fa4529..2d3351f8aa 100644 --- a/cypress/e2e/13-pinning.cy.ts +++ b/cypress/e2e/13-pinning.cy.ts @@ -112,7 +112,7 @@ describe('Data pinning', () => { it('Should be able to pin data from canvas (context menu or shortcut)', () => { workflowPage.actions.addInitialNodeToCanvas('Schedule Trigger'); workflowPage.actions.addNodeToCanvas(EDIT_FIELDS_SET_NODE_NAME); - workflowPage.actions.openContextMenu(EDIT_FIELDS_SET_NODE_NAME, 'overflow-button'); + workflowPage.actions.openContextMenu(EDIT_FIELDS_SET_NODE_NAME, { method: 'overflow-button' }); workflowPage.getters .contextMenuAction('toggle_pin') .parent() diff --git a/cypress/e2e/14-mapping.cy.ts b/cypress/e2e/14-mapping.cy.ts index 3bbbd0b293..e19959453f 100644 --- a/cypress/e2e/14-mapping.cy.ts +++ b/cypress/e2e/14-mapping.cy.ts @@ -185,7 +185,6 @@ describe('Data mapping', () => { workflowPage.actions.openNode('Set1'); ndv.actions.executePrevious(); - ndv.actions.expandSchemaViewNode(SCHEDULE_TRIGGER_NODE_NAME); const dataPill = ndv.getters .inputDataContainer() diff --git a/cypress/e2e/18-user-management.cy.ts b/cypress/e2e/18-user-management.cy.ts index fe91a72935..d4eb5841cf 100644 --- a/cypress/e2e/18-user-management.cy.ts +++ b/cypress/e2e/18-user-management.cy.ts @@ -148,24 +148,9 @@ describe('User Management', { disableAutoLogin: true }, () => { personalSettingsPage.actions.changeTheme('Dark'); cy.get('body').should('have.attr', 'data-theme', 'dark'); - settingsSidebar.actions.back(); - mainSidebar.getters - .logo() - .should('have.attr', 'src') - .then((src) => { - expect(src).to.include('/static/logo/channel/dev-dark.svg'); - }); - cy.visit(personalSettingsPage.url); personalSettingsPage.actions.changeTheme('Light'); cy.get('body').should('have.attr', 'data-theme', 'light'); - settingsSidebar.actions.back(); - mainSidebar.getters - .logo() - .should('have.attr', 'src') - .then((src) => { - expect(src).to.include('/static/logo/channel/dev.svg'); - }); }); it('should delete user and their data', () => { diff --git 
a/cypress/e2e/28-debug.cy.ts b/cypress/e2e/28-debug.cy.ts index b5159951a7..9149e2e478 100644 --- a/cypress/e2e/28-debug.cy.ts +++ b/cypress/e2e/28-debug.cy.ts @@ -87,11 +87,28 @@ describe('Debug', () => { confirmDialog.get('.btn--confirm').click(); cy.url().should('include', '/debug'); - workflowPage.getters.canvasNodes().first().should('have.descendants', '.node-pin-data-icon'); - workflowPage.getters - .canvasNodes() - .not(':first') - .should('not.have.descendants', '.node-pin-data-icon'); + cy.ifCanvasVersion( + () => { + workflowPage.getters + .canvasNodes() + .first() + .should('have.descendants', '.node-pin-data-icon'); + workflowPage.getters + .canvasNodes() + .not(':first') + .should('not.have.descendants', '.node-pin-data-icon'); + }, + () => { + workflowPage.getters + .canvasNodes() + .first() + .should('have.descendants', '[data-test-id="canvas-node-status-pinned"]'); + workflowPage.getters + .canvasNodes() + .not(':first') + .should('not.have.descendants', '[data-test-id="canvas-node-status-pinned"]'); + }, + ); cy.reload(true); cy.wait(['@getExecution']); @@ -114,7 +131,18 @@ describe('Debug', () => { confirmDialog.get('.btn--confirm').click(); cy.url().should('include', '/debug'); - workflowPage.getters.canvasNodes().last().find('.node-info-icon').should('be.empty'); + cy.ifCanvasVersion( + () => { + workflowPage.getters.canvasNodes().last().find('.node-info-icon').should('be.empty'); + }, + () => { + workflowPage.getters + .canvasNodes() + .last() + .find('[class*="statusIcons"]') + .should('not.exist'); + }, + ); workflowPage.getters.canvasNodes().first().dblclick(); ndv.actions.unPinData(); diff --git a/cypress/e2e/45-workflow-selector-parameter.cy.ts b/cypress/e2e/45-workflow-selector-parameter.cy.ts index 3a4de55f50..38df9a29b8 100644 --- a/cypress/e2e/45-workflow-selector-parameter.cy.ts +++ b/cypress/e2e/45-workflow-selector-parameter.cy.ts @@ -98,6 +98,10 @@ describe('Workflow Selector Parameter', () => { getVisiblePopper().findChildByTestId('rlc-item').eq(0).click(); - cy.get('@windowOpen').should('be.calledWith', '/workflows/onboarding/0?sampleSubWorkflows=0'); + const SAMPLE_SUBWORKFLOW_TEMPLATE_ID = 'VMiAxXa3lCAizGB5f7dVZQSFfg3FtHkdTKvLuupqBls='; + cy.get('@windowOpen').should( + 'be.calledWith', + `/workflows/onboarding/${SAMPLE_SUBWORKFLOW_TEMPLATE_ID}?sampleSubWorkflows=0`, + ); }); }); diff --git a/cypress/e2e/5-ndv.cy.ts b/cypress/e2e/5-ndv.cy.ts index 96b917d4c3..a10dbc94e6 100644 --- a/cypress/e2e/5-ndv.cy.ts +++ b/cypress/e2e/5-ndv.cy.ts @@ -77,7 +77,7 @@ describe('NDV', () => { workflowPage.actions.openNode('Switch'); cy.get('.cm-line').realMouseMove(100, 100); - cy.get('.fa-angle-down').click(); + cy.get('.fa-angle-down').first().click(); ndv.getters.backToCanvas().click(); workflowPage.actions.executeWorkflow(); workflowPage.actions.openNode('Merge'); @@ -111,6 +111,7 @@ describe('NDV', () => { cy.get('[class*=hasIssues]').should('have.length', 1); }); + // Correctly failing in V2 - node issues are only shows after execution it('should show all validation errors when opening pasted node', () => { cy.createFixtureWorkflow('Test_workflow_ndv_errors.json', 'Validation errors'); workflowPage.getters.canvasNodes().should('have.have.length', 1); @@ -204,7 +205,7 @@ describe('NDV', () => { .contains(key) .should('be.visible'); }); - getObjectValueItem().find('label').click({ force: true }); + getObjectValueItem().find('.toggle').click({ force: true }); expandedObjectProps.forEach((key) => { ndv.getters .outputPanel() @@ -213,9 +214,11 @@ describe('NDV', () 
=> { .should('not.be.visible'); }); }); + it('should not display pagination for schema', () => { setupSchemaWorkflow(); ndv.getters.backToCanvas().click(); + workflowPage.actions.deselectAll(); workflowPage.getters.canvasNodeByName('Set').click(); workflowPage.actions.addNodeToCanvas( 'Customer Datastore (n8n training)', @@ -245,8 +248,8 @@ describe('NDV', () => { ndv.getters.outputPanel().find('[class*=_pagination]').should('not.exist'); ndv.getters .outputPanel() - .find('[data-test-id=run-data-schema-item] [data-test-id=run-data-schema-item]') - .should('have.length', 20); + .find('[data-test-id=run-data-schema-item]') + .should('have.length.above', 10); }); }); @@ -407,8 +410,18 @@ describe('NDV', () => { return cy.get(`[data-node-placement=${position}]`); } + // Correctly failing in V2 - due to floating navigation not updating the selected node it('should traverse floating nodes with mouse', () => { cy.createFixtureWorkflow('Floating_Nodes.json', 'Floating Nodes'); + + cy.ifCanvasVersion( + () => {}, + () => { + // Needed in V2 as all nodes remain selected when clicking on a selected node + workflowPage.actions.deselectAll(); + }, + ); + workflowPage.getters.canvasNodes().first().dblclick(); getFloatingNodeByPosition('inputMain').should('not.exist'); getFloatingNodeByPosition('outputMain').should('exist'); @@ -419,6 +432,7 @@ describe('NDV', () => { getFloatingNodeByPosition('inputMain').should('exist'); getFloatingNodeByPosition('outputMain').should('exist'); ndv.actions.close(); + // These two lines are broken in V2 workflowPage.getters.selectedNodes().should('have.length', 1); workflowPage.getters .selectedNodes() @@ -426,10 +440,8 @@ describe('NDV', () => { .should('contain', `Node ${i + 1}`); workflowPage.getters.selectedNodes().first().dblclick(); }); - getFloatingNodeByPosition('outputMain').click({ force: true }); ndv.getters.nodeNameContainer().should('contain', 'Chain'); - // Traverse 4 connected node backwards Array.from(Array(4).keys()).forEach((i) => { getFloatingNodeByPosition('inputMain').click({ force: true }); @@ -453,8 +465,17 @@ describe('NDV', () => { .should('contain', MANUAL_TRIGGER_NODE_DISPLAY_NAME); }); + // Correctly failing in V2 - due to floating navigation not updating the selected node it('should traverse floating nodes with keyboard', () => { cy.createFixtureWorkflow('Floating_Nodes.json', 'Floating Nodes'); + cy.ifCanvasVersion( + () => {}, + () => { + // Needed in V2 as all nodes remain selected when clicking on a selected node + workflowPage.actions.deselectAll(); + }, + ); + workflowPage.getters.canvasNodes().first().dblclick(); getFloatingNodeByPosition('inputMain').should('not.exist'); getFloatingNodeByPosition('outputMain').should('exist'); @@ -465,6 +486,7 @@ describe('NDV', () => { getFloatingNodeByPosition('inputMain').should('exist'); getFloatingNodeByPosition('outputMain').should('exist'); ndv.actions.close(); + // These two lines are broken in V2 workflowPage.getters.selectedNodes().should('have.length', 1); workflowPage.getters .selectedNodes() @@ -492,6 +514,7 @@ describe('NDV', () => { getFloatingNodeByPosition('inputSub').should('not.exist'); getFloatingNodeByPosition('outputSub').should('not.exist'); ndv.actions.close(); + // These two lines are broken in V2 workflowPage.getters.selectedNodes().should('have.length', 1); workflowPage.getters .selectedNodes() @@ -717,6 +740,7 @@ describe('NDV', () => { .should('have.value', 'Error fetching options from Notion'); }); + // Correctly failing in V2 - NodeCreator is not opened after clicking 
on the link it('Should open appropriate node creator after clicking on connection hint link', () => { const nodeCreator = new NodeCreator(); const hintMapper = { @@ -734,6 +758,7 @@ describe('NDV', () => { Object.entries(hintMapper).forEach(([node, group]) => { workflowPage.actions.openNode(node); + // This fails to open the NodeCreator cy.get('[data-action=openSelectiveNodeCreator]').contains('Insert one').click(); nodeCreator.getters.activeSubcategory().should('contain', group); cy.realPress('Escape'); diff --git a/cypress/package.json b/cypress/package.json index 832abd9ef5..26b585408b 100644 --- a/cypress/package.json +++ b/cypress/package.json @@ -6,6 +6,7 @@ "cypress:install": "cypress install", "test:e2e:ui": "scripts/run-e2e.js ui", "test:e2e:dev": "scripts/run-e2e.js dev", + "test:e2e:dev:v2": "scripts/run-e2e.js dev:v2", "test:e2e:all": "scripts/run-e2e.js all", "format": "biome format --write .", "format:check": "biome ci .", diff --git a/cypress/pages/credentials.ts b/cypress/pages/credentials.ts index d5fa9cc0b1..08b2fee9c7 100644 --- a/cypress/pages/credentials.ts +++ b/cypress/pages/credentials.ts @@ -6,48 +6,10 @@ export class CredentialsPage extends BasePage { getters = { emptyListCreateCredentialButton: () => cy.getByTestId('empty-resources-list').find('button'), createCredentialButton: () => { - cy.getByTestId('resource-add').should('be.visible').click(); - cy.getByTestId('resource-add') - .find('.el-sub-menu__title') - .as('menuitem') - .should('have.attr', 'aria-describedby'); - - cy.get('@menuitem') - .should('be.visible') - .invoke('attr', 'aria-describedby') - .then((el) => cy.get(`[id="${el}"]`)) - .as('submenu'); - - cy.get('@submenu') - .should('be.visible') - .within((submenu) => { - // If submenu has another submenu - if (submenu.find('[data-test-id="navigation-submenu"]').length) { - cy.wrap(submenu) - .find('[data-test-id="navigation-submenu"]') - .should('be.visible') - .filter(':contains("Credential")') - .as('child') - .click(); - - cy.get('@child') - .should('be.visible') - .find('[data-test-id="navigation-submenu-item"]') - .should('be.visible') - .filter(':contains("Personal")') - .as('button'); - } else { - cy.wrap(submenu) - .find('[data-test-id="navigation-menu-item"]') - .filter(':contains("Credential")') - .as('button'); - } - }); - - return cy.get('@button').should('be.visible'); + cy.getByTestId('add-resource').should('be.visible').click(); + cy.getByTestId('add-resource').getByTestId('action-credential').should('be.visible'); + return cy.getByTestId('add-resource').getByTestId('action-credential'); }, - - // cy.getByTestId('resources-list-add'), searchInput: () => cy.getByTestId('resources-list-search'), emptyList: () => cy.getByTestId('resources-list-empty'), credentialCards: () => cy.getByTestId('resources-list-item'), diff --git a/cypress/pages/ndv.ts b/cypress/pages/ndv.ts index 516a0a1ea8..4550da8e2a 100644 --- a/cypress/pages/ndv.ts +++ b/cypress/pages/ndv.ts @@ -227,9 +227,6 @@ export class NDV extends BasePage { this.getters.inputSelect().find('.el-select').click(); this.getters.inputOption().contains(nodeName).click(); }, - expandSchemaViewNode: (nodeName: string) => { - this.getters.schemaViewNodeName().contains(nodeName).click(); - }, addDefaultPinnedData: () => { this.actions.editPinnedData(); this.actions.savePinnedData(); diff --git a/cypress/pages/workflow-executions-tab.ts b/cypress/pages/workflow-executions-tab.ts index 5e8c36c055..be022e6cdf 100644 --- a/cypress/pages/workflow-executions-tab.ts +++ 
b/cypress/pages/workflow-executions-tab.ts @@ -30,6 +30,12 @@ export class WorkflowExecutionsTab extends BasePage { actions = { toggleNodeEnabled: (nodeName: string) => { + cy.ifCanvasVersion( + () => {}, + () => { + cy.get('body').click(); // Cancel selection if it exists + }, + ); workflowPage.getters.canvasNodeByName(nodeName).click(); cy.get('body').type('d', { force: true }); }, diff --git a/cypress/pages/workflow.ts b/cypress/pages/workflow.ts index ee90fa55e8..1b63688da1 100644 --- a/cypress/pages/workflow.ts +++ b/cypress/pages/workflow.ts @@ -1,6 +1,7 @@ import { BasePage } from './base'; import { NodeCreator } from './features/node-creator'; import { META_KEY } from '../constants'; +import type { OpenContextMenuOptions } from '../types'; import { getVisibleSelect } from '../utils'; import { getUniqueWorkflowName, isCanvasV2 } from '../utils/workflowUtils'; @@ -96,7 +97,7 @@ export class WorkflowPage extends BasePage { disabledNodes: () => cy.ifCanvasVersion( () => cy.get('.node-box.disabled'), - () => cy.get('[data-test-id="canvas-trigger-node"][class*="disabled"]'), + () => cy.get('[data-test-id*="node"][class*="disabled"]'), ), selectedNodes: () => cy.ifCanvasVersion( @@ -272,14 +273,14 @@ export class WorkflowPage extends BasePage { }, openContextMenu: ( nodeTypeName?: string, - method: 'right-click' | 'overflow-button' = 'right-click', + { method = 'right-click', anchor = 'center' }: OpenContextMenuOptions = {}, ) => { const target = nodeTypeName ? this.getters.canvasNodeByName(nodeTypeName) : this.getters.nodeViewBackground(); if (method === 'right-click') { - target.rightclick(nodeTypeName ? 'center' : 'topLeft', { force: true }); + target.rightclick(nodeTypeName ? anchor : 'topLeft', { force: true }); } else { target.realHover(); target.find('[data-test-id="overflow-node-button"]').click({ force: true }); @@ -296,8 +297,8 @@ export class WorkflowPage extends BasePage { this.actions.openContextMenu(nodeTypeName); this.actions.contextMenuAction('delete'); }, - executeNode: (nodeTypeName: string) => { - this.actions.openContextMenu(nodeTypeName); + executeNode: (nodeTypeName: string, options?: OpenContextMenuOptions) => { + this.actions.openContextMenu(nodeTypeName, options); this.actions.contextMenuAction('execute'); }, addStickyFromContextMenu: () => { @@ -324,7 +325,7 @@ export class WorkflowPage extends BasePage { this.actions.contextMenuAction('toggle_pin'); }, openNodeFromContextMenu: (nodeTypeName: string) => { - this.actions.openContextMenu(nodeTypeName, 'overflow-button'); + this.actions.openContextMenu(nodeTypeName, { method: 'overflow-button' }); this.actions.contextMenuAction('open'); }, selectAllFromContextMenu: () => { @@ -332,8 +333,14 @@ export class WorkflowPage extends BasePage { this.actions.contextMenuAction('select_all'); }, deselectAll: () => { - this.actions.openContextMenu(); - this.actions.contextMenuAction('deselect_all'); + cy.ifCanvasVersion( + () => { + this.actions.openContextMenu(); + this.actions.contextMenuAction('deselect_all'); + }, + // rightclick doesn't work with vueFlow canvas + () => this.getters.nodeViewBackground().click('topLeft'), + ); }, openExpressionEditorModal: () => { cy.contains('Expression').invoke('show').click(); diff --git a/cypress/pages/workflows.ts b/cypress/pages/workflows.ts index 41f62e8bc3..a58911a355 100644 --- a/cypress/pages/workflows.ts +++ b/cypress/pages/workflows.ts @@ -8,45 +8,8 @@ export class WorkflowsPage extends BasePage { newWorkflowTemplateCard: () => cy.getByTestId('new-workflow-template-card'), 
searchBar: () => cy.getByTestId('resources-list-search'), createWorkflowButton: () => { - cy.getByTestId('resource-add').should('be.visible').click(); - cy.getByTestId('resource-add') - .find('.el-sub-menu__title') - .as('menuitem') - .should('have.attr', 'aria-describedby'); - - cy.get('@menuitem') - .should('be.visible') - .invoke('attr', 'aria-describedby') - .then((el) => cy.get(`[id="${el}"]`)) - .as('submenu'); - - cy.get('@submenu') - .should('be.visible') - .within((submenu) => { - // If submenu has another submenu - if (submenu.find('[data-test-id="navigation-submenu"]').length) { - cy.wrap(submenu) - .find('[data-test-id="navigation-submenu"]') - .should('be.visible') - .filter(':contains("Workflow")') - .as('child') - .click(); - - cy.get('@child') - .should('be.visible') - .find('[data-test-id="navigation-submenu-item"]') - .should('be.visible') - .filter(':contains("Personal")') - .as('button'); - } else { - cy.wrap(submenu) - .find('[data-test-id="navigation-menu-item"]') - .filter(':contains("Workflow")') - .as('button'); - } - }); - - return cy.get('@button').should('be.visible'); + cy.getByTestId('add-resource-workflow').should('be.visible'); + return cy.getByTestId('add-resource-workflow'); }, workflowCards: () => cy.getByTestId('resources-list-item'), workflowCard: (workflowName: string) => diff --git a/cypress/scripts/run-e2e.js b/cypress/scripts/run-e2e.js index 8096a70caf..6819d6c824 100755 --- a/cypress/scripts/run-e2e.js +++ b/cypress/scripts/run-e2e.js @@ -57,6 +57,17 @@ switch (scenario) { }, }); break; + case 'dev:v2': + runTests({ + startCommand: 'develop', + url: 'http://localhost:8080/favicon.ico', + testCommand: 'cypress open', + customEnv: { + CYPRESS_NODE_VIEW_VERSION: 2, + CYPRESS_BASE_URL: 'http://localhost:8080', + }, + }); + break; case 'all': const specSuiteFilter = process.argv[3]; const specParam = specSuiteFilter ? 
` --spec **/*${specSuiteFilter}*` : ''; diff --git a/cypress/types.ts b/cypress/types.ts index 6186c4201d..63f2ddb99e 100644 --- a/cypress/types.ts +++ b/cypress/types.ts @@ -22,3 +22,8 @@ export interface ExecutionResponse { results: Execution[]; }; } + +export type OpenContextMenuOptions = { + method?: 'right-click' | 'overflow-button'; + anchor?: 'topRight' | 'topLeft' | 'center' | 'bottomRight' | 'bottomLeft'; +}; diff --git a/docker/images/n8n-custom/Dockerfile b/docker/images/n8n-custom/Dockerfile index f4c1da897b..2b72365eb8 100644 --- a/docker/images/n8n-custom/Dockerfile +++ b/docker/images/n8n-custom/Dockerfile @@ -33,7 +33,7 @@ COPY docker/images/n8n/docker-entrypoint.sh / # Setup the Task Runner Launcher ARG TARGETPLATFORM -ARG LAUNCHER_VERSION=0.7.0-rc +ARG LAUNCHER_VERSION=1.0.0 COPY docker/images/n8n/n8n-task-runners.json /etc/n8n-task-runners.json # Download, verify, then extract the launcher binary RUN \ diff --git a/docker/images/n8n/Dockerfile b/docker/images/n8n/Dockerfile index 0f28ee706f..7407736185 100644 --- a/docker/images/n8n/Dockerfile +++ b/docker/images/n8n/Dockerfile @@ -24,7 +24,7 @@ RUN set -eux; \ # Setup the Task Runner Launcher ARG TARGETPLATFORM -ARG LAUNCHER_VERSION=0.7.0-rc +ARG LAUNCHER_VERSION=1.0.0 COPY n8n-task-runners.json /etc/n8n-task-runners.json # Download, verify, then extract the launcher binary RUN \ diff --git a/docker/images/n8n/n8n-task-runners.json b/docker/images/n8n/n8n-task-runners.json index d9575997c0..c64d0ecdd0 100644 --- a/docker/images/n8n/n8n-task-runners.json +++ b/docker/images/n8n/n8n-task-runners.json @@ -12,6 +12,7 @@ "N8N_RUNNERS_TASK_BROKER_URI", "N8N_RUNNERS_MAX_PAYLOAD", "N8N_RUNNERS_MAX_CONCURRENCY", + "N8N_RUNNERS_TASK_TIMEOUT", "N8N_RUNNERS_HEALTH_CHECK_SERVER_ENABLED", "N8N_RUNNERS_HEALTH_CHECK_SERVER_HOST", "N8N_RUNNERS_HEALTH_CHECK_SERVER_PORT", diff --git a/package.json b/package.json index e2a0628773..29be8d868a 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "n8n-monorepo", - "version": "1.71.0", + "version": "1.72.0", "private": true, "engines": { "node": ">=20.15", diff --git a/packages/@n8n/api-types/package.json b/packages/@n8n/api-types/package.json index 9f045e31d4..fac0011437 100644 --- a/packages/@n8n/api-types/package.json +++ b/packages/@n8n/api-types/package.json @@ -1,6 +1,6 @@ { "name": "@n8n/api-types", - "version": "0.9.0", + "version": "0.10.0", "scripts": { "clean": "rimraf dist .turbo", "dev": "pnpm watch", diff --git a/packages/@n8n/api-types/src/dto/index.ts b/packages/@n8n/api-types/src/dto/index.ts index 41a55f050a..97d5d38459 100644 --- a/packages/@n8n/api-types/src/dto/index.ts +++ b/packages/@n8n/api-types/src/dto/index.ts @@ -3,3 +3,4 @@ export { RoleChangeRequestDto } from './user/role-change-request.dto'; export { SettingsUpdateRequestDto } from './user/settings-update-request.dto'; export { UserUpdateRequestDto } from './user/user-update-request.dto'; export { CommunityRegisteredRequestDto } from './license/community-registered-request.dto'; +export { VariableListRequestDto } from './variables/variables-list-request.dto'; diff --git a/packages/@n8n/api-types/src/dto/user/settings-update-request.dto.ts b/packages/@n8n/api-types/src/dto/user/settings-update-request.dto.ts index f4c0eb0af3..247b830d91 100644 --- a/packages/@n8n/api-types/src/dto/user/settings-update-request.dto.ts +++ b/packages/@n8n/api-types/src/dto/user/settings-update-request.dto.ts @@ -4,4 +4,5 @@ import { Z } from 'zod-class'; export class SettingsUpdateRequestDto extends Z.class({ 
userActivated: z.boolean().optional(), allowSSOManualLogin: z.boolean().optional(), + easyAIWorkflowOnboarded: z.boolean().optional(), }) {} diff --git a/packages/@n8n/api-types/src/dto/variables/variables-list-request.dto.ts b/packages/@n8n/api-types/src/dto/variables/variables-list-request.dto.ts new file mode 100644 index 0000000000..804bcb2786 --- /dev/null +++ b/packages/@n8n/api-types/src/dto/variables/variables-list-request.dto.ts @@ -0,0 +1,6 @@ +import { z } from 'zod'; +import { Z } from 'zod-class'; + +export class VariableListRequestDto extends Z.class({ + state: z.literal('empty').optional(), +}) {} diff --git a/packages/@n8n/api-types/src/frontend-settings.ts b/packages/@n8n/api-types/src/frontend-settings.ts index 8f9c740ad6..1fe0fcd857 100644 --- a/packages/@n8n/api-types/src/frontend-settings.ts +++ b/packages/@n8n/api-types/src/frontend-settings.ts @@ -163,7 +163,7 @@ export interface FrontendSettings { pruneTime: number; licensePruneTime: number; }; - pruning: { + pruning?: { isEnabled: boolean; maxAge: number; maxCount: number; @@ -172,5 +172,5 @@ export interface FrontendSettings { blockFileAccessToN8nFiles: boolean; }; betaFeatures: FrontendBetaFeatures[]; - virtualSchemaView: boolean; + easyAIWorkflowOnboarded: boolean; } diff --git a/packages/@n8n/chat/package.json b/packages/@n8n/chat/package.json index 4cc32dc8fe..1db1c50f9a 100644 --- a/packages/@n8n/chat/package.json +++ b/packages/@n8n/chat/package.json @@ -1,6 +1,6 @@ { "name": "@n8n/chat", - "version": "0.32.0", + "version": "0.33.0", "scripts": { "dev": "pnpm run storybook", "build": "pnpm build:vite && pnpm build:bundle", diff --git a/packages/@n8n/config/package.json b/packages/@n8n/config/package.json index 8ba9cc43bf..961079acd6 100644 --- a/packages/@n8n/config/package.json +++ b/packages/@n8n/config/package.json @@ -1,6 +1,6 @@ { "name": "@n8n/config", - "version": "1.21.0", + "version": "1.22.0", "scripts": { "clean": "rimraf dist .turbo", "dev": "pnpm watch", diff --git a/packages/@n8n/config/src/configs/runners.config.ts b/packages/@n8n/config/src/configs/runners.config.ts index 06e262fe49..733e724408 100644 --- a/packages/@n8n/config/src/configs/runners.config.ts +++ b/packages/@n8n/config/src/configs/runners.config.ts @@ -43,11 +43,11 @@ export class TaskRunnersConfig { @Env('N8N_RUNNERS_MAX_CONCURRENCY') maxConcurrency: number = 5; - /** How long (in seconds) a task is allowed to take for completion, else the task will be aborted and the runner restarted. Must be greater than 0. */ + /** How long (in seconds) a task is allowed to take for completion, else the task will be aborted. (In internal mode, the runner will also be restarted.) Must be greater than 0. */ @Env('N8N_RUNNERS_TASK_TIMEOUT') taskTimeout: number = 60; - /** How often (in seconds) the runner must send a heartbeat to the broker, else the task will be aborted and the runner restarted. Must be greater than 0. */ + /** How often (in seconds) the runner must send a heartbeat to the broker, else the task will be aborted. (In internal mode, the runner will also be restarted.) Must be greater than 0. 
*/ @Env('N8N_RUNNERS_HEARTBEAT_INTERVAL') heartbeatInterval: number = 30; } diff --git a/packages/@n8n/nodes-langchain/nodes/agents/Agent/Agent.node.ts b/packages/@n8n/nodes-langchain/nodes/agents/Agent/Agent.node.ts index 80e5da9cfa..230a6ddc6e 100644 --- a/packages/@n8n/nodes-langchain/nodes/agents/Agent/Agent.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/agents/Agent/Agent.node.ts @@ -9,6 +9,8 @@ import type { INodeProperties, } from 'n8n-workflow'; +import { promptTypeOptions, textFromPreviousNode, textInput } from '@utils/descriptions'; + import { conversationalAgentProperties } from './agents/ConversationalAgent/description'; import { conversationalAgentExecute } from './agents/ConversationalAgent/execute'; import { openAiFunctionsAgentProperties } from './agents/OpenAiFunctionsAgent/description'; @@ -21,7 +23,6 @@ import { sqlAgentAgentProperties } from './agents/SqlAgent/description'; import { sqlAgentAgentExecute } from './agents/SqlAgent/execute'; import { toolsAgentProperties } from './agents/ToolsAgent/description'; import { toolsAgentExecute } from './agents/ToolsAgent/execute'; -import { promptTypeOptions, textFromPreviousNode, textInput } from '../../../utils/descriptions'; // Function used in the inputs expression to figure out which inputs to // display based on the agent type diff --git a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ConversationalAgent/description.ts b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ConversationalAgent/description.ts index c3507b328c..0a65f4919c 100644 --- a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ConversationalAgent/description.ts +++ b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ConversationalAgent/description.ts @@ -1,4 +1,5 @@ import type { INodeProperties } from 'n8n-workflow'; + import { SYSTEM_MESSAGE, HUMAN_MESSAGE } from './prompt'; export const conversationalAgentProperties: INodeProperties[] = [ diff --git a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ConversationalAgent/execute.ts b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ConversationalAgent/execute.ts index 09e04c0b76..04a565971c 100644 --- a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ConversationalAgent/execute.ts +++ b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ConversationalAgent/execute.ts @@ -6,14 +6,11 @@ import { CombiningOutputParser } from 'langchain/output_parsers'; import type { IExecuteFunctions, INodeExecutionData } from 'n8n-workflow'; import { NodeConnectionType, NodeOperationError } from 'n8n-workflow'; -import { - isChatInstance, - getPromptInputByType, - getConnectedTools, -} from '../../../../../utils/helpers'; -import { getOptionalOutputParsers } from '../../../../../utils/output_parsers/N8nOutputParser'; -import { throwIfToolSchema } from '../../../../../utils/schemaParsing'; -import { getTracingConfig } from '../../../../../utils/tracing'; +import { isChatInstance, getPromptInputByType, getConnectedTools } from '@utils/helpers'; +import { getOptionalOutputParsers } from '@utils/output_parsers/N8nOutputParser'; +import { throwIfToolSchema } from '@utils/schemaParsing'; +import { getTracingConfig } from '@utils/tracing'; + import { checkForStructuredTools, extractParsedOutput } from '../utils'; export async function conversationalAgentExecute( diff --git a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/OpenAiFunctionsAgent/description.ts b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/OpenAiFunctionsAgent/description.ts index 
40f9ad1945..0a0f6ac55e 100644 --- a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/OpenAiFunctionsAgent/description.ts +++ b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/OpenAiFunctionsAgent/description.ts @@ -1,4 +1,5 @@ import type { INodeProperties } from 'n8n-workflow'; + import { SYSTEM_MESSAGE } from './prompt'; export const openAiFunctionsAgentProperties: INodeProperties[] = [ diff --git a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/OpenAiFunctionsAgent/execute.ts b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/OpenAiFunctionsAgent/execute.ts index a9b324678c..17a2d43590 100644 --- a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/OpenAiFunctionsAgent/execute.ts +++ b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/OpenAiFunctionsAgent/execute.ts @@ -12,9 +12,10 @@ import { NodeOperationError, } from 'n8n-workflow'; -import { getConnectedTools, getPromptInputByType } from '../../../../../utils/helpers'; -import { getOptionalOutputParsers } from '../../../../../utils/output_parsers/N8nOutputParser'; -import { getTracingConfig } from '../../../../../utils/tracing'; +import { getConnectedTools, getPromptInputByType } from '@utils/helpers'; +import { getOptionalOutputParsers } from '@utils/output_parsers/N8nOutputParser'; +import { getTracingConfig } from '@utils/tracing'; + import { extractParsedOutput } from '../utils'; export async function openAiFunctionsAgentExecute( diff --git a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/PlanAndExecuteAgent/description.ts b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/PlanAndExecuteAgent/description.ts index 608f9e9def..8dcb8f8385 100644 --- a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/PlanAndExecuteAgent/description.ts +++ b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/PlanAndExecuteAgent/description.ts @@ -1,4 +1,5 @@ import type { INodeProperties } from 'n8n-workflow'; + import { DEFAULT_STEP_EXECUTOR_HUMAN_CHAT_MESSAGE_TEMPLATE } from './prompt'; export const planAndExecuteAgentProperties: INodeProperties[] = [ diff --git a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/PlanAndExecuteAgent/execute.ts b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/PlanAndExecuteAgent/execute.ts index d2dc152ebb..379475f923 100644 --- a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/PlanAndExecuteAgent/execute.ts +++ b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/PlanAndExecuteAgent/execute.ts @@ -10,10 +10,11 @@ import { NodeOperationError, } from 'n8n-workflow'; -import { getConnectedTools, getPromptInputByType } from '../../../../../utils/helpers'; -import { getOptionalOutputParsers } from '../../../../../utils/output_parsers/N8nOutputParser'; -import { throwIfToolSchema } from '../../../../../utils/schemaParsing'; -import { getTracingConfig } from '../../../../../utils/tracing'; +import { getConnectedTools, getPromptInputByType } from '@utils/helpers'; +import { getOptionalOutputParsers } from '@utils/output_parsers/N8nOutputParser'; +import { throwIfToolSchema } from '@utils/schemaParsing'; +import { getTracingConfig } from '@utils/tracing'; + import { checkForStructuredTools, extractParsedOutput } from '../utils'; export async function planAndExecuteAgentExecute( diff --git a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ReActAgent/description.ts b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ReActAgent/description.ts index f95026f7a4..890d83dc4a 100644 --- 
a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ReActAgent/description.ts +++ b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ReActAgent/description.ts @@ -1,4 +1,5 @@ import type { INodeProperties } from 'n8n-workflow'; + import { HUMAN_MESSAGE_TEMPLATE, PREFIX, SUFFIX, SUFFIX_CHAT } from './prompt'; export const reActAgentAgentProperties: INodeProperties[] = [ diff --git a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ReActAgent/execute.ts b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ReActAgent/execute.ts index b671a8189c..4db35634d6 100644 --- a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ReActAgent/execute.ts +++ b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ReActAgent/execute.ts @@ -11,14 +11,11 @@ import { NodeOperationError, } from 'n8n-workflow'; -import { - getConnectedTools, - getPromptInputByType, - isChatInstance, -} from '../../../../../utils/helpers'; -import { getOptionalOutputParsers } from '../../../../../utils/output_parsers/N8nOutputParser'; -import { throwIfToolSchema } from '../../../../../utils/schemaParsing'; -import { getTracingConfig } from '../../../../../utils/tracing'; +import { getConnectedTools, getPromptInputByType, isChatInstance } from '@utils/helpers'; +import { getOptionalOutputParsers } from '@utils/output_parsers/N8nOutputParser'; +import { throwIfToolSchema } from '@utils/schemaParsing'; +import { getTracingConfig } from '@utils/tracing'; + import { checkForStructuredTools, extractParsedOutput } from '../utils'; export async function reActAgentAgentExecute( diff --git a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/SqlAgent/description.ts b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/SqlAgent/description.ts index bed547ba6d..919f501d17 100644 --- a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/SqlAgent/description.ts +++ b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/SqlAgent/description.ts @@ -1,11 +1,8 @@ import type { INodeProperties } from 'n8n-workflow'; +import { promptTypeOptions, textFromPreviousNode, textInput } from '@utils/descriptions'; + import { SQL_PREFIX, SQL_SUFFIX } from './other/prompts'; -import { - promptTypeOptions, - textFromPreviousNode, - textInput, -} from '../../../../../utils/descriptions'; const dataSourceOptions: INodeProperties = { displayName: 'Data Source', diff --git a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/SqlAgent/execute.ts b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/SqlAgent/execute.ts index b9c0f3db8e..369ca109af 100644 --- a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/SqlAgent/execute.ts +++ b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/SqlAgent/execute.ts @@ -12,12 +12,13 @@ import { type IDataObject, } from 'n8n-workflow'; +import { getPromptInputByType, serializeChatHistory } from '@utils/helpers'; +import { getTracingConfig } from '@utils/tracing'; + import { getMysqlDataSource } from './other/handlers/mysql'; import { getPostgresDataSource } from './other/handlers/postgres'; import { getSqliteDataSource } from './other/handlers/sqlite'; import { SQL_PREFIX, SQL_SUFFIX } from './other/prompts'; -import { getPromptInputByType, serializeChatHistory } from '../../../../../utils/helpers'; -import { getTracingConfig } from '../../../../../utils/tracing'; const parseTablesString = (tablesString: string) => tablesString diff --git a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/SqlAgent/other/handlers/mysql.ts 
b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/SqlAgent/other/handlers/mysql.ts index ea1b360f04..dd56f93d6c 100644 --- a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/SqlAgent/other/handlers/mysql.ts +++ b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/SqlAgent/other/handlers/mysql.ts @@ -1,5 +1,5 @@ -import { type IExecuteFunctions } from 'n8n-workflow'; import { DataSource } from '@n8n/typeorm'; +import { type IExecuteFunctions } from 'n8n-workflow'; export async function getMysqlDataSource(this: IExecuteFunctions): Promise { const credentials = await this.getCredentials('mySql'); diff --git a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/SqlAgent/other/handlers/postgres.ts b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/SqlAgent/other/handlers/postgres.ts index 6971d9119f..31dda9ed72 100644 --- a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/SqlAgent/other/handlers/postgres.ts +++ b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/SqlAgent/other/handlers/postgres.ts @@ -1,5 +1,5 @@ -import { type IExecuteFunctions } from 'n8n-workflow'; import { DataSource } from '@n8n/typeorm'; +import { type IExecuteFunctions } from 'n8n-workflow'; export async function getPostgresDataSource(this: IExecuteFunctions): Promise { const credentials = await this.getCredentials('postgres'); diff --git a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/SqlAgent/other/handlers/sqlite.ts b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/SqlAgent/other/handlers/sqlite.ts index 9240feb280..31db7b0dca 100644 --- a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/SqlAgent/other/handlers/sqlite.ts +++ b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/SqlAgent/other/handlers/sqlite.ts @@ -1,9 +1,9 @@ +import { DataSource } from '@n8n/typeorm'; import * as fs from 'fs'; import type { IExecuteFunctions, INodeExecutionData } from 'n8n-workflow'; import { BINARY_ENCODING, NodeOperationError } from 'n8n-workflow'; -import * as temp from 'temp'; import * as sqlite3 from 'sqlite3'; -import { DataSource } from '@n8n/typeorm'; +import * as temp from 'temp'; export async function getSqliteDataSource( this: IExecuteFunctions, diff --git a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ToolsAgent/description.ts b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ToolsAgent/description.ts index cb33560cc6..06b64a91de 100644 --- a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ToolsAgent/description.ts +++ b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ToolsAgent/description.ts @@ -1,4 +1,5 @@ import type { INodeProperties } from 'n8n-workflow'; + import { SYSTEM_MESSAGE } from './prompt'; export const toolsAgentProperties: INodeProperties[] = [ diff --git a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ToolsAgent/execute.ts b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ToolsAgent/execute.ts index 74d6819961..b0e36d0d8b 100644 --- a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ToolsAgent/execute.ts +++ b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ToolsAgent/execute.ts @@ -14,16 +14,13 @@ import type { IExecuteFunctions, INodeExecutionData } from 'n8n-workflow'; import type { ZodObject } from 'zod'; import { z } from 'zod'; -import { SYSTEM_MESSAGE } from './prompt'; -import { - isChatInstance, - getPromptInputByType, - getConnectedTools, -} from '../../../../../utils/helpers'; +import { isChatInstance, getPromptInputByType, getConnectedTools } from 
'@utils/helpers'; import { getOptionalOutputParsers, type N8nOutputParser, -} from '../../../../../utils/output_parsers/N8nOutputParser'; +} from '@utils/output_parsers/N8nOutputParser'; + +import { SYSTEM_MESSAGE } from './prompt'; function getOutputParserSchema(outputParser: N8nOutputParser): ZodObject { const schema = @@ -33,7 +30,7 @@ function getOutputParserSchema(outputParser: N8nOutputParser): ZodObject data.mimeType.startsWith('image/')) @@ -260,7 +257,7 @@ export async function toolsAgentExecute(this: IExecuteFunctions): Promise; diff --git a/packages/@n8n/nodes-langchain/nodes/agents/OpenAiAssistant/OpenAiAssistant.node.ts b/packages/@n8n/nodes-langchain/nodes/agents/OpenAiAssistant/OpenAiAssistant.node.ts index ca42323fc3..e44ad8f9d2 100644 --- a/packages/@n8n/nodes-langchain/nodes/agents/OpenAiAssistant/OpenAiAssistant.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/agents/OpenAiAssistant/OpenAiAssistant.node.ts @@ -10,9 +10,10 @@ import type { } from 'n8n-workflow'; import { OpenAI as OpenAIClient } from 'openai'; +import { getConnectedTools } from '@utils/helpers'; +import { getTracingConfig } from '@utils/tracing'; + import { formatToOpenAIAssistantTool } from './utils'; -import { getConnectedTools } from '../../../utils/helpers'; -import { getTracingConfig } from '../../../utils/tracing'; export class OpenAiAssistant implements INodeType { description: INodeTypeDescription = { diff --git a/packages/@n8n/nodes-langchain/nodes/agents/OpenAiAssistant/utils.ts b/packages/@n8n/nodes-langchain/nodes/agents/OpenAiAssistant/utils.ts index 294fc47847..d0db590bc3 100644 --- a/packages/@n8n/nodes-langchain/nodes/agents/OpenAiAssistant/utils.ts +++ b/packages/@n8n/nodes-langchain/nodes/agents/OpenAiAssistant/utils.ts @@ -1,6 +1,6 @@ -import { zodToJsonSchema } from 'zod-to-json-schema'; -import type { OpenAIClient } from '@langchain/openai'; import type { StructuredTool } from '@langchain/core/tools'; +import type { OpenAIClient } from '@langchain/openai'; +import { zodToJsonSchema } from 'zod-to-json-schema'; // Copied from langchain(`langchain/src/tools/convert_to_openai.ts`) // since these functions are not exported diff --git a/packages/@n8n/nodes-langchain/nodes/chains/ChainLLM/ChainLlm.node.ts b/packages/@n8n/nodes-langchain/nodes/chains/ChainLLM/ChainLlm.node.ts index 32d70f2d32..d4e205ec88 100644 --- a/packages/@n8n/nodes-langchain/nodes/chains/ChainLLM/ChainLlm.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/chains/ChainLLM/ChainLlm.node.ts @@ -27,16 +27,17 @@ import { NodeOperationError, } from 'n8n-workflow'; -import { getPromptInputByType, isChatInstance } from '../../../utils/helpers'; -import type { N8nOutputParser } from '../../../utils/output_parsers/N8nOutputParser'; -import { getOptionalOutputParsers } from '../../../utils/output_parsers/N8nOutputParser'; -import { getTemplateNoticeField } from '../../../utils/sharedFields'; -import { getTracingConfig } from '../../../utils/tracing'; +import { promptTypeOptions, textFromPreviousNode } from '@utils/descriptions'; +import { getPromptInputByType, isChatInstance } from '@utils/helpers'; +import type { N8nOutputParser } from '@utils/output_parsers/N8nOutputParser'; +import { getOptionalOutputParsers } from '@utils/output_parsers/N8nOutputParser'; +import { getTemplateNoticeField } from '@utils/sharedFields'; +import { getTracingConfig } from '@utils/tracing'; + import { getCustomErrorMessage as getCustomOpenAiErrorMessage, isOpenAiError, } from '../../vendors/OpenAi/helpers/error-handling'; -import { promptTypeOptions, 
textFromPreviousNode } from '../../../utils/descriptions'; interface MessagesTemplate { type: string; diff --git a/packages/@n8n/nodes-langchain/nodes/chains/ChainRetrievalQA/ChainRetrievalQa.node.ts b/packages/@n8n/nodes-langchain/nodes/chains/ChainRetrievalQA/ChainRetrievalQa.node.ts index 75f7458438..9c7c739701 100644 --- a/packages/@n8n/nodes-langchain/nodes/chains/ChainRetrievalQA/ChainRetrievalQa.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/chains/ChainRetrievalQA/ChainRetrievalQa.node.ts @@ -16,10 +16,10 @@ import { NodeOperationError, } from 'n8n-workflow'; -import { promptTypeOptions, textFromPreviousNode } from '../../../utils/descriptions'; -import { getPromptInputByType, isChatInstance } from '../../../utils/helpers'; -import { getTemplateNoticeField } from '../../../utils/sharedFields'; -import { getTracingConfig } from '../../../utils/tracing'; +import { promptTypeOptions, textFromPreviousNode } from '@utils/descriptions'; +import { getPromptInputByType, isChatInstance } from '@utils/helpers'; +import { getTemplateNoticeField } from '@utils/sharedFields'; +import { getTracingConfig } from '@utils/tracing'; const SYSTEM_PROMPT_TEMPLATE = `Use the following pieces of context to answer the users question. If you don't know the answer, just say that you don't know, don't try to make up an answer. diff --git a/packages/@n8n/nodes-langchain/nodes/chains/ChainSummarization/V1/ChainSummarizationV1.node.ts b/packages/@n8n/nodes-langchain/nodes/chains/ChainSummarization/V1/ChainSummarizationV1.node.ts index 3e7b6997d6..fedf979082 100644 --- a/packages/@n8n/nodes-langchain/nodes/chains/ChainSummarization/V1/ChainSummarizationV1.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/chains/ChainSummarization/V1/ChainSummarizationV1.node.ts @@ -1,3 +1,8 @@ +import type { Document } from '@langchain/core/documents'; +import type { BaseLanguageModel } from '@langchain/core/language_models/base'; +import { PromptTemplate } from '@langchain/core/prompts'; +import type { SummarizationChainParams } from 'langchain/chains'; +import { loadSummarizationChain } from 'langchain/chains'; import { NodeConnectionType, type INodeTypeBaseDescription, @@ -7,14 +12,10 @@ import { type INodeTypeDescription, } from 'n8n-workflow'; -import type { SummarizationChainParams } from 'langchain/chains'; -import { loadSummarizationChain } from 'langchain/chains'; -import type { BaseLanguageModel } from '@langchain/core/language_models/base'; -import type { Document } from '@langchain/core/documents'; -import { PromptTemplate } from '@langchain/core/prompts'; -import { N8nJsonLoader } from '../../../../utils/N8nJsonLoader'; -import { N8nBinaryLoader } from '../../../../utils/N8nBinaryLoader'; -import { getTemplateNoticeField } from '../../../../utils/sharedFields'; +import { N8nBinaryLoader } from '@utils/N8nBinaryLoader'; +import { N8nJsonLoader } from '@utils/N8nJsonLoader'; +import { getTemplateNoticeField } from '@utils/sharedFields'; + import { REFINE_PROMPT_TEMPLATE, DEFAULT_PROMPT_TEMPLATE } from '../prompt'; export class ChainSummarizationV1 implements INodeType { diff --git a/packages/@n8n/nodes-langchain/nodes/chains/ChainSummarization/V2/ChainSummarizationV2.node.ts b/packages/@n8n/nodes-langchain/nodes/chains/ChainSummarization/V2/ChainSummarizationV2.node.ts index 76964e99f1..ff6dadde59 100644 --- a/packages/@n8n/nodes-langchain/nodes/chains/ChainSummarization/V2/ChainSummarizationV2.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/chains/ChainSummarization/V2/ChainSummarizationV2.node.ts @@ -1,4 +1,8 
@@ -import { NodeConnectionType } from 'n8n-workflow'; +import type { Document } from '@langchain/core/documents'; +import type { BaseLanguageModel } from '@langchain/core/language_models/base'; +import type { TextSplitter } from '@langchain/textsplitters'; +import { RecursiveCharacterTextSplitter } from '@langchain/textsplitters'; +import { loadSummarizationChain } from 'langchain/chains'; import type { INodeTypeBaseDescription, IExecuteFunctions, @@ -7,18 +11,15 @@ import type { INodeTypeDescription, IDataObject, } from 'n8n-workflow'; +import { NodeConnectionType } from 'n8n-workflow'; + +import { N8nBinaryLoader } from '@utils/N8nBinaryLoader'; +import { N8nJsonLoader } from '@utils/N8nJsonLoader'; +import { getTemplateNoticeField } from '@utils/sharedFields'; +import { getTracingConfig } from '@utils/tracing'; -import { loadSummarizationChain } from 'langchain/chains'; -import type { BaseLanguageModel } from '@langchain/core/language_models/base'; -import type { Document } from '@langchain/core/documents'; -import type { TextSplitter } from '@langchain/textsplitters'; -import { RecursiveCharacterTextSplitter } from '@langchain/textsplitters'; -import { N8nJsonLoader } from '../../../../utils/N8nJsonLoader'; -import { N8nBinaryLoader } from '../../../../utils/N8nBinaryLoader'; -import { getTemplateNoticeField } from '../../../../utils/sharedFields'; -import { REFINE_PROMPT_TEMPLATE, DEFAULT_PROMPT_TEMPLATE } from '../prompt'; import { getChainPromptsArgs } from '../helpers'; -import { getTracingConfig } from '../../../../utils/tracing'; +import { REFINE_PROMPT_TEMPLATE, DEFAULT_PROMPT_TEMPLATE } from '../prompt'; function getInputs(parameters: IDataObject) { const chunkingMode = parameters?.chunkingMode; diff --git a/packages/@n8n/nodes-langchain/nodes/chains/ChainSummarization/helpers.ts b/packages/@n8n/nodes-langchain/nodes/chains/ChainSummarization/helpers.ts index 2a00d836e8..2da507ed00 100644 --- a/packages/@n8n/nodes-langchain/nodes/chains/ChainSummarization/helpers.ts +++ b/packages/@n8n/nodes-langchain/nodes/chains/ChainSummarization/helpers.ts @@ -1,5 +1,5 @@ -import type { SummarizationChainParams } from 'langchain/chains'; import { PromptTemplate } from '@langchain/core/prompts'; +import type { SummarizationChainParams } from 'langchain/chains'; interface ChainTypeOptions { combineMapPrompt?: string; prompt?: string; diff --git a/packages/@n8n/nodes-langchain/nodes/chains/InformationExtractor/InformationExtractor.node.ts b/packages/@n8n/nodes-langchain/nodes/chains/InformationExtractor/InformationExtractor.node.ts index ab6cd8f201..365a35ddd3 100644 --- a/packages/@n8n/nodes-langchain/nodes/chains/InformationExtractor/InformationExtractor.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/chains/InformationExtractor/InformationExtractor.node.ts @@ -13,15 +13,12 @@ import type { } from 'n8n-workflow'; import type { z } from 'zod'; +import { inputSchemaField, jsonSchemaExampleField, schemaTypeField } from '@utils/descriptions'; +import { convertJsonSchemaToZod, generateSchema } from '@utils/schemaParsing'; +import { getTracingConfig } from '@utils/tracing'; + import { makeZodSchemaFromAttributes } from './helpers'; import type { AttributeDefinition } from './types'; -import { - inputSchemaField, - jsonSchemaExampleField, - schemaTypeField, -} from '../../../utils/descriptions'; -import { convertJsonSchemaToZod, generateSchema } from '../../../utils/schemaParsing'; -import { getTracingConfig } from '../../../utils/tracing'; const SYSTEM_PROMPT_TEMPLATE = `You are an expert 
extraction algorithm. Only extract relevant information from the text. diff --git a/packages/@n8n/nodes-langchain/nodes/chains/InformationExtractor/helpers.ts b/packages/@n8n/nodes-langchain/nodes/chains/InformationExtractor/helpers.ts index acde52765e..22f39f25a9 100644 --- a/packages/@n8n/nodes-langchain/nodes/chains/InformationExtractor/helpers.ts +++ b/packages/@n8n/nodes-langchain/nodes/chains/InformationExtractor/helpers.ts @@ -1,4 +1,5 @@ import { z } from 'zod'; + import type { AttributeDefinition } from './types'; function makeAttributeSchema(attributeDefinition: AttributeDefinition, required: boolean = true) { diff --git a/packages/@n8n/nodes-langchain/nodes/chains/InformationExtractor/test/InformationExtraction.node.test.ts b/packages/@n8n/nodes-langchain/nodes/chains/InformationExtractor/test/InformationExtraction.node.test.ts index b4e4672dac..725af39a60 100644 --- a/packages/@n8n/nodes-langchain/nodes/chains/InformationExtractor/test/InformationExtraction.node.test.ts +++ b/packages/@n8n/nodes-langchain/nodes/chains/InformationExtractor/test/InformationExtraction.node.test.ts @@ -1,10 +1,10 @@ -import type { IDataObject, IExecuteFunctions } from 'n8n-workflow/src'; -import get from 'lodash/get'; - -import { FakeLLM, FakeListChatModel } from '@langchain/core/utils/testing'; import type { BaseLanguageModel } from '@langchain/core/language_models/base'; -import { InformationExtractor } from '../InformationExtractor.node'; +import { FakeLLM, FakeListChatModel } from '@langchain/core/utils/testing'; +import get from 'lodash/get'; +import type { IDataObject, IExecuteFunctions } from 'n8n-workflow/src'; + import { makeZodSchemaFromAttributes } from '../helpers'; +import { InformationExtractor } from '../InformationExtractor.node'; import type { AttributeDefinition } from '../types'; const mockPersonAttributes: AttributeDefinition[] = [ diff --git a/packages/@n8n/nodes-langchain/nodes/chains/SentimentAnalysis/SentimentAnalysis.node.ts b/packages/@n8n/nodes-langchain/nodes/chains/SentimentAnalysis/SentimentAnalysis.node.ts index 5f5c6f19db..e810b0f98a 100644 --- a/packages/@n8n/nodes-langchain/nodes/chains/SentimentAnalysis/SentimentAnalysis.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/chains/SentimentAnalysis/SentimentAnalysis.node.ts @@ -1,3 +1,8 @@ +import type { BaseLanguageModel } from '@langchain/core/language_models/base'; +import { HumanMessage } from '@langchain/core/messages'; +import { SystemMessagePromptTemplate, ChatPromptTemplate } from '@langchain/core/prompts'; +import { OutputFixingParser, StructuredOutputParser } from 'langchain/output_parsers'; +import { NodeConnectionType, NodeOperationError } from 'n8n-workflow'; import type { IDataObject, IExecuteFunctions, @@ -6,15 +11,9 @@ import type { INodeType, INodeTypeDescription, } from 'n8n-workflow'; - -import { NodeConnectionType, NodeOperationError } from 'n8n-workflow'; - -import type { BaseLanguageModel } from '@langchain/core/language_models/base'; -import { HumanMessage } from '@langchain/core/messages'; -import { SystemMessagePromptTemplate, ChatPromptTemplate } from '@langchain/core/prompts'; -import { OutputFixingParser, StructuredOutputParser } from 'langchain/output_parsers'; import { z } from 'zod'; -import { getTracingConfig } from '../../../utils/tracing'; + +import { getTracingConfig } from '@utils/tracing'; const DEFAULT_SYSTEM_PROMPT_TEMPLATE = 'You are highly intelligent and accurate sentiment analyzer. Analyze the sentiment of the provided text. 
Categorize it into one of the following: {categories}. Use the provided formatting instructions. Only output the JSON.'; diff --git a/packages/@n8n/nodes-langchain/nodes/chains/TextClassifier/TextClassifier.node.ts b/packages/@n8n/nodes-langchain/nodes/chains/TextClassifier/TextClassifier.node.ts index 7afc317c37..298c41572d 100644 --- a/packages/@n8n/nodes-langchain/nodes/chains/TextClassifier/TextClassifier.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/chains/TextClassifier/TextClassifier.node.ts @@ -13,7 +13,7 @@ import type { } from 'n8n-workflow'; import { z } from 'zod'; -import { getTracingConfig } from '../../../utils/tracing'; +import { getTracingConfig } from '@utils/tracing'; const SYSTEM_PROMPT_TEMPLATE = "Please classify the text provided by the user into one of the following categories: {categories}, and use the provided formatting instructions below. Don't explain, and only output the json."; diff --git a/packages/@n8n/nodes-langchain/nodes/code/Code.node.ts b/packages/@n8n/nodes-langchain/nodes/code/Code.node.ts index d73d6d3268..dda3f24414 100644 --- a/packages/@n8n/nodes-langchain/nodes/code/Code.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/code/Code.node.ts @@ -1,4 +1,9 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ +import type { Tool } from '@langchain/core/tools'; +import { makeResolverFromLegacyOptions } from '@n8n/vm2'; +import { JavaScriptSandbox } from 'n8n-nodes-base/dist/nodes/Code/JavaScriptSandbox'; +import { getSandboxContext } from 'n8n-nodes-base/dist/nodes/Code/Sandbox'; +import { standardizeOutput } from 'n8n-nodes-base/dist/nodes/Code/utils'; import { NodeOperationError, NodeConnectionType } from 'n8n-workflow'; import type { IExecuteFunctions, @@ -12,12 +17,7 @@ import type { // TODO: Add support for execute function. 
Got already started but got commented out -import { getSandboxContext } from 'n8n-nodes-base/dist/nodes/Code/Sandbox'; -import { JavaScriptSandbox } from 'n8n-nodes-base/dist/nodes/Code/JavaScriptSandbox'; -import { standardizeOutput } from 'n8n-nodes-base/dist/nodes/Code/utils'; -import type { Tool } from '@langchain/core/tools'; -import { makeResolverFromLegacyOptions } from '@n8n/vm2'; -import { logWrapper } from '../../utils/logWrapper'; +import { logWrapper } from '@utils/logWrapper'; const { NODE_FUNCTION_ALLOW_BUILTIN: builtIn, NODE_FUNCTION_ALLOW_EXTERNAL: external } = process.env; diff --git a/packages/@n8n/nodes-langchain/nodes/document_loaders/DocumentBinaryInputLoader/DocumentBinaryInputLoader.node.ts b/packages/@n8n/nodes-langchain/nodes/document_loaders/DocumentBinaryInputLoader/DocumentBinaryInputLoader.node.ts index 2e68db4e69..5c9ebf08b0 100644 --- a/packages/@n8n/nodes-langchain/nodes/document_loaders/DocumentBinaryInputLoader/DocumentBinaryInputLoader.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/document_loaders/DocumentBinaryInputLoader/DocumentBinaryInputLoader.node.ts @@ -1,4 +1,5 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ +import type { TextSplitter } from '@langchain/textsplitters'; import { NodeConnectionType, type INodeType, @@ -7,11 +8,9 @@ import { type SupplyData, } from 'n8n-workflow'; -import type { TextSplitter } from '@langchain/textsplitters'; - -import { logWrapper } from '../../../utils/logWrapper'; -import { N8nBinaryLoader } from '../../../utils/N8nBinaryLoader'; -import { getConnectionHintNoticeField, metadataFilterField } from '../../../utils/sharedFields'; +import { logWrapper } from '@utils/logWrapper'; +import { N8nBinaryLoader } from '@utils/N8nBinaryLoader'; +import { getConnectionHintNoticeField, metadataFilterField } from '@utils/sharedFields'; // Dependencies needed underneath the hood for the loaders. We add them // here only to track where what dependency is used diff --git a/packages/@n8n/nodes-langchain/nodes/document_loaders/DocumentDefaultDataLoader/DocumentDefaultDataLoader.node.ts b/packages/@n8n/nodes-langchain/nodes/document_loaders/DocumentDefaultDataLoader/DocumentDefaultDataLoader.node.ts index 5e6457951e..1f5ad6228a 100644 --- a/packages/@n8n/nodes-langchain/nodes/document_loaders/DocumentDefaultDataLoader/DocumentDefaultDataLoader.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/document_loaders/DocumentDefaultDataLoader/DocumentDefaultDataLoader.node.ts @@ -1,4 +1,5 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ +import type { TextSplitter } from '@langchain/textsplitters'; import { NodeConnectionType, type INodeType, @@ -7,10 +8,9 @@ import { type SupplyData, } from 'n8n-workflow'; -import type { TextSplitter } from '@langchain/textsplitters'; -import { logWrapper } from '../../../utils/logWrapper'; -import { N8nBinaryLoader } from '../../../utils/N8nBinaryLoader'; -import { metadataFilterField } from '../../../utils/sharedFields'; +import { logWrapper } from '@utils/logWrapper'; +import { N8nBinaryLoader } from '@utils/N8nBinaryLoader'; +import { metadataFilterField } from '@utils/sharedFields'; // Dependencies needed underneath the hood for the loaders. 
We add them // here only to track where what dependency is used @@ -18,7 +18,7 @@ import { metadataFilterField } from '../../../utils/sharedFields'; import 'mammoth'; // for docx import 'epub2'; // for epub import 'pdf-parse'; // for pdf -import { N8nJsonLoader } from '../../../utils/N8nJsonLoader'; +import { N8nJsonLoader } from '@utils/N8nJsonLoader'; export class DocumentDefaultDataLoader implements INodeType { description: INodeTypeDescription = { diff --git a/packages/@n8n/nodes-langchain/nodes/document_loaders/DocumentGithubLoader/DocumentGithubLoader.node.ts b/packages/@n8n/nodes-langchain/nodes/document_loaders/DocumentGithubLoader/DocumentGithubLoader.node.ts index 71a77f013c..7d63e32f0b 100644 --- a/packages/@n8n/nodes-langchain/nodes/document_loaders/DocumentGithubLoader/DocumentGithubLoader.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/document_loaders/DocumentGithubLoader/DocumentGithubLoader.node.ts @@ -1,4 +1,6 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ +import { GithubRepoLoader } from '@langchain/community/document_loaders/web/github'; +import type { CharacterTextSplitter } from '@langchain/textsplitters'; import { NodeConnectionType, type INodeType, @@ -6,10 +8,9 @@ import { type ISupplyDataFunctions, type SupplyData, } from 'n8n-workflow'; -import { GithubRepoLoader } from '@langchain/community/document_loaders/web/github'; -import type { CharacterTextSplitter } from '@langchain/textsplitters'; -import { logWrapper } from '../../../utils/logWrapper'; -import { getConnectionHintNoticeField } from '../../../utils/sharedFields'; + +import { logWrapper } from '@utils/logWrapper'; +import { getConnectionHintNoticeField } from '@utils/sharedFields'; export class DocumentGithubLoader implements INodeType { description: INodeTypeDescription = { diff --git a/packages/@n8n/nodes-langchain/nodes/document_loaders/DocumentJSONInputLoader/DocumentJsonInputLoader.node.ts b/packages/@n8n/nodes-langchain/nodes/document_loaders/DocumentJSONInputLoader/DocumentJsonInputLoader.node.ts index 2e8cb95a11..9c295ba144 100644 --- a/packages/@n8n/nodes-langchain/nodes/document_loaders/DocumentJSONInputLoader/DocumentJsonInputLoader.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/document_loaders/DocumentJSONInputLoader/DocumentJsonInputLoader.node.ts @@ -1,4 +1,5 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ +import type { TextSplitter } from '@langchain/textsplitters'; import { NodeConnectionType, type INodeType, @@ -7,10 +8,9 @@ import { type SupplyData, } from 'n8n-workflow'; -import type { TextSplitter } from '@langchain/textsplitters'; -import { logWrapper } from '../../../utils/logWrapper'; -import { N8nJsonLoader } from '../../../utils/N8nJsonLoader'; -import { getConnectionHintNoticeField, metadataFilterField } from '../../../utils/sharedFields'; +import { logWrapper } from '@utils/logWrapper'; +import { N8nJsonLoader } from '@utils/N8nJsonLoader'; +import { getConnectionHintNoticeField, metadataFilterField } from '@utils/sharedFields'; export class DocumentJsonInputLoader implements INodeType { description: INodeTypeDescription = { diff --git a/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsAwsBedrock/EmbeddingsAwsBedrock.node.ts b/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsAwsBedrock/EmbeddingsAwsBedrock.node.ts index 6e0782f1c1..fdb2da5ce0 100644 --- a/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsAwsBedrock/EmbeddingsAwsBedrock.node.ts +++ 
b/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsAwsBedrock/EmbeddingsAwsBedrock.node.ts @@ -8,8 +8,8 @@ import { type SupplyData, } from 'n8n-workflow'; -import { logWrapper } from '../../../utils/logWrapper'; -import { getConnectionHintNoticeField } from '../../../utils/sharedFields'; +import { logWrapper } from '@utils/logWrapper'; +import { getConnectionHintNoticeField } from '@utils/sharedFields'; export class EmbeddingsAwsBedrock implements INodeType { description: INodeTypeDescription = { diff --git a/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsAzureOpenAi/EmbeddingsAzureOpenAi.node.ts b/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsAzureOpenAi/EmbeddingsAzureOpenAi.node.ts index bf101292f2..65f493d578 100644 --- a/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsAzureOpenAi/EmbeddingsAzureOpenAi.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsAzureOpenAi/EmbeddingsAzureOpenAi.node.ts @@ -1,4 +1,5 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ +import { OpenAIEmbeddings } from '@langchain/openai'; import { NodeConnectionType, type INodeType, @@ -7,9 +8,8 @@ import { type SupplyData, } from 'n8n-workflow'; -import { OpenAIEmbeddings } from '@langchain/openai'; -import { logWrapper } from '../../../utils/logWrapper'; -import { getConnectionHintNoticeField } from '../../../utils/sharedFields'; +import { logWrapper } from '@utils/logWrapper'; +import { getConnectionHintNoticeField } from '@utils/sharedFields'; export class EmbeddingsAzureOpenAi implements INodeType { description: INodeTypeDescription = { diff --git a/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsCohere/EmbeddingsCohere.node.ts b/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsCohere/EmbeddingsCohere.node.ts index 26e5d39b70..ebab22ec55 100644 --- a/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsCohere/EmbeddingsCohere.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsCohere/EmbeddingsCohere.node.ts @@ -1,4 +1,5 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ +import { CohereEmbeddings } from '@langchain/cohere'; import { NodeConnectionType, type INodeType, @@ -6,9 +7,9 @@ import { type ISupplyDataFunctions, type SupplyData, } from 'n8n-workflow'; -import { CohereEmbeddings } from '@langchain/cohere'; -import { logWrapper } from '../../../utils/logWrapper'; -import { getConnectionHintNoticeField } from '../../../utils/sharedFields'; + +import { logWrapper } from '@utils/logWrapper'; +import { getConnectionHintNoticeField } from '@utils/sharedFields'; export class EmbeddingsCohere implements INodeType { description: INodeTypeDescription = { diff --git a/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsGoogleGemini/EmbeddingsGoogleGemini.node.ts b/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsGoogleGemini/EmbeddingsGoogleGemini.node.ts index 2a455e4574..949d6ee24e 100644 --- a/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsGoogleGemini/EmbeddingsGoogleGemini.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsGoogleGemini/EmbeddingsGoogleGemini.node.ts @@ -1,4 +1,5 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ +import { GoogleGenerativeAIEmbeddings } from '@langchain/google-genai'; import { NodeConnectionType, type INodeType, @@ -6,10 +7,9 @@ import { type ISupplyDataFunctions, type SupplyData, } from 'n8n-workflow'; -import { GoogleGenerativeAIEmbeddings } from '@langchain/google-genai'; 
-import { logWrapper } from '../../../utils/logWrapper'; -import { getConnectionHintNoticeField } from '../../../utils/sharedFields'; +import { logWrapper } from '@utils/logWrapper'; +import { getConnectionHintNoticeField } from '@utils/sharedFields'; export class EmbeddingsGoogleGemini implements INodeType { description: INodeTypeDescription = { diff --git a/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsHuggingFaceInference/EmbeddingsHuggingFaceInference.node.ts b/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsHuggingFaceInference/EmbeddingsHuggingFaceInference.node.ts index c8317630c3..c8023354ef 100644 --- a/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsHuggingFaceInference/EmbeddingsHuggingFaceInference.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsHuggingFaceInference/EmbeddingsHuggingFaceInference.node.ts @@ -1,4 +1,5 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ +import { HuggingFaceInferenceEmbeddings } from '@langchain/community/embeddings/hf'; import { NodeConnectionType, type INodeType, @@ -6,9 +7,9 @@ import { type ISupplyDataFunctions, type SupplyData, } from 'n8n-workflow'; -import { HuggingFaceInferenceEmbeddings } from '@langchain/community/embeddings/hf'; -import { logWrapper } from '../../../utils/logWrapper'; -import { getConnectionHintNoticeField } from '../../../utils/sharedFields'; + +import { logWrapper } from '@utils/logWrapper'; +import { getConnectionHintNoticeField } from '@utils/sharedFields'; export class EmbeddingsHuggingFaceInference implements INodeType { description: INodeTypeDescription = { diff --git a/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsMistralCloud/EmbeddingsMistralCloud.node.ts b/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsMistralCloud/EmbeddingsMistralCloud.node.ts index dbfb93b82e..553abfa406 100644 --- a/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsMistralCloud/EmbeddingsMistralCloud.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsMistralCloud/EmbeddingsMistralCloud.node.ts @@ -1,4 +1,6 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ +import type { MistralAIEmbeddingsParams } from '@langchain/mistralai'; +import { MistralAIEmbeddings } from '@langchain/mistralai'; import { NodeConnectionType, type INodeType, @@ -6,10 +8,9 @@ import { type ISupplyDataFunctions, type SupplyData, } from 'n8n-workflow'; -import type { MistralAIEmbeddingsParams } from '@langchain/mistralai'; -import { MistralAIEmbeddings } from '@langchain/mistralai'; -import { logWrapper } from '../../../utils/logWrapper'; -import { getConnectionHintNoticeField } from '../../../utils/sharedFields'; + +import { logWrapper } from '@utils/logWrapper'; +import { getConnectionHintNoticeField } from '@utils/sharedFields'; export class EmbeddingsMistralCloud implements INodeType { description: INodeTypeDescription = { diff --git a/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsOllama/EmbeddingsOllama.node.ts b/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsOllama/EmbeddingsOllama.node.ts index d84aa537ec..08feb90309 100644 --- a/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsOllama/EmbeddingsOllama.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsOllama/EmbeddingsOllama.node.ts @@ -1,4 +1,5 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ +import { OllamaEmbeddings } from '@langchain/ollama'; import { NodeConnectionType, type INodeType, @@ -6,9 +7,10 
@@ import { type ISupplyDataFunctions, type SupplyData, } from 'n8n-workflow'; -import { OllamaEmbeddings } from '@langchain/ollama'; -import { logWrapper } from '../../../utils/logWrapper'; -import { getConnectionHintNoticeField } from '../../../utils/sharedFields'; + +import { logWrapper } from '@utils/logWrapper'; +import { getConnectionHintNoticeField } from '@utils/sharedFields'; + import { ollamaDescription, ollamaModel } from '../../llms/LMOllama/description'; export class EmbeddingsOllama implements INodeType { diff --git a/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsOpenAI/EmbeddingsOpenAi.node.ts b/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsOpenAI/EmbeddingsOpenAi.node.ts index aececc09ae..fb5f67e30c 100644 --- a/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsOpenAI/EmbeddingsOpenAi.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsOpenAI/EmbeddingsOpenAi.node.ts @@ -1,4 +1,5 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ +import { OpenAIEmbeddings } from '@langchain/openai'; import { NodeConnectionType, type INodeType, @@ -7,11 +8,10 @@ import { type ISupplyDataFunctions, type INodeProperties, } from 'n8n-workflow'; - import type { ClientOptions } from 'openai'; -import { OpenAIEmbeddings } from '@langchain/openai'; -import { logWrapper } from '../../../utils/logWrapper'; -import { getConnectionHintNoticeField } from '../../../utils/sharedFields'; + +import { logWrapper } from '@utils/logWrapper'; +import { getConnectionHintNoticeField } from '@utils/sharedFields'; const modelParameter: INodeProperties = { displayName: 'Model', diff --git a/packages/@n8n/nodes-langchain/nodes/llms/LMChatAnthropic/LmChatAnthropic.node.ts b/packages/@n8n/nodes-langchain/nodes/llms/LMChatAnthropic/LmChatAnthropic.node.ts index c575b59aa8..3a38ce3a31 100644 --- a/packages/@n8n/nodes-langchain/nodes/llms/LMChatAnthropic/LmChatAnthropic.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/llms/LMChatAnthropic/LmChatAnthropic.node.ts @@ -12,9 +12,10 @@ import { type SupplyData, } from 'n8n-workflow'; -import { getConnectionHintNoticeField } from '../../../utils/sharedFields'; -import { N8nLlmTracing } from '../N8nLlmTracing'; +import { getConnectionHintNoticeField } from '@utils/sharedFields'; + import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler'; +import { N8nLlmTracing } from '../N8nLlmTracing'; const modelField: INodeProperties = { displayName: 'Model', diff --git a/packages/@n8n/nodes-langchain/nodes/llms/LMChatOllama/LmChatOllama.node.ts b/packages/@n8n/nodes-langchain/nodes/llms/LMChatOllama/LmChatOllama.node.ts index 354e54e27a..d4685fa802 100644 --- a/packages/@n8n/nodes-langchain/nodes/llms/LMChatOllama/LmChatOllama.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/llms/LMChatOllama/LmChatOllama.node.ts @@ -1,4 +1,7 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ + +import type { ChatOllamaInput } from '@langchain/ollama'; +import { ChatOllama } from '@langchain/ollama'; import { NodeConnectionType, type INodeType, @@ -7,12 +10,11 @@ import { type SupplyData, } from 'n8n-workflow'; -import type { ChatOllamaInput } from '@langchain/ollama'; -import { ChatOllama } from '@langchain/ollama'; -import { getConnectionHintNoticeField } from '../../../utils/sharedFields'; +import { getConnectionHintNoticeField } from '@utils/sharedFields'; + import { ollamaModel, ollamaOptions, ollamaDescription } from '../LMOllama/description'; -import { N8nLlmTracing } from 
'../N8nLlmTracing'; import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler'; +import { N8nLlmTracing } from '../N8nLlmTracing'; export class LmChatOllama implements INodeType { description: INodeTypeDescription = { diff --git a/packages/@n8n/nodes-langchain/nodes/llms/LMChatOpenAi/LmChatOpenAi.node.ts b/packages/@n8n/nodes-langchain/nodes/llms/LMChatOpenAi/LmChatOpenAi.node.ts index 2e55e56722..f3dceb5dd1 100644 --- a/packages/@n8n/nodes-langchain/nodes/llms/LMChatOpenAi/LmChatOpenAi.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/llms/LMChatOpenAi/LmChatOpenAi.node.ts @@ -9,7 +9,8 @@ import { type SupplyData, } from 'n8n-workflow'; -import { getConnectionHintNoticeField } from '../../../utils/sharedFields'; +import { getConnectionHintNoticeField } from '@utils/sharedFields'; + import { openAiFailedAttemptHandler } from '../../vendors/OpenAi/helpers/error-handling'; import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler'; import { N8nLlmTracing } from '../N8nLlmTracing'; diff --git a/packages/@n8n/nodes-langchain/nodes/llms/LMCohere/LmCohere.node.ts b/packages/@n8n/nodes-langchain/nodes/llms/LMCohere/LmCohere.node.ts index 4b5f85f915..6b9559104b 100644 --- a/packages/@n8n/nodes-langchain/nodes/llms/LMCohere/LmCohere.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/llms/LMCohere/LmCohere.node.ts @@ -1,4 +1,5 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ +import { Cohere } from '@langchain/cohere'; import { NodeConnectionType, type INodeType, @@ -7,10 +8,10 @@ import { type SupplyData, } from 'n8n-workflow'; -import { Cohere } from '@langchain/cohere'; -import { getConnectionHintNoticeField } from '../../../utils/sharedFields'; -import { N8nLlmTracing } from '../N8nLlmTracing'; +import { getConnectionHintNoticeField } from '@utils/sharedFields'; + import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler'; +import { N8nLlmTracing } from '../N8nLlmTracing'; export class LmCohere implements INodeType { description: INodeTypeDescription = { diff --git a/packages/@n8n/nodes-langchain/nodes/llms/LMOllama/LmOllama.node.ts b/packages/@n8n/nodes-langchain/nodes/llms/LMOllama/LmOllama.node.ts index ddd565e3a9..21a7a0c50f 100644 --- a/packages/@n8n/nodes-langchain/nodes/llms/LMOllama/LmOllama.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/llms/LMOllama/LmOllama.node.ts @@ -1,4 +1,6 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ + +import { Ollama } from '@langchain/community/llms/ollama'; import { NodeConnectionType, type INodeType, @@ -7,11 +9,11 @@ import { type SupplyData, } from 'n8n-workflow'; -import { Ollama } from '@langchain/community/llms/ollama'; -import { getConnectionHintNoticeField } from '../../../utils/sharedFields'; -import { N8nLlmTracing } from '../N8nLlmTracing'; +import { getConnectionHintNoticeField } from '@utils/sharedFields'; + import { ollamaDescription, ollamaModel, ollamaOptions } from './description'; import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler'; +import { N8nLlmTracing } from '../N8nLlmTracing'; export class LmOllama implements INodeType { description: INodeTypeDescription = { diff --git a/packages/@n8n/nodes-langchain/nodes/llms/LMOpenAi/LmOpenAi.node.ts b/packages/@n8n/nodes-langchain/nodes/llms/LMOpenAi/LmOpenAi.node.ts index 41cb622294..1a64f07cca 100644 --- a/packages/@n8n/nodes-langchain/nodes/llms/LMOpenAi/LmOpenAi.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/llms/LMOpenAi/LmOpenAi.node.ts @@ -1,4 +1,5 @@ /* 
eslint-disable n8n-nodes-base/node-dirname-against-convention */ +import { OpenAI, type ClientOptions } from '@langchain/openai'; import { NodeConnectionType } from 'n8n-workflow'; import type { INodeType, @@ -8,9 +9,8 @@ import type { ILoadOptionsFunctions, } from 'n8n-workflow'; -import { OpenAI, type ClientOptions } from '@langchain/openai'; -import { N8nLlmTracing } from '../N8nLlmTracing'; import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler'; +import { N8nLlmTracing } from '../N8nLlmTracing'; type LmOpenAiOptions = { baseURL?: string; diff --git a/packages/@n8n/nodes-langchain/nodes/llms/LMOpenHuggingFaceInference/LmOpenHuggingFaceInference.node.ts b/packages/@n8n/nodes-langchain/nodes/llms/LMOpenHuggingFaceInference/LmOpenHuggingFaceInference.node.ts index 7823c91b52..e393d86f8a 100644 --- a/packages/@n8n/nodes-langchain/nodes/llms/LMOpenHuggingFaceInference/LmOpenHuggingFaceInference.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/llms/LMOpenHuggingFaceInference/LmOpenHuggingFaceInference.node.ts @@ -1,4 +1,5 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ +import { HuggingFaceInference } from '@langchain/community/llms/hf'; import { NodeConnectionType, type INodeType, @@ -7,10 +8,10 @@ import { type SupplyData, } from 'n8n-workflow'; -import { HuggingFaceInference } from '@langchain/community/llms/hf'; -import { getConnectionHintNoticeField } from '../../../utils/sharedFields'; -import { N8nLlmTracing } from '../N8nLlmTracing'; +import { getConnectionHintNoticeField } from '@utils/sharedFields'; + import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler'; +import { N8nLlmTracing } from '../N8nLlmTracing'; export class LmOpenHuggingFaceInference implements INodeType { description: INodeTypeDescription = { diff --git a/packages/@n8n/nodes-langchain/nodes/llms/LmChatAwsBedrock/LmChatAwsBedrock.node.ts b/packages/@n8n/nodes-langchain/nodes/llms/LmChatAwsBedrock/LmChatAwsBedrock.node.ts index 3d928ce801..ef15a531cf 100644 --- a/packages/@n8n/nodes-langchain/nodes/llms/LmChatAwsBedrock/LmChatAwsBedrock.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/llms/LmChatAwsBedrock/LmChatAwsBedrock.node.ts @@ -8,9 +8,10 @@ import { type SupplyData, } from 'n8n-workflow'; -import { getConnectionHintNoticeField } from '../../../utils/sharedFields'; -import { N8nLlmTracing } from '../N8nLlmTracing'; +import { getConnectionHintNoticeField } from '@utils/sharedFields'; + import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler'; +import { N8nLlmTracing } from '../N8nLlmTracing'; export class LmChatAwsBedrock implements INodeType { description: INodeTypeDescription = { diff --git a/packages/@n8n/nodes-langchain/nodes/llms/LmChatAzureOpenAi/LmChatAzureOpenAi.node.ts b/packages/@n8n/nodes-langchain/nodes/llms/LmChatAzureOpenAi/LmChatAzureOpenAi.node.ts index e2292abc77..5fc562153d 100644 --- a/packages/@n8n/nodes-langchain/nodes/llms/LmChatAzureOpenAi/LmChatAzureOpenAi.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/llms/LmChatAzureOpenAi/LmChatAzureOpenAi.node.ts @@ -1,4 +1,5 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ +import { ChatOpenAI } from '@langchain/openai'; import { NodeConnectionType, type INodeType, @@ -7,10 +8,10 @@ import { type SupplyData, } from 'n8n-workflow'; -import { ChatOpenAI } from '@langchain/openai'; -import { getConnectionHintNoticeField } from '../../../utils/sharedFields'; -import { N8nLlmTracing } from '../N8nLlmTracing'; +import { 
getConnectionHintNoticeField } from '@utils/sharedFields'; + import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler'; +import { N8nLlmTracing } from '../N8nLlmTracing'; export class LmChatAzureOpenAi implements INodeType { description: INodeTypeDescription = { diff --git a/packages/@n8n/nodes-langchain/nodes/llms/LmChatGoogleGemini/LmChatGoogleGemini.node.ts b/packages/@n8n/nodes-langchain/nodes/llms/LmChatGoogleGemini/LmChatGoogleGemini.node.ts index 9bade1e26a..f8b7d2bb3e 100644 --- a/packages/@n8n/nodes-langchain/nodes/llms/LmChatGoogleGemini/LmChatGoogleGemini.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/llms/LmChatGoogleGemini/LmChatGoogleGemini.node.ts @@ -1,4 +1,6 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ +import type { SafetySetting } from '@google/generative-ai'; +import { ChatGoogleGenerativeAI } from '@langchain/google-genai'; import { NodeConnectionType, type INodeType, @@ -6,12 +8,12 @@ import { type ISupplyDataFunctions, type SupplyData, } from 'n8n-workflow'; -import { ChatGoogleGenerativeAI } from '@langchain/google-genai'; -import type { SafetySetting } from '@google/generative-ai'; -import { getConnectionHintNoticeField } from '../../../utils/sharedFields'; -import { N8nLlmTracing } from '../N8nLlmTracing'; + +import { getConnectionHintNoticeField } from '@utils/sharedFields'; + import { additionalOptions } from '../gemini-common/additional-options'; import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler'; +import { N8nLlmTracing } from '../N8nLlmTracing'; export class LmChatGoogleGemini implements INodeType { description: INodeTypeDescription = { diff --git a/packages/@n8n/nodes-langchain/nodes/llms/LmChatGoogleVertex/LmChatGoogleVertex.node.ts b/packages/@n8n/nodes-langchain/nodes/llms/LmChatGoogleVertex/LmChatGoogleVertex.node.ts index 92b51e534f..5ca6091378 100644 --- a/packages/@n8n/nodes-langchain/nodes/llms/LmChatGoogleVertex/LmChatGoogleVertex.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/llms/LmChatGoogleVertex/LmChatGoogleVertex.node.ts @@ -1,4 +1,8 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ +import type { SafetySetting } from '@google/generative-ai'; +import { ProjectsClient } from '@google-cloud/resource-manager'; +import { ChatVertexAI } from '@langchain/google-vertexai'; +import { formatPrivateKey } from 'n8n-nodes-base/dist/utils/utilities'; import { NodeConnectionType, type INodeType, @@ -9,15 +13,13 @@ import { type JsonObject, NodeOperationError, } from 'n8n-workflow'; -import { ChatVertexAI } from '@langchain/google-vertexai'; -import type { SafetySetting } from '@google/generative-ai'; -import { ProjectsClient } from '@google-cloud/resource-manager'; -import { formatPrivateKey } from 'n8n-nodes-base/dist/utils/utilities'; -import { getConnectionHintNoticeField } from '../../../utils/sharedFields'; -import { N8nLlmTracing } from '../N8nLlmTracing'; -import { additionalOptions } from '../gemini-common/additional-options'; + +import { getConnectionHintNoticeField } from '@utils/sharedFields'; + import { makeErrorFromStatus } from './error-handling'; +import { additionalOptions } from '../gemini-common/additional-options'; import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler'; +import { N8nLlmTracing } from '../N8nLlmTracing'; export class LmChatGoogleVertex implements INodeType { description: INodeTypeDescription = { diff --git a/packages/@n8n/nodes-langchain/nodes/llms/LmChatGroq/LmChatGroq.node.ts 
b/packages/@n8n/nodes-langchain/nodes/llms/LmChatGroq/LmChatGroq.node.ts index 1494dbcf55..fb859e3fce 100644 --- a/packages/@n8n/nodes-langchain/nodes/llms/LmChatGroq/LmChatGroq.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/llms/LmChatGroq/LmChatGroq.node.ts @@ -1,4 +1,5 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ +import { ChatGroq } from '@langchain/groq'; import { NodeConnectionType, type INodeType, @@ -7,10 +8,10 @@ import { type SupplyData, } from 'n8n-workflow'; -import { ChatGroq } from '@langchain/groq'; -import { getConnectionHintNoticeField } from '../../../utils/sharedFields'; -import { N8nLlmTracing } from '../N8nLlmTracing'; +import { getConnectionHintNoticeField } from '@utils/sharedFields'; + import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler'; +import { N8nLlmTracing } from '../N8nLlmTracing'; export class LmChatGroq implements INodeType { description: INodeTypeDescription = { diff --git a/packages/@n8n/nodes-langchain/nodes/llms/LmChatMistralCloud/LmChatMistralCloud.node.ts b/packages/@n8n/nodes-langchain/nodes/llms/LmChatMistralCloud/LmChatMistralCloud.node.ts index edd533b6ba..a23c2d4e9f 100644 --- a/packages/@n8n/nodes-langchain/nodes/llms/LmChatMistralCloud/LmChatMistralCloud.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/llms/LmChatMistralCloud/LmChatMistralCloud.node.ts @@ -1,4 +1,7 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ + +import type { ChatMistralAIInput } from '@langchain/mistralai'; +import { ChatMistralAI } from '@langchain/mistralai'; import { NodeConnectionType, type INodeType, @@ -7,11 +10,10 @@ import { type SupplyData, } from 'n8n-workflow'; -import type { ChatMistralAIInput } from '@langchain/mistralai'; -import { ChatMistralAI } from '@langchain/mistralai'; -import { getConnectionHintNoticeField } from '../../../utils/sharedFields'; -import { N8nLlmTracing } from '../N8nLlmTracing'; +import { getConnectionHintNoticeField } from '@utils/sharedFields'; + import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler'; +import { N8nLlmTracing } from '../N8nLlmTracing'; export class LmChatMistralCloud implements INodeType { description: INodeTypeDescription = { diff --git a/packages/@n8n/nodes-langchain/nodes/llms/N8nLlmTracing.ts b/packages/@n8n/nodes-langchain/nodes/llms/N8nLlmTracing.ts index af60b72982..3d426309b7 100644 --- a/packages/@n8n/nodes-langchain/nodes/llms/N8nLlmTracing.ts +++ b/packages/@n8n/nodes-langchain/nodes/llms/N8nLlmTracing.ts @@ -9,11 +9,11 @@ import type { import type { BaseMessage } from '@langchain/core/messages'; import type { LLMResult } from '@langchain/core/outputs'; import { encodingForModel } from '@langchain/core/utils/tiktoken'; -import type { IDataObject, ISupplyDataFunctions, JsonObject } from 'n8n-workflow'; import { pick } from 'lodash'; +import type { IDataObject, ISupplyDataFunctions, JsonObject } from 'n8n-workflow'; import { NodeConnectionType, NodeError, NodeOperationError } from 'n8n-workflow'; -import { logAiEvent } from '../../utils/helpers'; +import { logAiEvent } from '@utils/helpers'; type TokensUsageParser = (llmOutput: LLMResult['llmOutput']) => { completionTokens: number; diff --git a/packages/@n8n/nodes-langchain/nodes/llms/gemini-common/additional-options.ts b/packages/@n8n/nodes-langchain/nodes/llms/gemini-common/additional-options.ts index 3fc1900b8a..f154b676c5 100644 --- a/packages/@n8n/nodes-langchain/nodes/llms/gemini-common/additional-options.ts +++ 
b/packages/@n8n/nodes-langchain/nodes/llms/gemini-common/additional-options.ts @@ -1,5 +1,6 @@ import type { HarmBlockThreshold, HarmCategory } from '@google/generative-ai'; import type { INodeProperties } from 'n8n-workflow'; + import { harmCategories, harmThresholds } from './safety-options'; export const additionalOptions: INodeProperties = { diff --git a/packages/@n8n/nodes-langchain/nodes/memory/MemoryBufferWindow/MemoryBufferWindow.node.ts b/packages/@n8n/nodes-langchain/nodes/memory/MemoryBufferWindow/MemoryBufferWindow.node.ts index 28025f2884..480bed68f9 100644 --- a/packages/@n8n/nodes-langchain/nodes/memory/MemoryBufferWindow/MemoryBufferWindow.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/memory/MemoryBufferWindow/MemoryBufferWindow.node.ts @@ -9,9 +9,10 @@ import { type SupplyData, } from 'n8n-workflow'; -import { getSessionId } from '../../../utils/helpers'; -import { logWrapper } from '../../../utils/logWrapper'; -import { getConnectionHintNoticeField } from '../../../utils/sharedFields'; +import { getSessionId } from '@utils/helpers'; +import { logWrapper } from '@utils/logWrapper'; +import { getConnectionHintNoticeField } from '@utils/sharedFields'; + import { sessionIdOption, sessionKeyProperty, diff --git a/packages/@n8n/nodes-langchain/nodes/memory/MemoryChatRetriever/MemoryChatRetriever.node.ts b/packages/@n8n/nodes-langchain/nodes/memory/MemoryChatRetriever/MemoryChatRetriever.node.ts index f2bb0f43b0..fa54f25a16 100644 --- a/packages/@n8n/nodes-langchain/nodes/memory/MemoryChatRetriever/MemoryChatRetriever.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/memory/MemoryChatRetriever/MemoryChatRetriever.node.ts @@ -1,4 +1,6 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ +import type { BaseChatMemory } from '@langchain/community/memory/chat_memory'; +import type { BaseMessage } from '@langchain/core/messages'; import { NodeConnectionType, type IDataObject, @@ -7,8 +9,6 @@ import { type INodeType, type INodeTypeDescription, } from 'n8n-workflow'; -import type { BaseChatMemory } from '@langchain/community/memory/chat_memory'; -import type { BaseMessage } from '@langchain/core/messages'; function simplifyMessages(messages: BaseMessage[]) { const chunkedMessages = []; diff --git a/packages/@n8n/nodes-langchain/nodes/memory/MemoryManager/MemoryManager.node.ts b/packages/@n8n/nodes-langchain/nodes/memory/MemoryManager/MemoryManager.node.ts index 04d6035e7f..964da65475 100644 --- a/packages/@n8n/nodes-langchain/nodes/memory/MemoryManager/MemoryManager.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/memory/MemoryManager/MemoryManager.node.ts @@ -1,4 +1,6 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ +import type { BaseChatMemory } from '@langchain/community/memory/chat_memory'; +import { AIMessage, SystemMessage, HumanMessage, type BaseMessage } from '@langchain/core/messages'; import { NodeConnectionType } from 'n8n-workflow'; import type { IDataObject, @@ -7,8 +9,6 @@ import type { INodeType, INodeTypeDescription, } from 'n8n-workflow'; -import type { BaseChatMemory } from '@langchain/community/memory/chat_memory'; -import { AIMessage, SystemMessage, HumanMessage, type BaseMessage } from '@langchain/core/messages'; type MessageRole = 'ai' | 'system' | 'user'; interface MessageRecord { diff --git a/packages/@n8n/nodes-langchain/nodes/memory/MemoryMotorhead/MemoryMotorhead.node.ts b/packages/@n8n/nodes-langchain/nodes/memory/MemoryMotorhead/MemoryMotorhead.node.ts index a326f4c1be..f5184d7e93 100644 --- 
a/packages/@n8n/nodes-langchain/nodes/memory/MemoryMotorhead/MemoryMotorhead.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/memory/MemoryMotorhead/MemoryMotorhead.node.ts @@ -8,9 +8,10 @@ import { type SupplyData, } from 'n8n-workflow'; -import { getSessionId } from '../../../utils/helpers'; -import { logWrapper } from '../../../utils/logWrapper'; -import { getConnectionHintNoticeField } from '../../../utils/sharedFields'; +import { getSessionId } from '@utils/helpers'; +import { logWrapper } from '@utils/logWrapper'; +import { getConnectionHintNoticeField } from '@utils/sharedFields'; + import { expressionSessionKeyProperty, sessionIdOption, sessionKeyProperty } from '../descriptions'; export class MemoryMotorhead implements INodeType { diff --git a/packages/@n8n/nodes-langchain/nodes/memory/MemoryPostgresChat/MemoryPostgresChat.node.ts b/packages/@n8n/nodes-langchain/nodes/memory/MemoryPostgresChat/MemoryPostgresChat.node.ts index b3d5f2f409..18fd76e3c5 100644 --- a/packages/@n8n/nodes-langchain/nodes/memory/MemoryPostgresChat/MemoryPostgresChat.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/memory/MemoryPostgresChat/MemoryPostgresChat.node.ts @@ -13,9 +13,10 @@ import type { import { NodeConnectionType } from 'n8n-workflow'; import type pg from 'pg'; -import { getSessionId } from '../../../utils/helpers'; -import { logWrapper } from '../../../utils/logWrapper'; -import { getConnectionHintNoticeField } from '../../../utils/sharedFields'; +import { getSessionId } from '@utils/helpers'; +import { logWrapper } from '@utils/logWrapper'; +import { getConnectionHintNoticeField } from '@utils/sharedFields'; + import { sessionIdOption, sessionKeyProperty, diff --git a/packages/@n8n/nodes-langchain/nodes/memory/MemoryRedisChat/MemoryRedisChat.node.ts b/packages/@n8n/nodes-langchain/nodes/memory/MemoryRedisChat/MemoryRedisChat.node.ts index 09208d96f7..ab7d02e2c4 100644 --- a/packages/@n8n/nodes-langchain/nodes/memory/MemoryRedisChat/MemoryRedisChat.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/memory/MemoryRedisChat/MemoryRedisChat.node.ts @@ -13,9 +13,10 @@ import { import type { RedisClientOptions } from 'redis'; import { createClient } from 'redis'; -import { getSessionId } from '../../../utils/helpers'; -import { logWrapper } from '../../../utils/logWrapper'; -import { getConnectionHintNoticeField } from '../../../utils/sharedFields'; +import { getSessionId } from '@utils/helpers'; +import { logWrapper } from '@utils/logWrapper'; +import { getConnectionHintNoticeField } from '@utils/sharedFields'; + import { sessionIdOption, sessionKeyProperty, diff --git a/packages/@n8n/nodes-langchain/nodes/memory/MemoryXata/MemoryXata.node.ts b/packages/@n8n/nodes-langchain/nodes/memory/MemoryXata/MemoryXata.node.ts index c48f32976b..c1ad7b9539 100644 --- a/packages/@n8n/nodes-langchain/nodes/memory/MemoryXata/MemoryXata.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/memory/MemoryXata/MemoryXata.node.ts @@ -10,9 +10,10 @@ import type { SupplyData, } from 'n8n-workflow'; -import { getSessionId } from '../../../utils/helpers'; -import { logWrapper } from '../../../utils/logWrapper'; -import { getConnectionHintNoticeField } from '../../../utils/sharedFields'; +import { getSessionId } from '@utils/helpers'; +import { logWrapper } from '@utils/logWrapper'; +import { getConnectionHintNoticeField } from '@utils/sharedFields'; + import { sessionIdOption, sessionKeyProperty, diff --git a/packages/@n8n/nodes-langchain/nodes/memory/MemoryZep/MemoryZep.node.ts 
b/packages/@n8n/nodes-langchain/nodes/memory/MemoryZep/MemoryZep.node.ts index 3c9ab307e2..1943f41c03 100644 --- a/packages/@n8n/nodes-langchain/nodes/memory/MemoryZep/MemoryZep.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/memory/MemoryZep/MemoryZep.node.ts @@ -1,4 +1,9 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ +import type { BaseChatMemory } from '@langchain/community/dist/memory/chat_memory'; +import { ZepMemory } from '@langchain/community/memory/zep'; +import { ZepCloudMemory } from '@langchain/community/memory/zep_cloud'; +import type { InputValues, MemoryVariables } from '@langchain/core/memory'; +import type { BaseMessage } from '@langchain/core/messages'; import { NodeConnectionType, type ISupplyDataFunctions, @@ -7,16 +12,12 @@ import { type SupplyData, NodeOperationError, } from 'n8n-workflow'; -import { ZepMemory } from '@langchain/community/memory/zep'; -import { ZepCloudMemory } from '@langchain/community/memory/zep_cloud'; -import { logWrapper } from '../../../utils/logWrapper'; -import { getConnectionHintNoticeField } from '../../../utils/sharedFields'; +import { getSessionId } from '@utils/helpers'; +import { logWrapper } from '@utils/logWrapper'; +import { getConnectionHintNoticeField } from '@utils/sharedFields'; + import { expressionSessionKeyProperty, sessionIdOption, sessionKeyProperty } from '../descriptions'; -import { getSessionId } from '../../../utils/helpers'; -import type { BaseChatMemory } from '@langchain/community/dist/memory/chat_memory'; -import type { InputValues, MemoryVariables } from '@langchain/core/memory'; -import type { BaseMessage } from '@langchain/core/messages'; // Extend ZepCloudMemory to trim white space in messages. class WhiteSpaceTrimmedZepCloudMemory extends ZepCloudMemory { diff --git a/packages/@n8n/nodes-langchain/nodes/memory/descriptions.ts b/packages/@n8n/nodes-langchain/nodes/memory/descriptions.ts index 4627671a9b..fd0a022015 100644 --- a/packages/@n8n/nodes-langchain/nodes/memory/descriptions.ts +++ b/packages/@n8n/nodes-langchain/nodes/memory/descriptions.ts @@ -6,10 +6,10 @@ export const sessionIdOption: INodeProperties = { type: 'options', options: [ { - // eslint-disable-next-line n8n-nodes-base/node-param-display-name-miscased - name: 'Take from previous node automatically', + name: 'Connected Chat Trigger Node', value: 'fromInput', - description: 'Looks for an input field called sessionId', + description: + "Looks for an input field called 'sessionId' that is coming from a directly connected Chat Trigger", }, { // eslint-disable-next-line n8n-nodes-base/node-param-display-name-miscased diff --git a/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserAutofixing/OutputParserAutofixing.node.ts b/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserAutofixing/OutputParserAutofixing.node.ts index 4f385e1770..0ccf4c27c0 100644 --- a/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserAutofixing/OutputParserAutofixing.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserAutofixing/OutputParserAutofixing.node.ts @@ -8,12 +8,13 @@ import type { SupplyData, } from 'n8n-workflow'; -import { NAIVE_FIX_PROMPT } from './prompt'; import { N8nOutputFixingParser, type N8nStructuredOutputParser, -} from '../../../utils/output_parsers/N8nOutputParser'; -import { getConnectionHintNoticeField } from '../../../utils/sharedFields'; +} from '@utils/output_parsers/N8nOutputParser'; +import { getConnectionHintNoticeField } from '@utils/sharedFields'; + +import { 
NAIVE_FIX_PROMPT } from './prompt'; export class OutputParserAutofixing implements INodeType { description: INodeTypeDescription = { diff --git a/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserAutofixing/test/OutputParserAutofixing.node.test.ts b/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserAutofixing/test/OutputParserAutofixing.node.test.ts index 9fcae1a8fa..45f054a34b 100644 --- a/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserAutofixing/test/OutputParserAutofixing.node.test.ts +++ b/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserAutofixing/test/OutputParserAutofixing.node.test.ts @@ -11,7 +11,8 @@ import { ApplicationError, NodeConnectionType, NodeOperationError } from 'n8n-wo import type { N8nOutputFixingParser, N8nStructuredOutputParser, -} from '../../../../utils/output_parsers/N8nOutputParser'; +} from '@utils/output_parsers/N8nOutputParser'; + import { OutputParserAutofixing } from '../OutputParserAutofixing.node'; import { NAIVE_FIX_PROMPT } from '../prompt'; diff --git a/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserItemList/OutputParserItemList.node.ts b/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserItemList/OutputParserItemList.node.ts index b613c14775..696a6be79c 100644 --- a/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserItemList/OutputParserItemList.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserItemList/OutputParserItemList.node.ts @@ -7,8 +7,8 @@ import { type SupplyData, } from 'n8n-workflow'; -import { N8nItemListOutputParser } from '../../../utils/output_parsers/N8nItemListOutputParser'; -import { getConnectionHintNoticeField } from '../../../utils/sharedFields'; +import { N8nItemListOutputParser } from '@utils/output_parsers/N8nItemListOutputParser'; +import { getConnectionHintNoticeField } from '@utils/sharedFields'; export class OutputParserItemList implements INodeType { description: INodeTypeDescription = { diff --git a/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserItemList/test/OutputParserItemList.node.test.ts b/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserItemList/test/OutputParserItemList.node.test.ts index c8ac869169..ae31e88353 100644 --- a/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserItemList/test/OutputParserItemList.node.test.ts +++ b/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserItemList/test/OutputParserItemList.node.test.ts @@ -6,7 +6,8 @@ import { type IWorkflowDataProxyData, } from 'n8n-workflow'; -import { N8nItemListOutputParser } from '../../../../utils/output_parsers/N8nItemListOutputParser'; +import { N8nItemListOutputParser } from '@utils/output_parsers/N8nItemListOutputParser'; + import { OutputParserItemList } from '../OutputParserItemList.node'; describe('OutputParserItemList', () => { diff --git a/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserStructured/OutputParserStructured.node.ts b/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserStructured/OutputParserStructured.node.ts index c35cb1d145..8da4cb05d8 100644 --- a/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserStructured/OutputParserStructured.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserStructured/OutputParserStructured.node.ts @@ -10,14 +10,10 @@ import { } from 'n8n-workflow'; import type { z } from 'zod'; -import { - inputSchemaField, - jsonSchemaExampleField, - schemaTypeField, -} from 
'../../../utils/descriptions'; -import { N8nStructuredOutputParser } from '../../../utils/output_parsers/N8nOutputParser'; -import { convertJsonSchemaToZod, generateSchema } from '../../../utils/schemaParsing'; -import { getConnectionHintNoticeField } from '../../../utils/sharedFields'; +import { inputSchemaField, jsonSchemaExampleField, schemaTypeField } from '@utils/descriptions'; +import { N8nStructuredOutputParser } from '@utils/output_parsers/N8nOutputParser'; +import { convertJsonSchemaToZod, generateSchema } from '@utils/schemaParsing'; +import { getConnectionHintNoticeField } from '@utils/sharedFields'; export class OutputParserStructured implements INodeType { description: INodeTypeDescription = { diff --git a/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserStructured/test/OutputParserStructured.node.test.ts b/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserStructured/test/OutputParserStructured.node.test.ts index af72c49d7e..e07b012ec6 100644 --- a/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserStructured/test/OutputParserStructured.node.test.ts +++ b/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserStructured/test/OutputParserStructured.node.test.ts @@ -7,7 +7,8 @@ import { type IWorkflowDataProxyData, } from 'n8n-workflow'; -import type { N8nStructuredOutputParser } from '../../../../utils/output_parsers/N8nStructuredOutputParser'; +import type { N8nStructuredOutputParser } from '@utils/output_parsers/N8nStructuredOutputParser'; + import { OutputParserStructured } from '../OutputParserStructured.node'; describe('OutputParserStructured', () => { diff --git a/packages/@n8n/nodes-langchain/nodes/retrievers/RetrieverContextualCompression/RetrieverContextualCompression.node.ts b/packages/@n8n/nodes-langchain/nodes/retrievers/RetrieverContextualCompression/RetrieverContextualCompression.node.ts index 8017caa1ad..74db608551 100644 --- a/packages/@n8n/nodes-langchain/nodes/retrievers/RetrieverContextualCompression/RetrieverContextualCompression.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/retrievers/RetrieverContextualCompression/RetrieverContextualCompression.node.ts @@ -1,4 +1,9 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ + +import type { BaseLanguageModel } from '@langchain/core/language_models/base'; +import type { BaseRetriever } from '@langchain/core/retrievers'; +import { ContextualCompressionRetriever } from 'langchain/retrievers/contextual_compression'; +import { LLMChainExtractor } from 'langchain/retrievers/document_compressors/chain_extract'; import { NodeConnectionType, type INodeType, @@ -7,12 +12,7 @@ import { type SupplyData, } from 'n8n-workflow'; -import { ContextualCompressionRetriever } from 'langchain/retrievers/contextual_compression'; -import { LLMChainExtractor } from 'langchain/retrievers/document_compressors/chain_extract'; -import type { BaseLanguageModel } from '@langchain/core/language_models/base'; -import type { BaseRetriever } from '@langchain/core/retrievers'; - -import { logWrapper } from '../../../utils/logWrapper'; +import { logWrapper } from '@utils/logWrapper'; export class RetrieverContextualCompression implements INodeType { description: INodeTypeDescription = { diff --git a/packages/@n8n/nodes-langchain/nodes/retrievers/RetrieverMultiQuery/RetrieverMultiQuery.node.ts b/packages/@n8n/nodes-langchain/nodes/retrievers/RetrieverMultiQuery/RetrieverMultiQuery.node.ts index f814ba875e..3805eb5374 100644 --- 
a/packages/@n8n/nodes-langchain/nodes/retrievers/RetrieverMultiQuery/RetrieverMultiQuery.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/retrievers/RetrieverMultiQuery/RetrieverMultiQuery.node.ts @@ -1,4 +1,8 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ + +import type { BaseLanguageModel } from '@langchain/core/language_models/base'; +import type { BaseRetriever } from '@langchain/core/retrievers'; +import { MultiQueryRetriever } from 'langchain/retrievers/multi_query'; import { NodeConnectionType, type INodeType, @@ -7,11 +11,7 @@ import { type SupplyData, } from 'n8n-workflow'; -import { MultiQueryRetriever } from 'langchain/retrievers/multi_query'; -import type { BaseLanguageModel } from '@langchain/core/language_models/base'; -import type { BaseRetriever } from '@langchain/core/retrievers'; - -import { logWrapper } from '../../../utils/logWrapper'; +import { logWrapper } from '@utils/logWrapper'; export class RetrieverMultiQuery implements INodeType { description: INodeTypeDescription = { diff --git a/packages/@n8n/nodes-langchain/nodes/retrievers/RetrieverVectorStore/RetrieverVectorStore.node.ts b/packages/@n8n/nodes-langchain/nodes/retrievers/RetrieverVectorStore/RetrieverVectorStore.node.ts index 5e79a6a754..74f88e5561 100644 --- a/packages/@n8n/nodes-langchain/nodes/retrievers/RetrieverVectorStore/RetrieverVectorStore.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/retrievers/RetrieverVectorStore/RetrieverVectorStore.node.ts @@ -1,4 +1,5 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ +import type { VectorStore } from '@langchain/core/vectorstores'; import { NodeConnectionType, type INodeType, @@ -6,8 +7,8 @@ import { type ISupplyDataFunctions, type SupplyData, } from 'n8n-workflow'; -import type { VectorStore } from '@langchain/core/vectorstores'; -import { logWrapper } from '../../../utils/logWrapper'; + +import { logWrapper } from '@utils/logWrapper'; export class RetrieverVectorStore implements INodeType { description: INodeTypeDescription = { diff --git a/packages/@n8n/nodes-langchain/nodes/retrievers/RetrieverWorkflow/RetrieverWorkflow.node.ts b/packages/@n8n/nodes-langchain/nodes/retrievers/RetrieverWorkflow/RetrieverWorkflow.node.ts index bbee553999..5e9fecd47a 100644 --- a/packages/@n8n/nodes-langchain/nodes/retrievers/RetrieverWorkflow/RetrieverWorkflow.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/retrievers/RetrieverWorkflow/RetrieverWorkflow.node.ts @@ -1,4 +1,9 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ +import type { CallbackManagerForRetrieverRun } from '@langchain/core/callbacks/manager'; +import { Document } from '@langchain/core/documents'; +import { BaseRetriever, type BaseRetrieverInput } from '@langchain/core/retrievers'; +import type { SetField, SetNodeOptions } from 'n8n-nodes-base/dist/nodes/Set/v2/helpers/interfaces'; +import * as manual from 'n8n-nodes-base/dist/nodes/Set/v2/manual.mode'; import { NodeConnectionType, NodeOperationError } from 'n8n-workflow'; import type { IDataObject, @@ -13,13 +18,7 @@ import type { ExecuteWorkflowData, } from 'n8n-workflow'; -import { BaseRetriever, type BaseRetrieverInput } from '@langchain/core/retrievers'; -import { Document } from '@langchain/core/documents'; - -import type { SetField, SetNodeOptions } from 'n8n-nodes-base/dist/nodes/Set/v2/helpers/interfaces'; -import * as manual from 'n8n-nodes-base/dist/nodes/Set/v2/manual.mode'; -import type { CallbackManagerForRetrieverRun } from '@langchain/core/callbacks/manager'; -import { 
logWrapper } from '../../../utils/logWrapper'; +import { logWrapper } from '@utils/logWrapper'; function objectToString(obj: Record | IDataObject, level = 0) { let result = ''; diff --git a/packages/@n8n/nodes-langchain/nodes/text_splitters/TextSplitterCharacterTextSplitter/TextSplitterCharacterTextSplitter.node.ts b/packages/@n8n/nodes-langchain/nodes/text_splitters/TextSplitterCharacterTextSplitter/TextSplitterCharacterTextSplitter.node.ts index f62e8f01f1..c78bd39a6c 100644 --- a/packages/@n8n/nodes-langchain/nodes/text_splitters/TextSplitterCharacterTextSplitter/TextSplitterCharacterTextSplitter.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/text_splitters/TextSplitterCharacterTextSplitter/TextSplitterCharacterTextSplitter.node.ts @@ -1,4 +1,6 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ +import type { CharacterTextSplitterParams } from '@langchain/textsplitters'; +import { CharacterTextSplitter } from '@langchain/textsplitters'; import { NodeConnectionType, type INodeType, @@ -6,10 +8,9 @@ import { type ISupplyDataFunctions, type SupplyData, } from 'n8n-workflow'; -import type { CharacterTextSplitterParams } from '@langchain/textsplitters'; -import { CharacterTextSplitter } from '@langchain/textsplitters'; -import { logWrapper } from '../../../utils/logWrapper'; -import { getConnectionHintNoticeField } from '../../../utils/sharedFields'; + +import { logWrapper } from '@utils/logWrapper'; +import { getConnectionHintNoticeField } from '@utils/sharedFields'; export class TextSplitterCharacterTextSplitter implements INodeType { description: INodeTypeDescription = { diff --git a/packages/@n8n/nodes-langchain/nodes/text_splitters/TextSplitterRecursiveCharacterTextSplitter/TextSplitterRecursiveCharacterTextSplitter.node.ts b/packages/@n8n/nodes-langchain/nodes/text_splitters/TextSplitterRecursiveCharacterTextSplitter/TextSplitterRecursiveCharacterTextSplitter.node.ts index 21a0520766..cfe8a32757 100644 --- a/packages/@n8n/nodes-langchain/nodes/text_splitters/TextSplitterRecursiveCharacterTextSplitter/TextSplitterRecursiveCharacterTextSplitter.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/text_splitters/TextSplitterRecursiveCharacterTextSplitter/TextSplitterRecursiveCharacterTextSplitter.node.ts @@ -1,4 +1,9 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ +import type { + RecursiveCharacterTextSplitterParams, + SupportedTextSplitterLanguage, +} from '@langchain/textsplitters'; +import { RecursiveCharacterTextSplitter } from '@langchain/textsplitters'; import { NodeConnectionType, type INodeType, @@ -6,13 +11,9 @@ import { type ISupplyDataFunctions, type SupplyData, } from 'n8n-workflow'; -import type { - RecursiveCharacterTextSplitterParams, - SupportedTextSplitterLanguage, -} from '@langchain/textsplitters'; -import { RecursiveCharacterTextSplitter } from '@langchain/textsplitters'; -import { logWrapper } from '../../../utils/logWrapper'; -import { getConnectionHintNoticeField } from '../../../utils/sharedFields'; + +import { logWrapper } from '@utils/logWrapper'; +import { getConnectionHintNoticeField } from '@utils/sharedFields'; const supportedLanguages: SupportedTextSplitterLanguage[] = [ 'cpp', diff --git a/packages/@n8n/nodes-langchain/nodes/text_splitters/TextSplitterTokenSplitter/TextSplitterTokenSplitter.node.ts b/packages/@n8n/nodes-langchain/nodes/text_splitters/TextSplitterTokenSplitter/TextSplitterTokenSplitter.node.ts index 247d142fa8..cd881916d6 100644 --- 
a/packages/@n8n/nodes-langchain/nodes/text_splitters/TextSplitterTokenSplitter/TextSplitterTokenSplitter.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/text_splitters/TextSplitterTokenSplitter/TextSplitterTokenSplitter.node.ts @@ -1,4 +1,5 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ +import { TokenTextSplitter } from '@langchain/textsplitters'; import { NodeConnectionType, type INodeType, @@ -6,9 +7,9 @@ import { type ISupplyDataFunctions, type SupplyData, } from 'n8n-workflow'; -import { TokenTextSplitter } from '@langchain/textsplitters'; -import { logWrapper } from '../../../utils/logWrapper'; -import { getConnectionHintNoticeField } from '../../../utils/sharedFields'; + +import { logWrapper } from '@utils/logWrapper'; +import { getConnectionHintNoticeField } from '@utils/sharedFields'; export class TextSplitterTokenSplitter implements INodeType { description: INodeTypeDescription = { diff --git a/packages/@n8n/nodes-langchain/nodes/tools/ToolCalculator/ToolCalculator.node.ts b/packages/@n8n/nodes-langchain/nodes/tools/ToolCalculator/ToolCalculator.node.ts index f50a6216c0..b3ed23c576 100644 --- a/packages/@n8n/nodes-langchain/nodes/tools/ToolCalculator/ToolCalculator.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/tools/ToolCalculator/ToolCalculator.node.ts @@ -1,4 +1,5 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ +import { Calculator } from '@langchain/community/tools/calculator'; import { NodeConnectionType, type INodeType, @@ -6,9 +7,9 @@ import { type ISupplyDataFunctions, type SupplyData, } from 'n8n-workflow'; -import { Calculator } from '@langchain/community/tools/calculator'; -import { logWrapper } from '../../../utils/logWrapper'; -import { getConnectionHintNoticeField } from '../../../utils/sharedFields'; + +import { logWrapper } from '@utils/logWrapper'; +import { getConnectionHintNoticeField } from '@utils/sharedFields'; export class ToolCalculator implements INodeType { description: INodeTypeDescription = { diff --git a/packages/@n8n/nodes-langchain/nodes/tools/ToolCode/ToolCode.node.ts b/packages/@n8n/nodes-langchain/nodes/tools/ToolCode/ToolCode.node.ts index 1491662e61..214d4ed82a 100644 --- a/packages/@n8n/nodes-langchain/nodes/tools/ToolCode/ToolCode.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/tools/ToolCode/ToolCode.node.ts @@ -15,14 +15,11 @@ import type { } from 'n8n-workflow'; import { jsonParse, NodeConnectionType, NodeOperationError } from 'n8n-workflow'; +import { inputSchemaField, jsonSchemaExampleField, schemaTypeField } from '@utils/descriptions'; +import { convertJsonSchemaToZod, generateSchema } from '@utils/schemaParsing'; +import { getConnectionHintNoticeField } from '@utils/sharedFields'; + import type { DynamicZodObject } from '../../../types/zod.types'; -import { - inputSchemaField, - jsonSchemaExampleField, - schemaTypeField, -} from '../../../utils/descriptions'; -import { convertJsonSchemaToZod, generateSchema } from '../../../utils/schemaParsing'; -import { getConnectionHintNoticeField } from '../../../utils/sharedFields'; export class ToolCode implements INodeType { description: INodeTypeDescription = { diff --git a/packages/@n8n/nodes-langchain/nodes/tools/ToolHttpRequest/ToolHttpRequest.node.ts b/packages/@n8n/nodes-langchain/nodes/tools/ToolHttpRequest/ToolHttpRequest.node.ts index f279c1e751..bfdd3e7ace 100644 --- a/packages/@n8n/nodes-langchain/nodes/tools/ToolHttpRequest/ToolHttpRequest.node.ts +++ 
b/packages/@n8n/nodes-langchain/nodes/tools/ToolHttpRequest/ToolHttpRequest.node.ts @@ -1,4 +1,5 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ +import { DynamicTool } from '@langchain/core/tools'; import type { INodeType, INodeTypeDescription, @@ -9,19 +10,8 @@ import type { } from 'n8n-workflow'; import { NodeConnectionType, NodeOperationError, tryToParseAlphanumericString } from 'n8n-workflow'; -import { DynamicTool } from '@langchain/core/tools'; -import { getConnectionHintNoticeField } from '../../../utils/sharedFields'; - -import { N8nTool } from '../../../utils/N8nTool'; -import { - configureHttpRequestFunction, - configureResponseOptimizer, - extractParametersFromText, - prepareToolDescription, - configureToolFunction, - updateParametersAndOptions, - makeToolInputSchema, -} from './utils'; +import { N8nTool } from '@utils/N8nTool'; +import { getConnectionHintNoticeField } from '@utils/sharedFields'; import { authenticationProperties, @@ -31,8 +21,16 @@ import { placeholderDefinitionsCollection, specifyBySelector, } from './descriptions'; - import type { PlaceholderDefinition, ToolParameter } from './interfaces'; +import { + configureHttpRequestFunction, + configureResponseOptimizer, + extractParametersFromText, + prepareToolDescription, + configureToolFunction, + updateParametersAndOptions, + makeToolInputSchema, +} from './utils'; export class ToolHttpRequest implements INodeType { description: INodeTypeDescription = { diff --git a/packages/@n8n/nodes-langchain/nodes/tools/ToolHttpRequest/test/ToolHttpRequest.node.test.ts b/packages/@n8n/nodes-langchain/nodes/tools/ToolHttpRequest/test/ToolHttpRequest.node.test.ts index 1a99896fff..05ed1e619c 100644 --- a/packages/@n8n/nodes-langchain/nodes/tools/ToolHttpRequest/test/ToolHttpRequest.node.test.ts +++ b/packages/@n8n/nodes-langchain/nodes/tools/ToolHttpRequest/test/ToolHttpRequest.node.test.ts @@ -2,7 +2,8 @@ import { mock } from 'jest-mock-extended'; import type { IExecuteFunctions, INode } from 'n8n-workflow'; import { jsonParse } from 'n8n-workflow'; -import type { N8nTool } from '../../../../utils/N8nTool'; +import type { N8nTool } from '@utils/N8nTool'; + import { ToolHttpRequest } from '../ToolHttpRequest.node'; describe('ToolHttpRequest', () => { diff --git a/packages/@n8n/nodes-langchain/nodes/tools/ToolSerpApi/ToolSerpApi.node.ts b/packages/@n8n/nodes-langchain/nodes/tools/ToolSerpApi/ToolSerpApi.node.ts index 709b06b7ac..7a7a09b933 100644 --- a/packages/@n8n/nodes-langchain/nodes/tools/ToolSerpApi/ToolSerpApi.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/tools/ToolSerpApi/ToolSerpApi.node.ts @@ -1,4 +1,5 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ +import { SerpAPI } from '@langchain/community/tools/serpapi'; import { NodeConnectionType, type INodeType, @@ -6,9 +7,9 @@ import { type ISupplyDataFunctions, type SupplyData, } from 'n8n-workflow'; -import { SerpAPI } from '@langchain/community/tools/serpapi'; -import { logWrapper } from '../../../utils/logWrapper'; -import { getConnectionHintNoticeField } from '../../../utils/sharedFields'; + +import { logWrapper } from '@utils/logWrapper'; +import { getConnectionHintNoticeField } from '@utils/sharedFields'; export class ToolSerpApi implements INodeType { description: INodeTypeDescription = { diff --git a/packages/@n8n/nodes-langchain/nodes/tools/ToolVectorStore/ToolVectorStore.node.ts b/packages/@n8n/nodes-langchain/nodes/tools/ToolVectorStore/ToolVectorStore.node.ts index 0086681dca..d539afb0cf 100644 --- 
a/packages/@n8n/nodes-langchain/nodes/tools/ToolVectorStore/ToolVectorStore.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/tools/ToolVectorStore/ToolVectorStore.node.ts @@ -10,8 +10,8 @@ import type { } from 'n8n-workflow'; import { NodeConnectionType } from 'n8n-workflow'; -import { logWrapper } from '../../../utils/logWrapper'; -import { getConnectionHintNoticeField } from '../../../utils/sharedFields'; +import { logWrapper } from '@utils/logWrapper'; +import { getConnectionHintNoticeField } from '@utils/sharedFields'; export class ToolVectorStore implements INodeType { description: INodeTypeDescription = { diff --git a/packages/@n8n/nodes-langchain/nodes/tools/ToolWikipedia/ToolWikipedia.node.ts b/packages/@n8n/nodes-langchain/nodes/tools/ToolWikipedia/ToolWikipedia.node.ts index e462e38feb..4eef3a1b45 100644 --- a/packages/@n8n/nodes-langchain/nodes/tools/ToolWikipedia/ToolWikipedia.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/tools/ToolWikipedia/ToolWikipedia.node.ts @@ -1,4 +1,5 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ +import { WikipediaQueryRun } from '@langchain/community/tools/wikipedia_query_run'; import { NodeConnectionType, type INodeType, @@ -6,9 +7,9 @@ import { type ISupplyDataFunctions, type SupplyData, } from 'n8n-workflow'; -import { WikipediaQueryRun } from '@langchain/community/tools/wikipedia_query_run'; -import { logWrapper } from '../../../utils/logWrapper'; -import { getConnectionHintNoticeField } from '../../../utils/sharedFields'; + +import { logWrapper } from '@utils/logWrapper'; +import { getConnectionHintNoticeField } from '@utils/sharedFields'; export class ToolWikipedia implements INodeType { description: INodeTypeDescription = { diff --git a/packages/@n8n/nodes-langchain/nodes/tools/ToolWolframAlpha/ToolWolframAlpha.node.ts b/packages/@n8n/nodes-langchain/nodes/tools/ToolWolframAlpha/ToolWolframAlpha.node.ts index 93290e63ad..162b78ba8e 100644 --- a/packages/@n8n/nodes-langchain/nodes/tools/ToolWolframAlpha/ToolWolframAlpha.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/tools/ToolWolframAlpha/ToolWolframAlpha.node.ts @@ -1,4 +1,5 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ +import { WolframAlphaTool } from '@langchain/community/tools/wolframalpha'; import { NodeConnectionType, type INodeType, @@ -6,9 +7,9 @@ import { type ISupplyDataFunctions, type SupplyData, } from 'n8n-workflow'; -import { WolframAlphaTool } from '@langchain/community/tools/wolframalpha'; -import { logWrapper } from '../../../utils/logWrapper'; -import { getConnectionHintNoticeField } from '../../../utils/sharedFields'; + +import { logWrapper } from '@utils/logWrapper'; +import { getConnectionHintNoticeField } from '@utils/sharedFields'; export class ToolWolframAlpha implements INodeType { description: INodeTypeDescription = { diff --git a/packages/@n8n/nodes-langchain/nodes/tools/ToolWorkflow/ToolWorkflow.node.ts b/packages/@n8n/nodes-langchain/nodes/tools/ToolWorkflow/ToolWorkflow.node.ts index d85ff72271..6b09cbfc88 100644 --- a/packages/@n8n/nodes-langchain/nodes/tools/ToolWorkflow/ToolWorkflow.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/tools/ToolWorkflow/ToolWorkflow.node.ts @@ -21,14 +21,11 @@ import type { } from 'n8n-workflow'; import { NodeConnectionType, NodeOperationError, jsonParse } from 'n8n-workflow'; +import { jsonSchemaExampleField, schemaTypeField, inputSchemaField } from '@utils/descriptions'; +import { convertJsonSchemaToZod, generateSchema } from '@utils/schemaParsing'; +import { 
getConnectionHintNoticeField } from '@utils/sharedFields'; + import type { DynamicZodObject } from '../../../types/zod.types'; -import { - jsonSchemaExampleField, - schemaTypeField, - inputSchemaField, -} from '../../../utils/descriptions'; -import { convertJsonSchemaToZod, generateSchema } from '../../../utils/schemaParsing'; -import { getConnectionHintNoticeField } from '../../../utils/sharedFields'; export class ToolWorkflow implements INodeType { description: INodeTypeDescription = { diff --git a/packages/@n8n/nodes-langchain/nodes/trigger/ChatTrigger/ChatTrigger.node.ts b/packages/@n8n/nodes-langchain/nodes/trigger/ChatTrigger/ChatTrigger.node.ts index 489b4fe28b..27fb1bcd35 100644 --- a/packages/@n8n/nodes-langchain/nodes/trigger/ChatTrigger/ChatTrigger.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/trigger/ChatTrigger/ChatTrigger.node.ts @@ -1,6 +1,6 @@ import type { BaseChatMemory } from '@langchain/community/memory/chat_memory'; import { pick } from 'lodash'; -import { Node, NodeConnectionType, commonCORSParameters } from 'n8n-workflow'; +import { Node, NodeConnectionType } from 'n8n-workflow'; import type { IDataObject, IWebhookFunctions, @@ -241,14 +241,19 @@ export class ChatTrigger extends Node { default: {}, options: [ // CORS parameters are only valid for when chat is used in hosted or webhook mode - ...commonCORSParameters.map((p) => ({ - ...p, + { + displayName: 'Allowed Origins (CORS)', + name: 'allowedOrigins', + type: 'string', + default: '*', + description: + 'Comma-separated list of URLs allowed for cross-origin non-preflight requests. Use * (default) to allow all origins.', displayOptions: { show: { '/mode': ['hostedChat', 'webhook'], }, }, - })), + }, { ...allowFileUploadsOption, displayOptions: { diff --git a/packages/@n8n/nodes-langchain/nodes/trigger/ChatTrigger/GenericFunctions.ts b/packages/@n8n/nodes-langchain/nodes/trigger/ChatTrigger/GenericFunctions.ts index 02620ab797..c065569eb2 100644 --- a/packages/@n8n/nodes-langchain/nodes/trigger/ChatTrigger/GenericFunctions.ts +++ b/packages/@n8n/nodes-langchain/nodes/trigger/ChatTrigger/GenericFunctions.ts @@ -1,5 +1,6 @@ -import type { ICredentialDataDecryptedObject, IWebhookFunctions } from 'n8n-workflow'; import basicAuth from 'basic-auth'; +import type { ICredentialDataDecryptedObject, IWebhookFunctions } from 'n8n-workflow'; + import { ChatTriggerAuthorizationError } from './error'; import type { AuthenticationChatOption } from './types'; diff --git a/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreInMemory/VectorStoreInMemory.node.ts b/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreInMemory/VectorStoreInMemory.node.ts index 5508f957f8..dc99db630d 100644 --- a/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreInMemory/VectorStoreInMemory.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreInMemory/VectorStoreInMemory.node.ts @@ -1,4 +1,5 @@ import type { INodeProperties } from 'n8n-workflow'; + import { createVectorStoreNode } from '../shared/createVectorStoreNode'; import { MemoryVectorStoreManager } from '../shared/MemoryVectorStoreManager'; diff --git a/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreInMemoryInsert/VectorStoreInMemoryInsert.node.ts b/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreInMemoryInsert/VectorStoreInMemoryInsert.node.ts index 225201a5e1..34d4815034 100644 --- a/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreInMemoryInsert/VectorStoreInMemoryInsert.node.ts +++ 
b/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreInMemoryInsert/VectorStoreInMemoryInsert.node.ts @@ -1,4 +1,6 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ +import type { Embeddings } from '@langchain/core/embeddings'; +import type { Document } from 'langchain/document'; import { NodeConnectionType, type INodeExecutionData, @@ -6,11 +8,11 @@ import { type INodeType, type INodeTypeDescription, } from 'n8n-workflow'; -import type { Document } from 'langchain/document'; -import type { Embeddings } from '@langchain/core/embeddings'; -import type { N8nJsonLoader } from '../../../utils/N8nJsonLoader'; -import { processDocuments } from '../shared/processDocuments'; + +import type { N8nJsonLoader } from '@utils/N8nJsonLoader'; + import { MemoryVectorStoreManager } from '../shared/MemoryVectorStoreManager'; +import { processDocuments } from '../shared/processDocuments'; // This node is deprecated. Use VectorStoreInMemory instead. export class VectorStoreInMemoryInsert implements INodeType { diff --git a/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreInMemoryLoad/VectorStoreInMemoryLoad.node.ts b/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreInMemoryLoad/VectorStoreInMemoryLoad.node.ts index 7bf48c3d8c..dd2def31e3 100644 --- a/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreInMemoryLoad/VectorStoreInMemoryLoad.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreInMemoryLoad/VectorStoreInMemoryLoad.node.ts @@ -1,4 +1,5 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ +import type { Embeddings } from '@langchain/core/embeddings'; import { NodeConnectionType, type INodeType, @@ -6,9 +7,10 @@ import { type ISupplyDataFunctions, type SupplyData, } from 'n8n-workflow'; -import type { Embeddings } from '@langchain/core/embeddings'; + +import { logWrapper } from '@utils/logWrapper'; + import { MemoryVectorStoreManager } from '../shared/MemoryVectorStoreManager'; -import { logWrapper } from '../../../utils/logWrapper'; // This node is deprecated. Use VectorStoreInMemory instead. 
export class VectorStoreInMemoryLoad implements INodeType { diff --git a/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStorePGVector/VectorStorePGVector.node.ts b/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStorePGVector/VectorStorePGVector.node.ts index 8336958cc5..6d5da1615b 100644 --- a/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStorePGVector/VectorStorePGVector.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStorePGVector/VectorStorePGVector.node.ts @@ -9,7 +9,8 @@ import { configurePostgres } from 'n8n-nodes-base/dist/nodes/Postgres/v2/transpo import type { INodeProperties } from 'n8n-workflow'; import type pg from 'pg'; -import { metadataFilterField } from '../../../utils/sharedFields'; +import { metadataFilterField } from '@utils/sharedFields'; + import { createVectorStoreNode } from '../shared/createVectorStoreNode'; type CollectionOptions = { diff --git a/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStorePinecone/VectorStorePinecone.node.ts b/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStorePinecone/VectorStorePinecone.node.ts index d153979ef4..6e684ebed3 100644 --- a/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStorePinecone/VectorStorePinecone.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStorePinecone/VectorStorePinecone.node.ts @@ -1,9 +1,11 @@ -import { NodeOperationError, type INodeProperties } from 'n8n-workflow'; import type { PineconeStoreParams } from '@langchain/pinecone'; import { PineconeStore } from '@langchain/pinecone'; import { Pinecone } from '@pinecone-database/pinecone'; +import { NodeOperationError, type INodeProperties } from 'n8n-workflow'; + +import { metadataFilterField } from '@utils/sharedFields'; + import { createVectorStoreNode } from '../shared/createVectorStoreNode'; -import { metadataFilterField } from '../../../utils/sharedFields'; import { pineconeIndexRLC } from '../shared/descriptions'; import { pineconeIndexSearch } from '../shared/methods/listSearch'; diff --git a/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStorePineconeInsert/VectorStorePineconeInsert.node.ts b/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStorePineconeInsert/VectorStorePineconeInsert.node.ts index 023b65be84..6c10ff1427 100644 --- a/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStorePineconeInsert/VectorStorePineconeInsert.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStorePineconeInsert/VectorStorePineconeInsert.node.ts @@ -1,3 +1,7 @@ +import type { Document } from '@langchain/core/documents'; +import type { Embeddings } from '@langchain/core/embeddings'; +import { PineconeStore } from '@langchain/pinecone'; +import { Pinecone } from '@pinecone-database/pinecone'; import { type IExecuteFunctions, type INodeType, @@ -5,15 +9,12 @@ import { type INodeExecutionData, NodeConnectionType, } from 'n8n-workflow'; -import type { Embeddings } from '@langchain/core/embeddings'; -import type { Document } from '@langchain/core/documents'; -import { PineconeStore } from '@langchain/pinecone'; -import { Pinecone } from '@pinecone-database/pinecone'; -import type { N8nJsonLoader } from '../../../utils/N8nJsonLoader'; -import { processDocuments } from '../shared/processDocuments'; +import type { N8nJsonLoader } from '@utils/N8nJsonLoader'; + import { pineconeIndexRLC } from '../shared/descriptions'; import { pineconeIndexSearch } from '../shared/methods/listSearch'; +import { processDocuments } from '../shared/processDocuments'; // 
This node is deprecated. Use VectorStorePinecone instead. export class VectorStorePineconeInsert implements INodeType { diff --git a/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStorePineconeLoad/VectorStorePineconeLoad.node.ts b/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStorePineconeLoad/VectorStorePineconeLoad.node.ts index d46bccd9f7..54eea2e902 100644 --- a/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStorePineconeLoad/VectorStorePineconeLoad.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStorePineconeLoad/VectorStorePineconeLoad.node.ts @@ -1,3 +1,7 @@ +import type { Embeddings } from '@langchain/core/embeddings'; +import type { PineconeStoreParams } from '@langchain/pinecone'; +import { PineconeStore } from '@langchain/pinecone'; +import { Pinecone } from '@pinecone-database/pinecone'; import { NodeConnectionType, type INodeType, @@ -5,14 +9,11 @@ import { type ISupplyDataFunctions, type SupplyData, } from 'n8n-workflow'; -import type { PineconeStoreParams } from '@langchain/pinecone'; -import { PineconeStore } from '@langchain/pinecone'; -import { Pinecone } from '@pinecone-database/pinecone'; -import type { Embeddings } from '@langchain/core/embeddings'; -import { logWrapper } from '../../../utils/logWrapper'; -import { metadataFilterField } from '../../../utils/sharedFields'; -import { getMetadataFiltersValues } from '../../../utils/helpers'; +import { getMetadataFiltersValues } from '@utils/helpers'; +import { logWrapper } from '@utils/logWrapper'; +import { metadataFilterField } from '@utils/sharedFields'; + import { pineconeIndexRLC } from '../shared/descriptions'; import { pineconeIndexSearch } from '../shared/methods/listSearch'; diff --git a/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreQdrant/VectorStoreQdrant.node.ts b/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreQdrant/VectorStoreQdrant.node.ts index 0b5859e0bc..988f607ad7 100644 --- a/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreQdrant/VectorStoreQdrant.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreQdrant/VectorStoreQdrant.node.ts @@ -1,12 +1,13 @@ -import type { IDataObject, INodeProperties } from 'n8n-workflow'; +import type { Callbacks } from '@langchain/core/callbacks/manager'; +import type { Embeddings } from '@langchain/core/embeddings'; import type { QdrantLibArgs } from '@langchain/qdrant'; import { QdrantVectorStore } from '@langchain/qdrant'; import type { Schemas as QdrantSchemas } from '@qdrant/js-client-rest'; +import type { IDataObject, INodeProperties } from 'n8n-workflow'; + import { createVectorStoreNode } from '../shared/createVectorStoreNode'; import { qdrantCollectionRLC } from '../shared/descriptions'; import { qdrantCollectionsSearch } from '../shared/methods/listSearch'; -import type { Embeddings } from '@langchain/core/embeddings'; -import type { Callbacks } from '@langchain/core/callbacks/manager'; class ExtendedQdrantVectorStore extends QdrantVectorStore { private static defaultFilter: IDataObject = {}; diff --git a/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreSupabase/VectorStoreSupabase.node.ts b/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreSupabase/VectorStoreSupabase.node.ts index 549fcd5e7f..b1b80fea5a 100644 --- a/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreSupabase/VectorStoreSupabase.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreSupabase/VectorStoreSupabase.node.ts @@ -1,8 +1,10 @@ -import 
{ NodeOperationError, type INodeProperties } from 'n8n-workflow'; -import { createClient } from '@supabase/supabase-js'; import { SupabaseVectorStore } from '@langchain/community/vectorstores/supabase'; +import { createClient } from '@supabase/supabase-js'; +import { NodeOperationError, type INodeProperties } from 'n8n-workflow'; + +import { metadataFilterField } from '@utils/sharedFields'; + import { createVectorStoreNode } from '../shared/createVectorStoreNode'; -import { metadataFilterField } from '../../../utils/sharedFields'; import { supabaseTableNameRLC } from '../shared/descriptions'; import { supabaseTableNameSearch } from '../shared/methods/listSearch'; diff --git a/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreSupabaseInsert/VectorStoreSupabaseInsert.node.ts b/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreSupabaseInsert/VectorStoreSupabaseInsert.node.ts index 44b3a6a397..332f534fe6 100644 --- a/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreSupabaseInsert/VectorStoreSupabaseInsert.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreSupabaseInsert/VectorStoreSupabaseInsert.node.ts @@ -1,3 +1,7 @@ +import { SupabaseVectorStore } from '@langchain/community/vectorstores/supabase'; +import type { Document } from '@langchain/core/documents'; +import type { Embeddings } from '@langchain/core/embeddings'; +import { createClient } from '@supabase/supabase-js'; import { type IExecuteFunctions, type INodeType, @@ -5,15 +9,12 @@ import { type INodeExecutionData, NodeConnectionType, } from 'n8n-workflow'; -import type { Embeddings } from '@langchain/core/embeddings'; -import type { Document } from '@langchain/core/documents'; -import { createClient } from '@supabase/supabase-js'; -import { SupabaseVectorStore } from '@langchain/community/vectorstores/supabase'; -import type { N8nJsonLoader } from '../../../utils/N8nJsonLoader'; -import { processDocuments } from '../shared/processDocuments'; +import type { N8nJsonLoader } from '@utils/N8nJsonLoader'; + import { supabaseTableNameRLC } from '../shared/descriptions'; import { supabaseTableNameSearch } from '../shared/methods/listSearch'; +import { processDocuments } from '../shared/processDocuments'; // This node is deprecated. Use VectorStoreSupabase instead. 
export class VectorStoreSupabaseInsert implements INodeType { diff --git a/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreSupabaseLoad/VectorStoreSupabaseLoad.node.ts b/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreSupabaseLoad/VectorStoreSupabaseLoad.node.ts index f4bdc49e44..eae056adb9 100644 --- a/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreSupabaseLoad/VectorStoreSupabaseLoad.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreSupabaseLoad/VectorStoreSupabaseLoad.node.ts @@ -1,3 +1,7 @@ +import type { SupabaseLibArgs } from '@langchain/community/vectorstores/supabase'; +import { SupabaseVectorStore } from '@langchain/community/vectorstores/supabase'; +import type { Embeddings } from '@langchain/core/embeddings'; +import { createClient } from '@supabase/supabase-js'; import { type INodeType, type INodeTypeDescription, @@ -5,13 +9,11 @@ import { type SupplyData, NodeConnectionType, } from 'n8n-workflow'; -import type { Embeddings } from '@langchain/core/embeddings'; -import { createClient } from '@supabase/supabase-js'; -import type { SupabaseLibArgs } from '@langchain/community/vectorstores/supabase'; -import { SupabaseVectorStore } from '@langchain/community/vectorstores/supabase'; -import { logWrapper } from '../../../utils/logWrapper'; -import { metadataFilterField } from '../../../utils/sharedFields'; -import { getMetadataFiltersValues } from '../../../utils/helpers'; + +import { getMetadataFiltersValues } from '@utils/helpers'; +import { logWrapper } from '@utils/logWrapper'; +import { metadataFilterField } from '@utils/sharedFields'; + import { supabaseTableNameRLC } from '../shared/descriptions'; import { supabaseTableNameSearch } from '../shared/methods/listSearch'; diff --git a/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreZep/VectorStoreZep.node.ts b/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreZep/VectorStoreZep.node.ts index 184b720d31..d6e8914ae5 100644 --- a/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreZep/VectorStoreZep.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreZep/VectorStoreZep.node.ts @@ -1,9 +1,11 @@ -import type { IDataObject, INodeProperties } from 'n8n-workflow'; -import { NodeOperationError } from 'n8n-workflow'; import type { IZepConfig } from '@langchain/community/vectorstores/zep'; import { ZepVectorStore } from '@langchain/community/vectorstores/zep'; +import type { IDataObject, INodeProperties } from 'n8n-workflow'; +import { NodeOperationError } from 'n8n-workflow'; + +import { metadataFilterField } from '@utils/sharedFields'; + import { createVectorStoreNode } from '../shared/createVectorStoreNode'; -import { metadataFilterField } from '../../../utils/sharedFields'; const embeddingDimensions: INodeProperties = { displayName: 'Embedding Dimensions', diff --git a/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreZepInsert/VectorStoreZepInsert.node.ts b/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreZepInsert/VectorStoreZepInsert.node.ts index 3b40e07d65..4892d8ad85 100644 --- a/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreZepInsert/VectorStoreZepInsert.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreZepInsert/VectorStoreZepInsert.node.ts @@ -1,3 +1,6 @@ +import { ZepVectorStore } from '@langchain/community/vectorstores/zep'; +import type { Document } from '@langchain/core/documents'; +import type { Embeddings } from '@langchain/core/embeddings'; import { 
type IExecuteFunctions, type INodeType, @@ -5,10 +8,9 @@ import { type INodeExecutionData, NodeConnectionType, } from 'n8n-workflow'; -import { ZepVectorStore } from '@langchain/community/vectorstores/zep'; -import type { Embeddings } from '@langchain/core/embeddings'; -import type { Document } from '@langchain/core/documents'; -import type { N8nJsonLoader } from '../../../utils/N8nJsonLoader'; + +import type { N8nJsonLoader } from '@utils/N8nJsonLoader'; + import { processDocuments } from '../shared/processDocuments'; // This node is deprecated. Use VectorStoreZep instead. diff --git a/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreZepLoad/VectorStoreZepLoad.node.ts b/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreZepLoad/VectorStoreZepLoad.node.ts index dd30a0808e..040b845e57 100644 --- a/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreZepLoad/VectorStoreZepLoad.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreZepLoad/VectorStoreZepLoad.node.ts @@ -1,3 +1,6 @@ +import type { IZepConfig } from '@langchain/community/vectorstores/zep'; +import { ZepVectorStore } from '@langchain/community/vectorstores/zep'; +import type { Embeddings } from '@langchain/core/embeddings'; import { NodeConnectionType, type INodeType, @@ -5,12 +8,10 @@ import { type ISupplyDataFunctions, type SupplyData, } from 'n8n-workflow'; -import type { IZepConfig } from '@langchain/community/vectorstores/zep'; -import { ZepVectorStore } from '@langchain/community/vectorstores/zep'; -import type { Embeddings } from '@langchain/core/embeddings'; -import { metadataFilterField } from '../../../utils/sharedFields'; -import { getMetadataFiltersValues } from '../../../utils/helpers'; -import { logWrapper } from '../../../utils/logWrapper'; + +import { getMetadataFiltersValues } from '@utils/helpers'; +import { logWrapper } from '@utils/logWrapper'; +import { metadataFilterField } from '@utils/sharedFields'; // This node is deprecated. Use VectorStoreZep instead. 
export class VectorStoreZepLoad implements INodeType { diff --git a/packages/@n8n/nodes-langchain/nodes/vector_store/shared/MemoryVectorStoreManager.test.ts b/packages/@n8n/nodes-langchain/nodes/vector_store/shared/MemoryVectorStoreManager.test.ts index 6229868f32..1088505a0a 100644 --- a/packages/@n8n/nodes-langchain/nodes/vector_store/shared/MemoryVectorStoreManager.test.ts +++ b/packages/@n8n/nodes-langchain/nodes/vector_store/shared/MemoryVectorStoreManager.test.ts @@ -1,7 +1,7 @@ import type { OpenAIEmbeddings } from '@langchain/openai'; +import { mock } from 'jest-mock-extended'; import { MemoryVectorStoreManager } from './MemoryVectorStoreManager'; -import { mock } from 'jest-mock-extended'; describe('MemoryVectorStoreManager', () => { it('should create an instance of MemoryVectorStoreManager', () => { diff --git a/packages/@n8n/nodes-langchain/nodes/vector_store/shared/createVectorStoreNode.ts b/packages/@n8n/nodes-langchain/nodes/vector_store/shared/createVectorStoreNode.ts index 2661559af1..23384fa34d 100644 --- a/packages/@n8n/nodes-langchain/nodes/vector_store/shared/createVectorStoreNode.ts +++ b/packages/@n8n/nodes-langchain/nodes/vector_store/shared/createVectorStoreNode.ts @@ -3,6 +3,7 @@ import type { Document } from '@langchain/core/documents'; import type { Embeddings } from '@langchain/core/embeddings'; import type { VectorStore } from '@langchain/core/vectorstores'; +import { DynamicTool } from 'langchain/tools'; import { NodeConnectionType, NodeOperationError } from 'n8n-workflow'; import type { IExecuteFunctions, @@ -19,13 +20,13 @@ import type { INodePropertyOptions, } from 'n8n-workflow'; +import { getMetadataFiltersValues, logAiEvent } from '@utils/helpers'; +import { logWrapper } from '@utils/logWrapper'; +import type { N8nBinaryLoader } from '@utils/N8nBinaryLoader'; +import { N8nJsonLoader } from '@utils/N8nJsonLoader'; +import { getConnectionHintNoticeField } from '@utils/sharedFields'; + import { processDocument } from './processDocuments'; -import { getMetadataFiltersValues, logAiEvent } from '../../../utils/helpers'; -import { logWrapper } from '../../../utils/logWrapper'; -import type { N8nBinaryLoader } from '../../../utils/N8nBinaryLoader'; -import { N8nJsonLoader } from '../../../utils/N8nJsonLoader'; -import { getConnectionHintNoticeField } from '../../../utils/sharedFields'; -import { DynamicTool } from 'langchain/tools'; type NodeOperationMode = 'insert' | 'load' | 'retrieve' | 'update'; diff --git a/packages/@n8n/nodes-langchain/nodes/vector_store/shared/methods/listSearch.ts b/packages/@n8n/nodes-langchain/nodes/vector_store/shared/methods/listSearch.ts index f12ff5d5cf..278d879f90 100644 --- a/packages/@n8n/nodes-langchain/nodes/vector_store/shared/methods/listSearch.ts +++ b/packages/@n8n/nodes-langchain/nodes/vector_store/shared/methods/listSearch.ts @@ -1,6 +1,6 @@ -import { ApplicationError, type IDataObject, type ILoadOptionsFunctions } from 'n8n-workflow'; import { Pinecone } from '@pinecone-database/pinecone'; import { QdrantClient } from '@qdrant/js-client-rest'; +import { ApplicationError, type IDataObject, type ILoadOptionsFunctions } from 'n8n-workflow'; export async function pineconeIndexSearch(this: ILoadOptionsFunctions) { const credentials = await this.getCredentials('pineconeApi'); diff --git a/packages/@n8n/nodes-langchain/nodes/vector_store/shared/processDocuments.ts b/packages/@n8n/nodes-langchain/nodes/vector_store/shared/processDocuments.ts index 5a3847d691..0c28d8db25 100644 --- 
a/packages/@n8n/nodes-langchain/nodes/vector_store/shared/processDocuments.ts +++ b/packages/@n8n/nodes-langchain/nodes/vector_store/shared/processDocuments.ts @@ -1,7 +1,8 @@ import type { Document } from '@langchain/core/documents'; import type { INodeExecutionData } from 'n8n-workflow'; -import { N8nJsonLoader } from '../../../utils/N8nJsonLoader'; -import { N8nBinaryLoader } from '../../../utils/N8nBinaryLoader'; + +import { N8nBinaryLoader } from '@utils/N8nBinaryLoader'; +import { N8nJsonLoader } from '@utils/N8nJsonLoader'; export async function processDocuments( documentInput: N8nJsonLoader | N8nBinaryLoader | Array>>, diff --git a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/OpenAi.node.ts b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/OpenAi.node.ts index 1743c69618..251618c01e 100644 --- a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/OpenAi.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/OpenAi.node.ts @@ -1,4 +1,5 @@ import type { IExecuteFunctions, INodeType } from 'n8n-workflow'; + import { router } from './actions/router'; import { versionDescription } from './actions/versionDescription'; import { listSearch, loadOptions } from './methods'; diff --git a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/assistant/create.operation.ts b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/assistant/create.operation.ts index b9a0dee535..3ac5e45352 100644 --- a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/assistant/create.operation.ts +++ b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/assistant/create.operation.ts @@ -5,6 +5,7 @@ import type { IDataObject, } from 'n8n-workflow'; import { NodeOperationError, updateDisplayOptions } from 'n8n-workflow'; + import { apiRequest } from '../../transport'; import { modelRLC } from '../descriptions'; diff --git a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/assistant/deleteAssistant.operation.ts b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/assistant/deleteAssistant.operation.ts index 099e5fd3e6..7287d8ebfd 100644 --- a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/assistant/deleteAssistant.operation.ts +++ b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/assistant/deleteAssistant.operation.ts @@ -1,5 +1,6 @@ import type { INodeProperties, IExecuteFunctions, INodeExecutionData } from 'n8n-workflow'; import { updateDisplayOptions } from 'n8n-workflow'; + import { apiRequest } from '../../transport'; import { assistantRLC } from '../descriptions'; diff --git a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/assistant/index.ts b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/assistant/index.ts index 3f869ffcc8..5973319492 100644 --- a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/assistant/index.ts +++ b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/assistant/index.ts @@ -2,8 +2,8 @@ import type { INodeProperties } from 'n8n-workflow'; import * as create from './create.operation'; import * as deleteAssistant from './deleteAssistant.operation'; -import * as message from './message.operation'; import * as list from './list.operation'; +import * as message from './message.operation'; import * as update from './update.operation'; export { create, deleteAssistant, message, list, update }; diff --git a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/assistant/list.operation.ts 
b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/assistant/list.operation.ts
index c029af4d48..ec75be1cfd 100644
--- a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/assistant/list.operation.ts
+++ b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/assistant/list.operation.ts
@@ -1,5 +1,6 @@
import type { INodeProperties, IExecuteFunctions, INodeExecutionData } from 'n8n-workflow';
import { updateDisplayOptions } from 'n8n-workflow';
+
import { apiRequest } from '../../transport';

const properties: INodeProperties[] = [
diff --git a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/assistant/message.operation.ts b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/assistant/message.operation.ts
index 959d487abe..977530cc83 100644
--- a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/assistant/message.operation.ts
+++ b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/assistant/message.operation.ts
@@ -18,9 +18,10 @@
} from 'n8n-workflow';
import { OpenAI as OpenAIClient } from 'openai';

-import { promptTypeOptions } from '../../../../../utils/descriptions';
-import { getConnectedTools } from '../../../../../utils/helpers';
-import { getTracingConfig } from '../../../../../utils/tracing';
+import { promptTypeOptions } from '@utils/descriptions';
+import { getConnectedTools } from '@utils/helpers';
+import { getTracingConfig } from '@utils/tracing';
+
import { formatToOpenAIAssistantTool } from '../../helpers/utils';
import { assistantRLC } from '../descriptions';
diff --git a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/assistant/update.operation.ts b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/assistant/update.operation.ts
index 8a997aa99d..f197ace5ec 100644
--- a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/assistant/update.operation.ts
+++ b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/assistant/update.operation.ts
@@ -4,7 +4,8 @@ import type {
INodeExecutionData,
IDataObject,
} from 'n8n-workflow';
-import { NodeOperationError, updateDisplayOptions } from 'n8n-workflow';
+import { ApplicationError, NodeOperationError, updateDisplayOptions } from 'n8n-workflow';
+
import { apiRequest } from '../../transport';
import { assistantRLC, modelRLC } from '../descriptions';
@@ -116,6 +117,18 @@ const displayOptions = {
export const description = updateDisplayOptions(displayOptions, properties);

+function getFileIds(file_ids: unknown): string[] {
+ if (Array.isArray(file_ids)) {
+ return file_ids;
+ }
+
+ if (typeof file_ids === 'string') {
+ return file_ids.split(',').map((file_id) => file_id.trim());
+ }
+
+ throw new ApplicationError('Invalid file_ids type');
+}
+
export async function execute(this: IExecuteFunctions, i: number): Promise {
const assistantId = this.getNodeParameter('assistantId', i, '', { extractValue: true }) as string;
const options = this.getNodeParameter('options', i, {});
@@ -137,11 +150,8 @@ export async function execute(this: IExecuteFunctions, i: number): Promise file_id.trim());
- }
- if ((file_ids as IDataObject[]).length > 20) {
+ const files = getFileIds(file_ids);
+ if (files.length > 20) {
throw new NodeOperationError(
this.getNode(),
'The maximum number of files that can be attached to the assistant is 20',
@@ -152,15 +162,12 @@ export async function execute(this: IExecuteFunctions, i: number): Promise {
diff --git a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/test/OpenAi.node.test.ts b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/test/OpenAi.node.test.ts
index 211b99166c..b8a32c45bf 100644
--- a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/test/OpenAi.node.test.ts
+++ b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/test/OpenAi.node.test.ts
@@ -1,11 +1,11 @@
-import type { IDataObject, IExecuteFunctions } from 'n8n-workflow';
import get from 'lodash/get';
+import type { IDataObject, IExecuteFunctions } from 'n8n-workflow';
+
import * as assistant from '../actions/assistant';
import * as audio from '../actions/audio';
import * as file from '../actions/file';
import * as image from '../actions/image';
import * as text from '../actions/text';
-
import * as transport from '../transport';

const createExecuteFunctionsMock = (parameters: IDataObject) => {
@@ -210,12 +210,89 @@ describe('OpenAi, Assistant resource', () => {
code_interpreter: {
file_ids: [],
},
- file_search: {
- vector_stores: [
- {
- file_ids: [],
- },
- ],
+ },
+ tools: [{ type: 'existing_tool' }, { type: 'code_interpreter' }, { type: 'file_search' }],
+ },
+ headers: { 'OpenAI-Beta': 'assistants=v2' },
+ });
+ });
+
+ it('update => should call apiRequest with file_ids as an array for search', async () => {
+ (transport.apiRequest as jest.Mock).mockResolvedValueOnce({
+ tools: [{ type: 'existing_tool' }],
+ });
+ (transport.apiRequest as jest.Mock).mockResolvedValueOnce({});
+
+ await assistant.update.execute.call(
+ createExecuteFunctionsMock({
+ assistantId: 'assistant-id',
+ options: {
+ modelId: 'gpt-model',
+ name: 'name',
+ instructions: 'some instructions',
+ codeInterpreter: true,
+ knowledgeRetrieval: true,
+ file_ids: ['1234'],
+ removeCustomTools: false,
+ },
+ }),
+ 0,
+ );
+
+ expect(transport.apiRequest).toHaveBeenCalledTimes(2);
+ expect(transport.apiRequest).toHaveBeenCalledWith('GET', '/assistants/assistant-id', {
+ headers: { 'OpenAI-Beta': 'assistants=v2' },
+ });
+ expect(transport.apiRequest).toHaveBeenCalledWith('POST', '/assistants/assistant-id', {
+ body: {
+ instructions: 'some instructions',
+ model: 'gpt-model',
+ name: 'name',
+ tool_resources: {
+ code_interpreter: {
+ file_ids: ['1234'],
+ },
+ },
+ tools: [{ type: 'existing_tool' }, { type: 'code_interpreter' }, { type: 'file_search' }],
+ },
+ headers: { 'OpenAI-Beta': 'assistants=v2' },
+ });
+ });
+
+ it('update => should call apiRequest with file_ids as strings for search', async () => {
+ (transport.apiRequest as jest.Mock).mockResolvedValueOnce({
+ tools: [{ type: 'existing_tool' }],
+ });
+ (transport.apiRequest as jest.Mock).mockResolvedValueOnce({});
+
+ await assistant.update.execute.call(
+ createExecuteFunctionsMock({
+ assistantId: 'assistant-id',
+ options: {
+ modelId: 'gpt-model',
+ name: 'name',
+ instructions: 'some instructions',
+ codeInterpreter: true,
+ knowledgeRetrieval: true,
+ file_ids: '1234, 5678, 90',
+ removeCustomTools: false,
+ },
+ }),
+ 0,
+ );
+
+ expect(transport.apiRequest).toHaveBeenCalledTimes(2);
+ expect(transport.apiRequest).toHaveBeenCalledWith('GET', '/assistants/assistant-id', {
+ headers: { 'OpenAI-Beta': 'assistants=v2' },
+ });
+ expect(transport.apiRequest).toHaveBeenCalledWith('POST', '/assistants/assistant-id', {
+ body: {
+ instructions: 'some instructions',
+ model: 'gpt-model',
+ name: 'name',
+ tool_resources: {
+ code_interpreter: {
+ file_ids: ['1234', '5678', '90'],
},
},
tools: [{ type: 'existing_tool' }, { type: 'code_interpreter' }, { type: 'file_search' }],
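The two new `file_ids` test cases above exercise the `getFileIds` helper added in `update.operation.ts`: the assistant's `file_ids` option may arrive either as an array or as a comma-separated string, and it is normalized to a `string[]` before the 20-file limit is enforced. A minimal standalone sketch of that normalization (illustrative only, not part of this patch; `normalizeFileIds` is a made-up name):

```ts
// Sketch of the normalization the new getFileIds() helper performs:
// accept an array of IDs or a comma-separated string, return string[].
function normalizeFileIds(fileIds: unknown): string[] {
	if (Array.isArray(fileIds)) return fileIds.map(String);
	if (typeof fileIds === 'string') {
		return fileIds.split(',').map((id) => id.trim());
	}
	throw new Error('Invalid file_ids type');
}

// Both shapes seen in the tests resolve to the same result type:
console.log(normalizeFileIds(['1234'])); // ['1234']
console.log(normalizeFileIds('1234, 5678, 90')); // ['1234', '5678', '90']
```

Normalizing once up front keeps the subsequent length check and the `tool_resources.code_interpreter.file_ids` payload identical for both input shapes.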
diff --git a/packages/@n8n/nodes-langchain/package.json b/packages/@n8n/nodes-langchain/package.json
index f086f2e82f..05715fcf7c 100644
--- a/packages/@n8n/nodes-langchain/package.json
+++ b/packages/@n8n/nodes-langchain/package.json
@@ -1,19 +1,18 @@
{
"name": "@n8n/n8n-nodes-langchain",
- "version": "1.71.0",
+ "version": "1.72.0",
"description": "",
"main": "index.js",
"scripts": {
"clean": "rimraf dist .turbo",
"dev": "pnpm run watch",
"typecheck": "tsc --noEmit",
- "build": "tsc -p tsconfig.build.json && pnpm n8n-copy-icons && pnpm build:metadata",
- "build:metadata": "pnpm n8n-generate-known && pnpm n8n-generate-ui-types",
+ "build": "tsc -p tsconfig.build.json && tsc-alias -p tsconfig.build.json && pnpm n8n-copy-icons && pnpm n8n-generate-metadata",
"format": "biome format --write .",
"format:check": "biome ci .",
"lint": "eslint nodes credentials --quiet",
"lintfix": "eslint nodes credentials --fix",
- "watch": "tsc-watch -p tsconfig.build.json --onCompilationComplete \"tsc-alias -p tsconfig.build.json\" --onSuccess \"pnpm n8n-generate-ui-types\"",
+ "watch": "tsc-watch -p tsconfig.build.json --onCompilationComplete \"tsc-alias -p tsconfig.build.json\" --onSuccess \"pnpm n8n-generate-metadata\"",
"test": "jest",
"test:dev": "jest --watch"
},
diff --git a/packages/@n8n/nodes-langchain/tsconfig.json b/packages/@n8n/nodes-langchain/tsconfig.json
index a0bd21149f..d72fd76a4a 100644
--- a/packages/@n8n/nodes-langchain/tsconfig.json
+++ b/packages/@n8n/nodes-langchain/tsconfig.json
@@ -1,6 +1,9 @@
{
"extends": ["../../../tsconfig.json", "../../../tsconfig.backend.json"],
"compilerOptions": {
+ "paths": {
+ "@utils/*": ["./utils/*"]
+ },
"tsBuildInfoFile": "dist/typecheck.tsbuildinfo",
// TODO: remove all options below this line
"useUnknownInCatchVariables": false
diff --git a/packages/@n8n/nodes-langchain/utils/descriptions.ts b/packages/@n8n/nodes-langchain/utils/descriptions.ts
index ef683df3e9..e629bab812 100644
--- a/packages/@n8n/nodes-langchain/utils/descriptions.ts
+++ b/packages/@n8n/nodes-langchain/utils/descriptions.ts
@@ -71,13 +71,12 @@ export const promptTypeOptions: INodeProperties = {
type: 'options',
options: [
{
- // eslint-disable-next-line n8n-nodes-base/node-param-display-name-miscased
- name: 'Take from previous node automatically',
+ name: 'Connected Chat Trigger Node',
value: 'auto',
- description: 'Looks for an input field called chatInput',
+ description:
+ "Looks for an input field called 'chatInput' that is coming from a directly connected Chat Trigger",
},
{
- // eslint-disable-next-line n8n-nodes-base/node-param-display-name-miscased
name: 'Define below',
value: 'define',
description: 'Use an expression to reference data in previous nodes or enter static text',
diff --git a/packages/@n8n/nodes-langchain/utils/helpers.ts b/packages/@n8n/nodes-langchain/utils/helpers.ts
index f19cf67153..6b5816e7b8 100644
--- a/packages/@n8n/nodes-langchain/utils/helpers.ts
+++ b/packages/@n8n/nodes-langchain/utils/helpers.ts
@@ -130,7 +130,7 @@ export function getSessionId(
if (sessionId === '' || sessionId === undefined) {
throw new NodeOperationError(ctx.getNode(), 'Key parameter is empty', {
description:
- "Provide a key to use as session ID in the 'Key' parameter or use the 'Take from previous node automatically' option to use the session ID from the previous node, e.t. chat trigger node",
+ "Provide a key to use as session ID in the 'Key' parameter or use the 'Connected Chat Trigger Node' option to use the session ID from your Chat Trigger",
itemIndex,
});
}
diff --git a/packages/@n8n/task-runner/package.json b/packages/@n8n/task-runner/package.json
index daec02c8e9..4375aa413b 100644
--- a/packages/@n8n/task-runner/package.json
+++ b/packages/@n8n/task-runner/package.json
@@ -1,6 +1,6 @@
{
"name": "@n8n/task-runner",
- "version": "1.9.0",
+ "version": "1.10.0",
"scripts": {
"clean": "rimraf dist .turbo",
"start": "node dist/start.js",
@@ -35,13 +35,12 @@
},
"dependencies": {
"@n8n/config": "workspace:*",
- "@sentry/integrations": "catalog:",
"@sentry/node": "catalog:",
"acorn": "8.14.0",
"acorn-walk": "8.3.4",
"n8n-core": "workspace:*",
"n8n-workflow": "workspace:*",
- "nanoid": "^3.3.6",
+ "nanoid": "catalog:",
"typedi": "catalog:",
"ws": "^8.18.0"
},
diff --git a/packages/@n8n/task-runner/src/__tests__/error-reporter.test.ts b/packages/@n8n/task-runner/src/__tests__/error-reporter.test.ts
deleted file mode 100644
index 9345819329..0000000000
--- a/packages/@n8n/task-runner/src/__tests__/error-reporter.test.ts
+++ /dev/null
@@ -1,31 +0,0 @@
-import { mock } from 'jest-mock-extended';
-import { ApplicationError } from 'n8n-workflow';
-
-import { ErrorReporter } from '../error-reporter';
-
-describe('ErrorReporter', () => {
- const errorReporting = new ErrorReporter(mock());
-
- describe('beforeSend', () => {
- it('should return null if originalException is an ApplicationError with level warning', () => {
- const hint = { originalException: new ApplicationError('Test error', { level: 'warning' }) };
- expect(errorReporting.beforeSend(mock(), hint)).toBeNull();
- });
-
- it('should return event if originalException is an ApplicationError with level error', () => {
- const hint = { originalException: new ApplicationError('Test error', { level: 'error' }) };
- expect(errorReporting.beforeSend(mock(), hint)).not.toBeNull();
- });
-
- it('should return null if originalException is an Error with a non-unique stack', () => {
- const hint = { originalException: new Error('Test error') };
- errorReporting.beforeSend(mock(), hint);
- expect(errorReporting.beforeSend(mock(), hint)).toBeNull();
- });
-
- it('should return event if originalException is an Error with a unique stack', () => {
- const hint = { originalException: new Error('Test error') };
- expect(errorReporting.beforeSend(mock(), hint)).not.toBeNull();
- });
- });
-});
diff --git a/packages/@n8n/task-runner/src/config/base-runner-config.ts b/packages/@n8n/task-runner/src/config/base-runner-config.ts
index a1059adf4b..d08056c5ae 100644
--- a/packages/@n8n/task-runner/src/config/base-runner-config.ts
+++ b/packages/@n8n/task-runner/src/config/base-runner-config.ts
@@ -37,6 +37,9 @@ export class BaseRunnerConfig {
@Env('GENERIC_TIMEZONE')
timezone: string = 'America/New_York';
+ @Env('N8N_RUNNERS_TASK_TIMEOUT')
+ taskTimeout: number = 60;
+
@Nested
healthcheckServer!: HealthcheckServerConfig;
}
diff --git a/packages/@n8n/task-runner/src/error-reporter.ts b/packages/@n8n/task-runner/src/error-reporter.ts
deleted file mode 100644
index 167cc37c92..0000000000
--- a/packages/@n8n/task-runner/src/error-reporter.ts
+++ /dev/null
@@ -1,93 +0,0 @@
-import { RewriteFrames } from '@sentry/integrations';
-import { init, setTag, captureException, close } from '@sentry/node';
-import type { ErrorEvent, EventHint } from '@sentry/types';
-import * as a from 'assert/strict';
-import { createHash } from 'crypto';
-import
{ ApplicationError } from 'n8n-workflow'; - -import type { SentryConfig } from '@/config/sentry-config'; - -/** - * Handles error reporting using Sentry - */ -export class ErrorReporter { - private isInitialized = false; - - /** Hashes of error stack traces, to deduplicate error reports. */ - private readonly seenErrors = new Set(); - - private get dsn() { - return this.sentryConfig.sentryDsn; - } - - constructor(private readonly sentryConfig: SentryConfig) { - a.ok(this.dsn, 'Sentry DSN is required to initialize Sentry'); - } - - async start() { - if (this.isInitialized) return; - - // Collect longer stacktraces - Error.stackTraceLimit = 50; - - process.on('uncaughtException', captureException); - - const ENABLED_INTEGRATIONS = [ - 'InboundFilters', - 'FunctionToString', - 'LinkedErrors', - 'OnUnhandledRejection', - 'ContextLines', - ]; - - setTag('server_type', 'task_runner'); - - init({ - dsn: this.dsn, - release: this.sentryConfig.n8nVersion, - environment: this.sentryConfig.environment, - enableTracing: false, - serverName: this.sentryConfig.deploymentName, - beforeBreadcrumb: () => null, - beforeSend: async (event, hint) => await this.beforeSend(event, hint), - integrations: (integrations) => [ - ...integrations.filter(({ name }) => ENABLED_INTEGRATIONS.includes(name)), - new RewriteFrames({ root: process.cwd() }), - ], - }); - - this.isInitialized = true; - } - - async stop() { - if (!this.isInitialized) { - return; - } - - await close(1000); - } - - async beforeSend(event: ErrorEvent, { originalException }: EventHint) { - if (!originalException) return null; - - if (originalException instanceof Promise) { - originalException = await originalException.catch((error) => error as Error); - } - - if (originalException instanceof ApplicationError) { - const { level, extra, tags } = originalException; - if (level === 'warning') return null; - event.level = level; - if (extra) event.extra = { ...event.extra, ...extra }; - if (tags) event.tags = { ...event.tags, ...tags }; - } - - if (originalException instanceof Error && originalException.stack) { - const eventHash = createHash('sha1').update(originalException.stack).digest('base64'); - if (this.seenErrors.has(eventHash)) return null; - this.seenErrors.add(eventHash); - } - - return event; - } -} diff --git a/packages/@n8n/task-runner/src/healthcheck-server.ts b/packages/@n8n/task-runner/src/health-check-server.ts similarity index 88% rename from packages/@n8n/task-runner/src/healthcheck-server.ts rename to packages/@n8n/task-runner/src/health-check-server.ts index c6d8965a86..9cb4cae6a0 100644 --- a/packages/@n8n/task-runner/src/healthcheck-server.ts +++ b/packages/@n8n/task-runner/src/health-check-server.ts @@ -1,7 +1,7 @@ import { ApplicationError } from 'n8n-workflow'; import { createServer } from 'node:http'; -export class HealthcheckServer { +export class HealthCheckServer { private server = createServer((_, res) => { res.writeHead(200); res.end('OK'); @@ -21,7 +21,7 @@ export class HealthcheckServer { this.server.listen(port, host, () => { this.server.removeListener('error', portInUseErrorHandler); - console.log(`Healthcheck server listening on ${host}, port ${port}`); + console.log(`Health check server listening on ${host}, port ${port}`); resolve(); }); }); diff --git a/packages/@n8n/task-runner/src/js-task-runner/__tests__/js-task-runner.test.ts b/packages/@n8n/task-runner/src/js-task-runner/__tests__/js-task-runner.test.ts index 439de19eac..e5df5b64a3 100644 --- 
a/packages/@n8n/task-runner/src/js-task-runner/__tests__/js-task-runner.test.ts +++ b/packages/@n8n/task-runner/src/js-task-runner/__tests__/js-task-runner.test.ts @@ -1,3 +1,4 @@ +import { mock } from 'jest-mock-extended'; import { DateTime } from 'luxon'; import { setGlobalState, type CodeExecutionMode, type IDataObject } from 'n8n-workflow'; import fs from 'node:fs'; @@ -35,6 +36,7 @@ describe('JsTaskRunner', () => { grantToken: 'grantToken', maxConcurrency: 1, taskBrokerUri: 'http://localhost', + taskTimeout: 60, ...baseRunnerOpts, }, jsRunnerConfig: { @@ -61,7 +63,7 @@ describe('JsTaskRunner', () => { runner?: JsTaskRunner; }) => { jest.spyOn(runner, 'requestData').mockResolvedValue(taskData); - return await runner.executeTask(task); + return await runner.executeTask(task, mock()); }; afterEach(() => { @@ -135,6 +137,36 @@ describe('JsTaskRunner', () => { ]); }, ); + + it('should not throw when using unsupported console methods', async () => { + const task = newTaskWithSettings({ + code: ` + console.warn('test'); + console.error('test'); + console.info('test'); + console.debug('test'); + console.trace('test'); + console.dir({}); + console.time('test'); + console.timeEnd('test'); + console.timeLog('test'); + console.assert(true); + console.clear(); + console.group('test'); + console.groupEnd(); + console.table([]); + return {json: {}} + `, + nodeMode: 'runOnceForAllItems', + }); + + await expect( + execTaskWithParams({ + task, + taskData: newDataRequestResponse([wrapIntoJson({})]), + }), + ).resolves.toBeDefined(); + }); }); describe('built-in methods and variables available in the context', () => { @@ -213,6 +245,7 @@ describe('JsTaskRunner', () => { ['$runIndex', 0], ['{ wf: $workflow }', { wf: { active: true, id: '1', name: 'Test Workflow' } }], ['$vars', { var: 'value' }], + ['$getWorkflowStaticData("global")', {}], ], 'Node.js internal functions': [ ['typeof Function', 'function'], @@ -363,6 +396,177 @@ describe('JsTaskRunner', () => { }); }); + describe("$getWorkflowStaticData('global')", () => { + it('should have the global workflow static data available in runOnceForAllItems', async () => { + const outcome = await execTaskWithParams({ + task: newTaskWithSettings({ + code: 'return { val: $getWorkflowStaticData("global") }', + nodeMode: 'runOnceForAllItems', + }), + taskData: newDataRequestResponse(inputItems.map(wrapIntoJson), { + staticData: { + global: { key: 'value' }, + }, + }), + }); + + expect(outcome.result).toEqual([wrapIntoJson({ val: { key: 'value' } })]); + }); + + it('should have the global workflow static data available in runOnceForEachItem', async () => { + const outcome = await execTaskWithParams({ + task: newTaskWithSettings({ + code: 'return { val: $getWorkflowStaticData("global") }', + nodeMode: 'runOnceForEachItem', + }), + taskData: newDataRequestResponse(inputItems.map(wrapIntoJson), { + staticData: { + global: { key: 'value' }, + }, + }), + }); + + expect(outcome.result).toEqual([ + withPairedItem(0, wrapIntoJson({ val: { key: 'value' } })), + ]); + }); + + test.each<[CodeExecutionMode]>([['runOnceForAllItems'], ['runOnceForEachItem']])( + "does not return static data if it hasn't been modified in %s", + async (mode) => { + const outcome = await execTaskWithParams({ + task: newTaskWithSettings({ + code: ` + const staticData = $getWorkflowStaticData("global"); + return { val: staticData }; + `, + nodeMode: mode, + }), + taskData: newDataRequestResponse(inputItems.map(wrapIntoJson), { + staticData: { + global: { key: 'value' }, + }, + }), + }); + + 
expect(outcome.staticData).toBeUndefined(); + }, + ); + + test.each<[CodeExecutionMode]>([['runOnceForAllItems'], ['runOnceForEachItem']])( + 'returns the updated static data in %s', + async (mode) => { + const outcome = await execTaskWithParams({ + task: newTaskWithSettings({ + code: ` + const staticData = $getWorkflowStaticData("global"); + staticData.newKey = 'newValue'; + return { val: staticData }; + `, + nodeMode: mode, + }), + taskData: newDataRequestResponse(inputItems.map(wrapIntoJson), { + staticData: { + global: { key: 'value' }, + 'node:OtherNode': { some: 'data' }, + }, + }), + }); + + expect(outcome.staticData).toEqual({ + global: { key: 'value', newKey: 'newValue' }, + 'node:OtherNode': { some: 'data' }, + }); + }, + ); + }); + + describe("$getWorkflowStaticData('node')", () => { + const createTaskDataWithNodeStaticData = (nodeStaticData: IDataObject) => { + const taskData = newDataRequestResponse(inputItems.map(wrapIntoJson)); + const taskDataKey = `node:${taskData.node.name}`; + taskData.workflow.staticData = { + global: { 'global-key': 'global-value' }, + 'node:OtherNode': { 'other-key': 'other-value' }, + [taskDataKey]: nodeStaticData, + }; + + return taskData; + }; + + it('should have the node workflow static data available in runOnceForAllItems', async () => { + const outcome = await execTaskWithParams({ + task: newTaskWithSettings({ + code: 'return { val: $getWorkflowStaticData("node") }', + nodeMode: 'runOnceForAllItems', + }), + taskData: createTaskDataWithNodeStaticData({ key: 'value' }), + }); + + expect(outcome.result).toEqual([wrapIntoJson({ val: { key: 'value' } })]); + }); + + it('should have the node workflow static data available in runOnceForEachItem', async () => { + const outcome = await execTaskWithParams({ + task: newTaskWithSettings({ + code: 'return { val: $getWorkflowStaticData("node") }', + nodeMode: 'runOnceForEachItem', + }), + taskData: createTaskDataWithNodeStaticData({ key: 'value' }), + }); + + expect(outcome.result).toEqual([ + withPairedItem(0, wrapIntoJson({ val: { key: 'value' } })), + ]); + }); + + test.each<[CodeExecutionMode]>([['runOnceForAllItems'], ['runOnceForEachItem']])( + "does not return static data if it hasn't been modified in %s", + async (mode) => { + const outcome = await execTaskWithParams({ + task: newTaskWithSettings({ + code: ` + const staticData = $getWorkflowStaticData("node"); + return { val: staticData }; + `, + nodeMode: mode, + }), + taskData: createTaskDataWithNodeStaticData({ key: 'value' }), + }); + + expect(outcome.staticData).toBeUndefined(); + }, + ); + + test.each<[CodeExecutionMode]>([['runOnceForAllItems'], ['runOnceForEachItem']])( + 'returns the updated static data in %s', + async (mode) => { + const outcome = await execTaskWithParams({ + task: newTaskWithSettings({ + code: ` + const staticData = $getWorkflowStaticData("node"); + staticData.newKey = 'newValue'; + return { val: staticData }; + `, + nodeMode: mode, + }), + taskData: createTaskDataWithNodeStaticData({ key: 'value' }), + }); + + expect(outcome.staticData).toEqual({ + global: { 'global-key': 'global-value' }, + 'node:JsCode': { + key: 'value', + newKey: 'newValue', + }, + 'node:OtherNode': { + 'other-key': 'other-value', + }, + }); + }, + ); + }); + it('should allow access to Node.js Buffers', async () => { const outcomeAll = await execTaskWithParams({ task: newTaskWithSettings({ diff --git a/packages/@n8n/task-runner/src/js-task-runner/__tests__/task-runner.test.ts b/packages/@n8n/task-runner/src/js-task-runner/__tests__/task-runner.test.ts 
index c633e95688..e12770f770 100644 --- a/packages/@n8n/task-runner/src/js-task-runner/__tests__/task-runner.test.ts +++ b/packages/@n8n/task-runner/src/js-task-runner/__tests__/task-runner.test.ts @@ -21,6 +21,7 @@ describe('TestRunner', () => { maxPayloadSize: 1024, taskBrokerUri: 'http://localhost:8080', timezone: 'America/New_York', + taskTimeout: 60, healthcheckServer: { enabled: false, host: 'localhost', @@ -37,6 +38,8 @@ describe('TestRunner', () => { maxPayload: 1024, }), ); + + runner.clearIdleTimer(); }); it('should handle different taskBrokerUri formats correctly', () => { @@ -48,6 +51,7 @@ describe('TestRunner', () => { maxPayloadSize: 1024, taskBrokerUri: 'https://example.com:3000/path', timezone: 'America/New_York', + taskTimeout: 60, healthcheckServer: { enabled: false, host: 'localhost', @@ -64,6 +68,8 @@ describe('TestRunner', () => { maxPayload: 1024, }), ); + + runner.clearIdleTimer(); }); it('should throw an error if taskBrokerUri is invalid', () => { @@ -77,6 +83,7 @@ describe('TestRunner', () => { maxPayloadSize: 1024, taskBrokerUri: 'not-a-valid-uri', timezone: 'America/New_York', + taskTimeout: 60, healthcheckServer: { enabled: false, host: 'localhost', @@ -86,4 +93,65 @@ describe('TestRunner', () => { ).toThrowError(/Invalid URL/); }); }); + + describe('taskCancelled', () => { + it('should reject pending requests when task is cancelled', () => { + const runner = new TestRunner({ + taskType: 'test-task', + maxConcurrency: 5, + idleTimeout: 60, + grantToken: 'test-token', + maxPayloadSize: 1024, + taskBrokerUri: 'http://localhost:8080', + timezone: 'America/New_York', + taskTimeout: 60, + healthcheckServer: { + enabled: false, + host: 'localhost', + port: 8081, + }, + }); + + const taskId = 'test-task'; + runner.runningTasks.set(taskId, { + taskId, + active: false, + cancelled: false, + }); + + const dataRequestReject = jest.fn(); + const nodeTypesRequestReject = jest.fn(); + + runner.dataRequests.set('data-req', { + taskId, + requestId: 'data-req', + resolve: jest.fn(), + reject: dataRequestReject, + }); + + runner.nodeTypesRequests.set('node-req', { + taskId, + requestId: 'node-req', + resolve: jest.fn(), + reject: nodeTypesRequestReject, + }); + + runner.taskCancelled(taskId, 'test-reason'); + + expect(dataRequestReject).toHaveBeenCalledWith( + expect.objectContaining({ + message: 'Task cancelled: test-reason', + }), + ); + + expect(nodeTypesRequestReject).toHaveBeenCalledWith( + expect.objectContaining({ + message: 'Task cancelled: test-reason', + }), + ); + + expect(runner.dataRequests.size).toBe(0); + expect(runner.nodeTypesRequests.size).toBe(0); + }); + }); }); diff --git a/packages/@n8n/task-runner/src/js-task-runner/__tests__/test-data.ts b/packages/@n8n/task-runner/src/js-task-runner/__tests__/test-data.ts index ef910e838f..f13939e51e 100644 --- a/packages/@n8n/task-runner/src/js-task-runner/__tests__/test-data.ts +++ b/packages/@n8n/task-runner/src/js-task-runner/__tests__/test-data.ts @@ -50,7 +50,9 @@ export const newTaskData = (opts: Partial & Pick */ export const newDataRequestResponse = ( inputData: INodeExecutionData[], - opts: Partial = {}, + opts: Partial & { + staticData?: IDataObject; + } = {}, ): DataRequestResponse => { const codeNode = newNode({ name: 'JsCode', @@ -81,6 +83,7 @@ export const newDataRequestResponse = ( }, }, nodes: [manualTriggerNode, codeNode], + staticData: opts.staticData, }, inputData: { main: [inputData], diff --git a/packages/@n8n/task-runner/src/js-task-runner/errors/task-cancelled-error.ts 
b/packages/@n8n/task-runner/src/js-task-runner/errors/task-cancelled-error.ts new file mode 100644 index 0000000000..1970c11fcd --- /dev/null +++ b/packages/@n8n/task-runner/src/js-task-runner/errors/task-cancelled-error.ts @@ -0,0 +1,7 @@ +import { ApplicationError } from 'n8n-workflow'; + +export class TaskCancelledError extends ApplicationError { + constructor(reason: string) { + super(`Task cancelled: ${reason}`, { level: 'warning' }); + } +} diff --git a/packages/@n8n/task-runner/src/js-task-runner/errors/timeout-error.ts b/packages/@n8n/task-runner/src/js-task-runner/errors/timeout-error.ts new file mode 100644 index 0000000000..ef3cc89751 --- /dev/null +++ b/packages/@n8n/task-runner/src/js-task-runner/errors/timeout-error.ts @@ -0,0 +1,30 @@ +import { ApplicationError } from 'n8n-workflow'; + +export class TimeoutError extends ApplicationError { + description: string; + + constructor(taskTimeout: number) { + super( + `Task execution timed out after ${taskTimeout} ${taskTimeout === 1 ? 'second' : 'seconds'}`, + ); + + const subtitle = 'The task runner was taking too long on this task, so the task was aborted.'; + + const fixes = { + optimizeScript: + 'Optimize your script to prevent long-running tasks, e.g. by processing data in smaller batches.', + ensureTermination: + 'Ensure that all paths in your script are able to terminate, i.e. no infinite loops.', + }; + + const suggestions = [fixes.optimizeScript, fixes.ensureTermination]; + + const suggestionsText = suggestions + .map((suggestion, index) => `${index + 1}. ${suggestion}`) + .join('\n');
+ + const description = `${subtitle} You can try the following:

${suggestionsText}`; + + this.description = description; + } +} diff --git a/packages/@n8n/task-runner/src/js-task-runner/js-task-runner.ts b/packages/@n8n/task-runner/src/js-task-runner/js-task-runner.ts index 005267862e..89931ce67f 100644 --- a/packages/@n8n/task-runner/src/js-task-runner/js-task-runner.ts +++ b/packages/@n8n/task-runner/src/js-task-runner/js-task-runner.ts @@ -1,5 +1,5 @@ import { getAdditionalKeys } from 'n8n-core'; -import { WorkflowDataProxy, Workflow } from 'n8n-workflow'; +import { WorkflowDataProxy, Workflow, ObservableObject } from 'n8n-workflow'; import type { CodeExecutionMode, IWorkflowExecuteAdditionalData, @@ -32,6 +32,7 @@ import { BuiltInsParserState } from './built-ins-parser/built-ins-parser-state'; import { isErrorLike } from './errors/error-like'; import { ExecutionError } from './errors/execution-error'; import { makeSerializable } from './errors/serializable-error'; +import { TimeoutError } from './errors/timeout-error'; import type { RequireResolver } from './require-resolver'; import { createRequireResolver } from './require-resolver'; import { validateRunForAllItemsOutput, validateRunForEachItemOutput } from './result-validation'; @@ -94,7 +95,7 @@ export class JsTaskRunner extends TaskRunner { }); } - async executeTask(task: Task): Promise { + async executeTask(task: Task, signal: AbortSignal): Promise { const settings = task.settings; a.ok(settings, 'JS Code not sent to runner'); @@ -120,7 +121,13 @@ export class JsTaskRunner extends TaskRunner { nodeTypes: this.nodeTypes, }); + const noOp = () => {}; const customConsole = { + // all except `log` are dummy methods that disregard without throwing, following existing Code node behavior + ...Object.keys(console).reduce void>>((acc, name) => { + acc[name] = noOp; + return acc; + }, {}), // Send log output back to the main process. It will take care of forwarding // it to the UI or printing to console. log: (...args: unknown[]) => { @@ -131,14 +138,17 @@ export class JsTaskRunner extends TaskRunner { }, }; + workflow.staticData = ObservableObject.create(workflow.staticData); + const result = settings.nodeMode === 'runOnceForAllItems' - ? await this.runForAllItems(task.taskId, settings, data, workflow, customConsole) - : await this.runForEachItem(task.taskId, settings, data, workflow, customConsole); + ? await this.runForAllItems(task.taskId, settings, data, workflow, customConsole, signal) + : await this.runForEachItem(task.taskId, settings, data, workflow, customConsole, signal); return { result, customData: data.runExecutionData.resultData.metadata, + staticData: workflow.staticData.__dataChanged ? 
workflow.staticData : undefined, }; } @@ -183,6 +193,7 @@ export class JsTaskRunner extends TaskRunner { data: JsTaskData, workflow: Workflow, customConsole: CustomConsole, + signal: AbortSignal, ): Promise { const dataProxy = this.createDataProxy(data, workflow, data.itemIndex); const inputItems = data.connectionInputData; @@ -192,17 +203,33 @@ export class JsTaskRunner extends TaskRunner { module: {}, console: customConsole, items: inputItems, - + $getWorkflowStaticData: (type: 'global' | 'node') => workflow.getStaticData(type, data.node), ...this.getNativeVariables(), ...dataProxy, ...this.buildRpcCallObject(taskId), }; try { - const result = (await runInNewContext( - `globalThis.global = globalThis; module.exports = async function VmCodeWrapper() {${settings.code}\n}()`, - context, - )) as TaskResultData['result']; + const result = await new Promise((resolve, reject) => { + const abortHandler = () => { + reject(new TimeoutError(this.taskTimeout)); + }; + + signal.addEventListener('abort', abortHandler, { once: true }); + + const taskResult = runInNewContext( + `globalThis.global = globalThis; module.exports = async function VmCodeWrapper() {${settings.code}\n}()`, + context, + { timeout: this.taskTimeout * 1000 }, + ) as Promise; + + void taskResult + .then(resolve) + .catch(reject) + .finally(() => { + signal.removeEventListener('abort', abortHandler); + }); + }); if (result === null) { return []; @@ -230,6 +257,7 @@ export class JsTaskRunner extends TaskRunner { data: JsTaskData, workflow: Workflow, customConsole: CustomConsole, + signal: AbortSignal, ): Promise { const inputItems = data.connectionInputData; const returnData: INodeExecutionData[] = []; @@ -248,17 +276,34 @@ export class JsTaskRunner extends TaskRunner { module: {}, console: customConsole, item, - + $getWorkflowStaticData: (type: 'global' | 'node') => + workflow.getStaticData(type, data.node), ...this.getNativeVariables(), ...dataProxy, ...this.buildRpcCallObject(taskId), }; try { - let result = (await runInNewContext( - `module.exports = async function VmCodeWrapper() {${settings.code}\n}()`, - context, - )) as INodeExecutionData | undefined; + let result = await new Promise((resolve, reject) => { + const abortHandler = () => { + reject(new TimeoutError(this.taskTimeout)); + }; + + signal.addEventListener('abort', abortHandler); + + const taskResult = runInNewContext( + `module.exports = async function VmCodeWrapper() {${settings.code}\n}()`, + context, + { timeout: this.taskTimeout * 1000 }, + ) as Promise; + + void taskResult + .then(resolve) + .catch(reject) + .finally(() => { + signal.removeEventListener('abort', abortHandler); + }); + }); // Filter out null values if (result === null) { diff --git a/packages/@n8n/task-runner/src/runner-types.ts b/packages/@n8n/task-runner/src/runner-types.ts index 174d652e7f..5075b19db2 100644 --- a/packages/@n8n/task-runner/src/runner-types.ts +++ b/packages/@n8n/task-runner/src/runner-types.ts @@ -61,6 +61,7 @@ export interface DataRequestResponse { export interface TaskResultData { result: INodeExecutionData[]; customData?: Record; + staticData?: IDataObject; } export interface TaskData { diff --git a/packages/@n8n/task-runner/src/start.ts b/packages/@n8n/task-runner/src/start.ts index f68779f38d..69e9462516 100644 --- a/packages/@n8n/task-runner/src/start.ts +++ b/packages/@n8n/task-runner/src/start.ts @@ -1,17 +1,17 @@ +import type { ErrorReporter } from 'n8n-core'; import { ensureError, setGlobalState } from 'n8n-workflow'; import Container from 'typedi'; import { 
MainConfig } from './config/main-config'; -import type { ErrorReporter } from './error-reporter'; -import type { HealthcheckServer } from './healthcheck-server'; +import type { HealthCheckServer } from './health-check-server'; import { JsTaskRunner } from './js-task-runner/js-task-runner'; -let healthcheckServer: HealthcheckServer | undefined; +let healthCheckServer: HealthCheckServer | undefined; let runner: JsTaskRunner | undefined; let isShuttingDown = false; let errorReporter: ErrorReporter | undefined; -function createSignalHandler(signal: string) { +function createSignalHandler(signal: string, timeoutInS = 10) { return async function onSignal() { if (isShuttingDown) { return; @@ -19,16 +19,21 @@ function createSignalHandler(signal: string) { console.log(`Received ${signal} signal, shutting down...`); + setTimeout(() => { + console.error('Shutdown timeout reached, forcing shutdown...'); + process.exit(1); + }, timeoutInS * 1000).unref(); + isShuttingDown = true; try { if (runner) { await runner.stop(); runner = undefined; - void healthcheckServer?.stop(); + void healthCheckServer?.stop(); } if (errorReporter) { - await errorReporter.stop(); + await errorReporter.shutdown(); errorReporter = undefined; } } catch (e) { @@ -49,22 +54,23 @@ void (async function start() { }); if (config.sentryConfig.sentryDsn) { - const { ErrorReporter } = await import('@/error-reporter'); - errorReporter = new ErrorReporter(config.sentryConfig); - await errorReporter.start(); + const { ErrorReporter } = await import('n8n-core'); + errorReporter = new ErrorReporter(); + await errorReporter.init('task_runner', config.sentryConfig.sentryDsn); } runner = new JsTaskRunner(config); runner.on('runner:reached-idle-timeout', () => { - void createSignalHandler('IDLE_TIMEOUT')(); + // Use shorter timeout since we know we don't have any tasks running + void createSignalHandler('IDLE_TIMEOUT', 1)(); }); const { enabled, host, port } = config.baseRunnerConfig.healthcheckServer; if (enabled) { - const { HealthcheckServer } = await import('./healthcheck-server'); - healthcheckServer = new HealthcheckServer(); - await healthcheckServer.start(host, port); + const { HealthCheckServer } = await import('./health-check-server'); + healthCheckServer = new HealthCheckServer(); + await healthCheckServer.start(host, port); } process.on('SIGINT', createSignalHandler('SIGINT')); diff --git a/packages/@n8n/task-runner/src/task-runner.ts b/packages/@n8n/task-runner/src/task-runner.ts index f0af115b5a..e8ee605ef5 100644 --- a/packages/@n8n/task-runner/src/task-runner.ts +++ b/packages/@n8n/task-runner/src/task-runner.ts @@ -8,6 +8,8 @@ import type { BrokerMessage, RunnerMessage } from '@/message-types'; import { TaskRunnerNodeTypes } from '@/node-types'; import { RPC_ALLOW_LIST, type TaskResultData } from '@/runner-types'; +import { TaskCancelledError } from './js-task-runner/errors/task-cancelled-error'; + export interface Task { taskId: string; settings?: T; @@ -21,12 +23,14 @@ export interface TaskOffer { } interface DataRequest { + taskId: string; requestId: string; resolve: (data: unknown) => void; reject: (error: unknown) => void; } interface NodeTypesRequest { + taskId: string; requestId: string; resolve: (data: unknown) => void; reject: (error: unknown) => void; @@ -82,14 +86,20 @@ export abstract class TaskRunner extends EventEmitter { private idleTimer: NodeJS.Timeout | undefined; + /** How long (in seconds) a task is allowed to take for completion, else the task will be aborted. 
*/ + protected readonly taskTimeout: number; + /** How long (in seconds) a runner may be idle for before exit. */ private readonly idleTimeout: number; + protected taskCancellations = new Map(); + constructor(opts: TaskRunnerOpts) { super(); this.taskType = opts.taskType; this.name = opts.name ?? 'Node.js Task Runner SDK'; this.maxConcurrency = opts.maxConcurrency; + this.taskTimeout = opts.taskTimeout; this.idleTimeout = opts.idleTimeout; const { host: taskBrokerHost } = new URL(opts.taskBrokerUri); @@ -210,7 +220,7 @@ export abstract class TaskRunner extends EventEmitter { this.offerAccepted(message.offerId, message.taskId); break; case 'broker:taskcancel': - this.taskCancelled(message.taskId); + this.taskCancelled(message.taskId, message.reason); break; case 'broker:tasksettings': void this.receivedSettings(message.taskId, message.settings); @@ -285,17 +295,35 @@ export abstract class TaskRunner extends EventEmitter { }); } - taskCancelled(taskId: string) { + taskCancelled(taskId: string, reason: string) { const task = this.runningTasks.get(taskId); if (!task) { return; } task.cancelled = true; - if (task.active) { - // TODO - } else { - this.runningTasks.delete(taskId); + + for (const [requestId, request] of this.dataRequests.entries()) { + if (request.taskId === taskId) { + request.reject(new TaskCancelledError(reason)); + this.dataRequests.delete(requestId); + } } + + for (const [requestId, request] of this.nodeTypesRequests.entries()) { + if (request.taskId === taskId) { + request.reject(new TaskCancelledError(reason)); + this.nodeTypesRequests.delete(requestId); + } + } + + const controller = this.taskCancellations.get(taskId); + if (controller) { + controller.abort(); + this.taskCancellations.delete(taskId); + } + + if (!task.active) this.runningTasks.delete(taskId); + this.sendOffers(); } @@ -328,20 +356,33 @@ export abstract class TaskRunner extends EventEmitter { this.runningTasks.delete(taskId); return; } + + const controller = new AbortController(); + this.taskCancellations.set(taskId, controller); + + const taskTimeout = setTimeout(() => { + if (!task.cancelled) { + controller.abort(); + this.taskCancellations.delete(taskId); + } + }, this.taskTimeout * 1_000); + task.settings = settings; task.active = true; try { - const data = await this.executeTask(task); + const data = await this.executeTask(task, controller.signal); this.taskDone(taskId, data); } catch (error) { - this.taskErrored(taskId, error); + if (!task.cancelled) this.taskErrored(taskId, error); } finally { + clearTimeout(taskTimeout); + this.taskCancellations.delete(taskId); this.resetIdleTimer(); } } // eslint-disable-next-line @typescript-eslint/naming-convention - async executeTask(_task: Task): Promise { + async executeTask(_task: Task, _signal: AbortSignal): Promise { throw new ApplicationError('Unimplemented'); } @@ -354,6 +395,7 @@ export abstract class TaskRunner extends EventEmitter { const nodeTypesPromise = new Promise((resolve, reject) => { this.nodeTypesRequests.set(requestId, { requestId, + taskId, resolve: resolve as (data: unknown) => void, reject, }); @@ -382,6 +424,7 @@ export abstract class TaskRunner extends EventEmitter { const p = new Promise((resolve, reject) => { this.dataRequests.set(requestId, { requestId, + taskId, resolve: resolve as (data: unknown) => void, reject, }); diff --git a/packages/cli/package.json b/packages/cli/package.json index 87f0c65122..317aeb0d9c 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -1,6 +1,6 @@ { "name": "n8n", - "version": 
"1.71.0", + "version": "1.72.0", "description": "n8n Workflow Automation Tool", "main": "dist/index", "types": "dist/index.d.ts", @@ -98,7 +98,6 @@ "@n8n_io/license-sdk": "2.13.1", "@oclif/core": "4.0.7", "@rudderstack/rudder-sdk-node": "2.0.9", - "@sentry/integrations": "catalog:", "@sentry/node": "catalog:", "aws4": "1.11.0", "axios": "catalog:", diff --git a/packages/cli/src/__tests__/active-workflow-manager.test.ts b/packages/cli/src/__tests__/active-workflow-manager.test.ts new file mode 100644 index 0000000000..a167f1e5a5 --- /dev/null +++ b/packages/cli/src/__tests__/active-workflow-manager.test.ts @@ -0,0 +1,125 @@ +import { mock } from 'jest-mock-extended'; +import type { InstanceSettings } from 'n8n-core'; +import type { + WorkflowParameters, + INode, + INodeType, + INodeTypeDescription, + WorkflowActivateMode, +} from 'n8n-workflow'; +import { Workflow } from 'n8n-workflow'; + +import { ActiveWorkflowManager } from '@/active-workflow-manager'; +import type { NodeTypes } from '@/node-types'; + +describe('ActiveWorkflowManager', () => { + let activeWorkflowManager: ActiveWorkflowManager; + const instanceSettings = mock(); + const nodeTypes = mock(); + + beforeEach(() => { + jest.clearAllMocks(); + activeWorkflowManager = new ActiveWorkflowManager( + mock(), + mock(), + mock(), + mock(), + mock(), + nodeTypes, + mock(), + mock(), + mock(), + mock(), + mock(), + mock(), + mock(), + mock(), + instanceSettings, + mock(), + ); + }); + + describe('checkIfWorkflowCanBeActivated', () => { + const disabledNode = mock({ type: 'triggerNode', disabled: true }); + const unknownNode = mock({ type: 'unknownNode' }); + const noTriggersNode = mock({ type: 'noTriggersNode' }); + const pollNode = mock({ type: 'pollNode' }); + const triggerNode = mock({ type: 'triggerNode' }); + const webhookNode = mock({ type: 'webhookNode' }); + + nodeTypes.getByNameAndVersion.mockImplementation((type) => { + // TODO: getByNameAndVersion signature needs to be updated to allow returning undefined + if (type === 'unknownNode') return undefined as unknown as INodeType; + const partial: Partial = { + poll: undefined, + trigger: undefined, + webhook: undefined, + description: mock({ + properties: [], + }), + }; + if (type === 'pollNode') partial.poll = jest.fn(); + if (type === 'triggerNode') partial.trigger = jest.fn(); + if (type === 'webhookNode') partial.webhook = jest.fn(); + return mock(partial); + }); + + test.each([ + ['should skip disabled nodes', disabledNode, [], false], + ['should skip nodes marked as ignored', triggerNode, ['triggerNode'], false], + ['should skip unknown nodes', unknownNode, [], false], + ['should skip nodes with no trigger method', noTriggersNode, [], false], + ['should activate if poll method exists', pollNode, [], true], + ['should activate if trigger method exists', triggerNode, [], true], + ['should activate if webhook method exists', webhookNode, [], true], + ])('%s', async (_, node, ignoredNodes, expected) => { + const workflow = new Workflow(mock({ nodeTypes, nodes: [node] })); + const canBeActivated = activeWorkflowManager.checkIfWorkflowCanBeActivated( + workflow, + ignoredNodes, + ); + expect(canBeActivated).toBe(expected); + }); + }); + + describe('shouldAddWebhooks', () => { + describe('if leader', () => { + beforeAll(() => { + Object.assign(instanceSettings, { isLeader: true, isFollower: false }); + }); + + test('should return `true` for `init`', () => { + // ensure webhooks are populated on init: https://github.com/n8n-io/n8n/pull/8830 + const result = 
activeWorkflowManager.shouldAddWebhooks('init'); + expect(result).toBe(true); + }); + + test('should return `false` for `leadershipChange`', () => { + const result = activeWorkflowManager.shouldAddWebhooks('leadershipChange'); + expect(result).toBe(false); + }); + + test('should return `true` for `update` or `activate`', () => { + const modes = ['update', 'activate'] as WorkflowActivateMode[]; + for (const mode of modes) { + const result = activeWorkflowManager.shouldAddWebhooks(mode); + expect(result).toBe(true); + } + }); + }); + + describe('if follower', () => { + beforeAll(() => { + Object.assign(instanceSettings, { isLeader: false, isFollower: true }); + }); + + test('should return `false` for `update` or `activate`', () => { + const modes = ['update', 'activate'] as WorkflowActivateMode[]; + for (const mode of modes) { + const result = activeWorkflowManager.shouldAddWebhooks(mode); + expect(result).toBe(false); + } + }); + }); + }); +}); diff --git a/packages/cli/src/__tests__/credential-types.test.ts b/packages/cli/src/__tests__/credential-types.test.ts index 0ccb5ab771..82780d114c 100644 --- a/packages/cli/src/__tests__/credential-types.test.ts +++ b/packages/cli/src/__tests__/credential-types.test.ts @@ -1,41 +1,121 @@ -import { Container } from 'typedi'; +import { mock } from 'jest-mock-extended'; +import { UnrecognizedCredentialTypeError } from 'n8n-core'; +import type { ICredentialType, LoadedClass } from 'n8n-workflow'; import { CredentialTypes } from '@/credential-types'; -import { LoadNodesAndCredentials } from '@/load-nodes-and-credentials'; -import { mockInstance } from '@test/mocking'; +import type { LoadNodesAndCredentials } from '@/load-nodes-and-credentials'; describe('CredentialTypes', () => { - const mockNodesAndCredentials = mockInstance(LoadNodesAndCredentials, { - loadedCredentials: { - fakeFirstCredential: { - type: { - name: 'fakeFirstCredential', - displayName: 'Fake First Credential', - properties: [], - }, - sourcePath: '', - }, - fakeSecondCredential: { - type: { - name: 'fakeSecondCredential', - displayName: 'Fake Second Credential', - properties: [], - }, - sourcePath: '', - }, - }, + const loadNodesAndCredentials = mock(); + + const credentialTypes = new CredentialTypes(loadNodesAndCredentials); + + const testCredential: LoadedClass = { + sourcePath: '', + type: mock(), + }; + + loadNodesAndCredentials.getCredential.mockImplementation((credentialType) => { + if (credentialType === 'testCredential') return testCredential; + throw new UnrecognizedCredentialTypeError(credentialType); }); - const credentialTypes = Container.get(CredentialTypes); - - test('Should throw error when calling invalid credential name', () => { - expect(() => credentialTypes.getByName('fakeThirdCredential')).toThrowError(); + beforeEach(() => { + jest.clearAllMocks(); }); - test('Should return correct credential type for valid name', () => { - const mockedCredentialTypes = mockNodesAndCredentials.loadedCredentials; - expect(credentialTypes.getByName('fakeFirstCredential')).toStrictEqual( - mockedCredentialTypes.fakeFirstCredential.type, - ); + describe('getByName', () => { + test('Should throw error when calling invalid credential name', () => { + expect(() => credentialTypes.getByName('unknownCredential')).toThrowError('c'); + }); + + test('Should return correct credential type for valid name', () => { + expect(credentialTypes.getByName('testCredential')).toStrictEqual(testCredential.type); + }); + }); + + describe('recognizes', () => { + test('Should recognize credential type that 
exists in knownCredentials', () => { + const credentialTypes = new CredentialTypes( + mock({ + loadedCredentials: {}, + knownCredentials: { testCredential: mock({ supportedNodes: [] }) }, + }), + ); + + expect(credentialTypes.recognizes('testCredential')).toBe(true); + }); + + test('Should recognize credential type that exists in loadedCredentials', () => { + const credentialTypes = new CredentialTypes( + mock({ + loadedCredentials: { testCredential }, + knownCredentials: {}, + }), + ); + + expect(credentialTypes.recognizes('testCredential')).toBe(true); + }); + + test('Should not recognize unknown credential type', () => { + expect(credentialTypes.recognizes('unknownCredential')).toBe(false); + }); + }); + + describe('getSupportedNodes', () => { + test('Should return supported nodes for known credential type', () => { + const supportedNodes = ['node1', 'node2']; + const credentialTypes = new CredentialTypes( + mock({ + knownCredentials: { testCredential: mock({ supportedNodes }) }, + }), + ); + + expect(credentialTypes.getSupportedNodes('testCredential')).toEqual(supportedNodes); + }); + + test('Should return empty array for unknown credential type supported nodes', () => { + expect(credentialTypes.getSupportedNodes('unknownCredential')).toBeEmptyArray(); + }); + }); + + describe('getParentTypes', () => { + test('Should return parent types for credential type with extends', () => { + const credentialTypes = new CredentialTypes( + mock({ + knownCredentials: { + childType: { extends: ['parentType1', 'parentType2'] }, + parentType1: { extends: ['grandparentType'] }, + parentType2: { extends: [] }, + grandparentType: { extends: [] }, + }, + }), + ); + + const parentTypes = credentialTypes.getParentTypes('childType'); + expect(parentTypes).toContain('parentType1'); + expect(parentTypes).toContain('parentType2'); + expect(parentTypes).toContain('grandparentType'); + }); + + test('Should return empty array for credential type without extends', () => { + const credentialTypes = new CredentialTypes( + mock({ + knownCredentials: { testCredential: { extends: [] } }, + }), + ); + + expect(credentialTypes.getParentTypes('testCredential')).toBeEmptyArray(); + }); + + test('Should return empty array for unknown credential type parent types', () => { + const credentialTypes = new CredentialTypes( + mock({ + knownCredentials: {}, + }), + ); + + expect(credentialTypes.getParentTypes('unknownCredential')).toBeEmptyArray(); + }); }); }); diff --git a/packages/cli/src/__tests__/credentials-helper.test.ts b/packages/cli/src/__tests__/credentials-helper.test.ts index 62cab968e4..7deffcd229 100644 --- a/packages/cli/src/__tests__/credentials-helper.test.ts +++ b/packages/cli/src/__tests__/credentials-helper.test.ts @@ -1,3 +1,4 @@ +import { mock } from 'jest-mock-extended'; import type { IAuthenticateGeneric, ICredentialDataDecryptedObject, @@ -5,59 +6,25 @@ import type { IHttpRequestOptions, INode, INodeProperties, + INodeTypes, } from 'n8n-workflow'; -import { NodeConnectionType, deepCopy } from 'n8n-workflow'; -import { Workflow } from 'n8n-workflow'; -import Container from 'typedi'; +import { deepCopy, Workflow } from 'n8n-workflow'; +import { CredentialTypes } from '@/credential-types'; import { CredentialsHelper } from '@/credentials-helper'; -import { CredentialsRepository } from '@/databases/repositories/credentials.repository'; -import { SharedCredentialsRepository } from '@/databases/repositories/shared-credentials.repository'; -import { LoadNodesAndCredentials } from '@/load-nodes-and-credentials'; 
-import { NodeTypes } from '@/node-types'; -import { mockInstance } from '@test/mocking'; +import type { LoadNodesAndCredentials } from '@/load-nodes-and-credentials'; describe('CredentialsHelper', () => { - mockInstance(CredentialsRepository); - mockInstance(SharedCredentialsRepository); - const mockNodesAndCredentials = mockInstance(LoadNodesAndCredentials, { - loadedNodes: { - 'test.set': { - sourcePath: '', - type: { - description: { - displayName: 'Set', - name: 'set', - group: ['input'], - version: 1, - description: 'Sets a value', - defaults: { - name: 'Set', - color: '#0000FF', - }, - inputs: [NodeConnectionType.Main], - outputs: [NodeConnectionType.Main], - properties: [ - { - displayName: 'Value1', - name: 'value1', - type: 'string', - default: 'default-value1', - }, - { - displayName: 'Value2', - name: 'value2', - type: 'string', - default: 'default-value2', - }, - ], - }, - }, - }, - }, - }); + const nodeTypes = mock(); + const mockNodesAndCredentials = mock(); - const nodeTypes = mockInstance(NodeTypes); + const credentialsHelper = new CredentialsHelper( + new CredentialTypes(mockNodesAndCredentials), + mock(), + mock(), + mock(), + mock(), + ); describe('authenticate', () => { const tests: Array<{ @@ -272,19 +239,16 @@ describe('CredentialsHelper', () => { for (const testData of tests) { test(testData.description, async () => { - //@ts-expect-error `loadedCredentials` is a getter and we are replacing it here with a property - mockNodesAndCredentials.loadedCredentials = { - [testData.input.credentialType.name]: { - type: testData.input.credentialType, - sourcePath: '', - }, - }; + const { credentialType } = testData.input; - const credentialsHelper = Container.get(CredentialsHelper); + mockNodesAndCredentials.getCredential.calledWith(credentialType.name).mockReturnValue({ + type: credentialType, + sourcePath: '', + }); const result = await credentialsHelper.authenticate( testData.input.credentials, - testData.input.credentialType.name, + credentialType.name, deepCopy(incomingRequestOptions), workflow, node, diff --git a/packages/cli/src/__tests__/error-reporting.test.ts b/packages/cli/src/__tests__/error-reporting.test.ts deleted file mode 100644 index 5e472b8b99..0000000000 --- a/packages/cli/src/__tests__/error-reporting.test.ts +++ /dev/null @@ -1,61 +0,0 @@ -import { GlobalConfig } from '@n8n/config'; -import type { ClientOptions, ErrorEvent } from '@sentry/types'; -import { strict as assert } from 'node:assert'; -import { Container } from 'typedi'; - -import { InternalServerError } from '@/errors/response-errors/internal-server.error'; - -const init = jest.fn(); - -jest.mock('@sentry/integrations'); -jest.mock('@sentry/node', () => ({ - init, - setTag: jest.fn(), - captureException: jest.fn(), - Integrations: {}, -})); - -jest.spyOn(process, 'on'); - -describe('initErrorHandling', () => { - let beforeSend: ClientOptions['beforeSend']; - - beforeAll(async () => { - Container.get(GlobalConfig).sentry.backendDsn = 'backend-dsn'; - const errorReporting = require('@/error-reporting'); - await errorReporting.initErrorHandling(); - const options = (init.mock.calls[0] as [ClientOptions])[0]; - beforeSend = options.beforeSend; - }); - - it('ignores errors with level warning', async () => { - const originalException = new InternalServerError('test'); - originalException.level = 'warning'; - - const event = {} as ErrorEvent; - - assert(beforeSend); - expect(await beforeSend(event, { originalException })).toEqual(null); - }); - - it('keeps events with a cause with error level', async 
() => { - const cause = new Error('cause-error'); - - const originalException = new InternalServerError('test', cause); - const event = {} as ErrorEvent; - - assert(beforeSend); - expect(await beforeSend(event, { originalException })).toEqual(event); - }); - - it('ignores events with error cause with warning level', async () => { - const cause: Error & { level?: 'warning' } = new Error('cause-error'); - cause.level = 'warning'; - - const originalException = new InternalServerError('test', cause); - const event = {} as ErrorEvent; - - assert(beforeSend); - expect(await beforeSend(event, { originalException })).toEqual(null); - }); -}); diff --git a/packages/cli/src/__tests__/license.test.ts b/packages/cli/src/__tests__/license.test.ts index d33d7c37cf..aa0aba1d53 100644 --- a/packages/cli/src/__tests__/license.test.ts +++ b/packages/cli/src/__tests__/license.test.ts @@ -38,7 +38,7 @@ describe('License', () => { license: licenseConfig, multiMainSetup: { enabled: false }, }); - license = new License(mockLogger(), instanceSettings, mock(), mock(), mock(), globalConfig); + license = new License(mockLogger(), instanceSettings, mock(), mock(), globalConfig); await license.init(); }); @@ -70,7 +70,6 @@ describe('License', () => { mock({ instanceType: 'worker' }), mock(), mock(), - mock(), mock({ license: licenseConfig }), ); await license.init(); @@ -211,7 +210,6 @@ describe('License', () => { mock({ instanceType: 'main' }), mock(), mock(), - mock(), globalConfig, ).init(); @@ -229,7 +227,6 @@ describe('License', () => { mock(), mock(), mock(), - mock(), ).init(); expect(LicenseManager).toHaveBeenCalledWith( @@ -250,7 +247,7 @@ describe('License', () => { }); config.set('multiMainSetup.instanceType', status); - await new License(mockLogger(), mock(), mock(), mock(), mock(), globalConfig).init(); + await new License(mockLogger(), mock(), mock(), mock(), globalConfig).init(); expect(LicenseManager).toHaveBeenCalledWith( expect.objectContaining({ autoRenewEnabled: false, renewOnInit: false }), @@ -267,7 +264,7 @@ describe('License', () => { }); config.set('multiMainSetup.instanceType', status); - await new License(mockLogger(), mock(), mock(), mock(), mock(), globalConfig).init(); + await new License(mockLogger(), mock(), mock(), mock(), globalConfig).init(); expect(LicenseManager).toHaveBeenCalledWith( expect.objectContaining({ autoRenewEnabled: false, renewOnInit: false }), @@ -281,7 +278,7 @@ describe('License', () => { }); config.set('multiMainSetup.instanceType', 'leader'); - await new License(mockLogger(), mock(), mock(), mock(), mock(), globalConfig).init(); + await new License(mockLogger(), mock(), mock(), mock(), globalConfig).init(); expect(LicenseManager).toHaveBeenCalledWith( expect.objectContaining({ autoRenewEnabled: true, renewOnInit: true }), @@ -293,7 +290,7 @@ describe('License', () => { describe('reinit', () => { it('should reinitialize license manager', async () => { - const license = new License(mockLogger(), mock(), mock(), mock(), mock(), mock()); + const license = new License(mockLogger(), mock(), mock(), mock(), mock()); await license.init(); const initSpy = jest.spyOn(license, 'init'); diff --git a/packages/cli/src/__tests__/load-nodes-and-credentials.test.ts b/packages/cli/src/__tests__/load-nodes-and-credentials.test.ts index bcf485445f..75aa602301 100644 --- a/packages/cli/src/__tests__/load-nodes-and-credentials.test.ts +++ b/packages/cli/src/__tests__/load-nodes-and-credentials.test.ts @@ -1,5 +1,7 @@ import { mock } from 'jest-mock-extended'; import type { 
DirectoryLoader } from 'n8n-core'; +import type { INodeProperties, INodeTypeDescription } from 'n8n-workflow'; +import { NodeConnectionType } from 'n8n-workflow'; import { LoadNodesAndCredentials } from '../load-nodes-and-credentials'; @@ -8,7 +10,7 @@ describe('LoadNodesAndCredentials', () => { let instance: LoadNodesAndCredentials; beforeEach(() => { - instance = new LoadNodesAndCredentials(mock(), mock(), mock()); + instance = new LoadNodesAndCredentials(mock(), mock(), mock(), mock()); instance.loaders.package1 = mock({ directory: '/icons/package1', }); @@ -34,4 +36,179 @@ describe('LoadNodesAndCredentials', () => { expect(result).toBeUndefined(); }); }); + + describe('convertNodeToAiTool', () => { + const instance = new LoadNodesAndCredentials(mock(), mock(), mock(), mock()); + + let fullNodeWrapper: { description: INodeTypeDescription }; + + beforeEach(() => { + fullNodeWrapper = { + description: { + displayName: 'Test Node', + name: 'testNode', + group: ['test'], + description: 'A test node', + version: 1, + defaults: {}, + inputs: [NodeConnectionType.Main], + outputs: [NodeConnectionType.Main], + properties: [], + }, + }; + }); + + it('should modify the name and displayName correctly', () => { + const result = instance.convertNodeToAiTool(fullNodeWrapper); + expect(result.description.name).toBe('testNodeTool'); + expect(result.description.displayName).toBe('Test Node Tool'); + }); + + it('should update inputs and outputs', () => { + const result = instance.convertNodeToAiTool(fullNodeWrapper); + expect(result.description.inputs).toEqual([]); + expect(result.description.outputs).toEqual([NodeConnectionType.AiTool]); + }); + + it('should remove the usableAsTool property', () => { + fullNodeWrapper.description.usableAsTool = true; + const result = instance.convertNodeToAiTool(fullNodeWrapper); + expect(result.description.usableAsTool).toBeUndefined(); + }); + + it("should add toolDescription property if it doesn't exist", () => { + const result = instance.convertNodeToAiTool(fullNodeWrapper); + const toolDescriptionProp = result.description.properties.find( + (prop) => prop.name === 'toolDescription', + ); + expect(toolDescriptionProp).toBeDefined(); + expect(toolDescriptionProp?.type).toBe('string'); + expect(toolDescriptionProp?.default).toBe(fullNodeWrapper.description.description); + }); + + it('should set codex categories correctly', () => { + const result = instance.convertNodeToAiTool(fullNodeWrapper); + expect(result.description.codex).toEqual({ + categories: ['AI'], + subcategories: { + AI: ['Tools'], + Tools: ['Other Tools'], + }, + resources: {}, + }); + }); + + it('should preserve existing properties', () => { + const existingProp: INodeProperties = { + displayName: 'Existing Prop', + name: 'existingProp', + type: 'string', + default: 'test', + }; + fullNodeWrapper.description.properties = [existingProp]; + const result = instance.convertNodeToAiTool(fullNodeWrapper); + expect(result.description.properties).toHaveLength(3); // Existing prop + toolDescription + notice + expect(result.description.properties).toContainEqual(existingProp); + }); + + it('should handle nodes with resource property', () => { + const resourceProp: INodeProperties = { + displayName: 'Resource', + name: 'resource', + type: 'options', + options: [{ name: 'User', value: 'user' }], + default: 'user', + }; + fullNodeWrapper.description.properties = [resourceProp]; + const result = instance.convertNodeToAiTool(fullNodeWrapper); + expect(result.description.properties[1].name).toBe('descriptionType'); + 
expect(result.description.properties[2].name).toBe('toolDescription'); + expect(result.description.properties[3]).toEqual(resourceProp); + }); + + it('should handle nodes with operation property', () => { + const operationProp: INodeProperties = { + displayName: 'Operation', + name: 'operation', + type: 'options', + options: [{ name: 'Create', value: 'create' }], + default: 'create', + }; + fullNodeWrapper.description.properties = [operationProp]; + const result = instance.convertNodeToAiTool(fullNodeWrapper); + expect(result.description.properties[1].name).toBe('descriptionType'); + expect(result.description.properties[2].name).toBe('toolDescription'); + expect(result.description.properties[3]).toEqual(operationProp); + }); + + it('should handle nodes with both resource and operation properties', () => { + const resourceProp: INodeProperties = { + displayName: 'Resource', + name: 'resource', + type: 'options', + options: [{ name: 'User', value: 'user' }], + default: 'user', + }; + const operationProp: INodeProperties = { + displayName: 'Operation', + name: 'operation', + type: 'options', + options: [{ name: 'Create', value: 'create' }], + default: 'create', + }; + fullNodeWrapper.description.properties = [resourceProp, operationProp]; + const result = instance.convertNodeToAiTool(fullNodeWrapper); + expect(result.description.properties[1].name).toBe('descriptionType'); + expect(result.description.properties[2].name).toBe('toolDescription'); + expect(result.description.properties[3]).toEqual(resourceProp); + expect(result.description.properties[4]).toEqual(operationProp); + }); + + it('should handle nodes with empty properties', () => { + fullNodeWrapper.description.properties = []; + const result = instance.convertNodeToAiTool(fullNodeWrapper); + expect(result.description.properties).toHaveLength(2); + expect(result.description.properties[1].name).toBe('toolDescription'); + }); + + it('should handle nodes with existing codex property', () => { + fullNodeWrapper.description.codex = { + categories: ['Existing'], + subcategories: { + Existing: ['Category'], + }, + resources: { + primaryDocumentation: [{ url: 'https://example.com' }], + }, + }; + const result = instance.convertNodeToAiTool(fullNodeWrapper); + expect(result.description.codex).toEqual({ + categories: ['AI'], + subcategories: { + AI: ['Tools'], + Tools: ['Other Tools'], + }, + resources: { + primaryDocumentation: [{ url: 'https://example.com' }], + }, + }); + }); + + it('should handle nodes with very long names', () => { + fullNodeWrapper.description.name = 'veryLongNodeNameThatExceedsNormalLimits'.repeat(10); + fullNodeWrapper.description.displayName = + 'Very Long Node Name That Exceeds Normal Limits'.repeat(10); + const result = instance.convertNodeToAiTool(fullNodeWrapper); + expect(result.description.name.endsWith('Tool')).toBe(true); + expect(result.description.displayName.endsWith('Tool')).toBe(true); + }); + + it('should handle nodes with special characters in name and displayName', () => { + fullNodeWrapper.description.name = 'special@#$%Node'; + fullNodeWrapper.description.displayName = 'Special @#$% Node'; + const result = instance.convertNodeToAiTool(fullNodeWrapper); + expect(result.description.name).toBe('special@#$%NodeTool'); + expect(result.description.displayName).toBe('Special @#$% Node Tool'); + }); + }); }); diff --git a/packages/cli/src/__tests__/workflow-helpers.test.ts b/packages/cli/src/__tests__/manual-execution.service.test.ts similarity index 69% rename from 
packages/cli/src/__tests__/workflow-helpers.test.ts rename to packages/cli/src/__tests__/manual-execution.service.test.ts index e24cfa1f68..383a8dc87c 100644 --- a/packages/cli/src/__tests__/workflow-helpers.test.ts +++ b/packages/cli/src/__tests__/manual-execution.service.test.ts @@ -1,8 +1,11 @@ +import { mock } from 'jest-mock-extended'; import type { Workflow, IWorkflowExecutionDataProcess } from 'n8n-workflow'; -import { getExecutionStartNode } from '@/workflow-helpers'; +import { ManualExecutionService } from '@/manual-execution.service'; + +describe('ManualExecutionService', () => { + const manualExecutionService = new ManualExecutionService(mock()); -describe('WorkflowHelpers', () => { describe('getExecutionStartNode', () => { it('Should return undefined', () => { const data = { @@ -16,9 +19,10 @@ describe('WorkflowHelpers', () => { }; }, } as unknown as Workflow; - const executionStartNode = getExecutionStartNode(data, workflow); + const executionStartNode = manualExecutionService.getExecutionStartNode(data, workflow); expect(executionStartNode).toBeUndefined(); }); + it('Should return startNode', () => { const data = { pinData: { @@ -37,7 +41,7 @@ describe('WorkflowHelpers', () => { return undefined; }, } as unknown as Workflow; - const executionStartNode = getExecutionStartNode(data, workflow); + const executionStartNode = manualExecutionService.getExecutionStartNode(data, workflow); expect(executionStartNode).toEqual({ name: 'node2', }); diff --git a/packages/cli/src/__tests__/node-types.test.ts b/packages/cli/src/__tests__/node-types.test.ts index 11e2c5ba2b..78d0c5e18a 100644 --- a/packages/cli/src/__tests__/node-types.test.ts +++ b/packages/cli/src/__tests__/node-types.test.ts @@ -1,95 +1,114 @@ import { mock } from 'jest-mock-extended'; -import type { INodeType, IVersionedNodeType } from 'n8n-workflow'; +import { UnrecognizedNodeTypeError } from 'n8n-core'; +import type { + LoadedClass, + INodeType, + IVersionedNodeType, + INodeTypeDescription, +} from 'n8n-workflow'; -import type { LoadNodesAndCredentials } from '@/load-nodes-and-credentials'; - -import { NodeTypes } from '../node-types'; +import { LoadNodesAndCredentials } from '@/load-nodes-and-credentials'; +import { NodeTypes } from '@/node-types'; describe('NodeTypes', () => { - let nodeTypes: NodeTypes; const loadNodesAndCredentials = mock(); + const nodeTypes: NodeTypes = new NodeTypes(loadNodesAndCredentials); + + const nonVersionedNode: LoadedClass = { + sourcePath: '', + type: { + description: mock({ + name: 'n8n-nodes-base.nonVersioned', + usableAsTool: undefined, + }), + }, + }; + const v1Node = mock(); + const v2Node = mock(); + const versionedNode: LoadedClass = { + sourcePath: '', + type: { + description: mock({ + name: 'n8n-nodes-base.versioned', + }), + currentVersion: 2, + nodeVersions: { + 1: v1Node, + 2: v2Node, + }, + getNodeType(version) { + if (version === 1) return v1Node; + return v2Node; + }, + }, + }; + const toolSupportingNode: LoadedClass = { + sourcePath: '', + type: { + description: mock({ + name: 'n8n-nodes-base.testNode', + displayName: 'TestNode', + usableAsTool: true, + properties: [], + }), + }, + }; + + loadNodesAndCredentials.getNode.mockImplementation((fullNodeType) => { + const [packageName, nodeType] = fullNodeType.split('.'); + if (nodeType === 'nonVersioned') return nonVersionedNode; + if (nodeType === 'versioned') return versionedNode; + if (nodeType === 'testNode') return toolSupportingNode; + throw new UnrecognizedNodeTypeError(packageName, nodeType); + }); + beforeEach(() => { 
jest.clearAllMocks(); - nodeTypes = new NodeTypes(loadNodesAndCredentials); + }); + + describe('getByName', () => { + it('should return node type when it exists', () => { + const result = nodeTypes.getByName('n8n-nodes-base.nonVersioned'); + expect(result).toBe(nonVersionedNode.type); + }); }); describe('getByNameAndVersion', () => { - const nodeTypeName = 'n8n-nodes-base.testNode'; + it('should throw an error if the package does not exist', () => { + expect(() => nodeTypes.getByNameAndVersion('invalid-package.unknownNode')).toThrow( + 'Unrecognized node type: invalid-package.unknownNode', + ); + }); it('should throw an error if the node-type does not exist', () => { - const nodeTypeName = 'unknownNode'; - - // @ts-expect-error overwriting a readonly property - loadNodesAndCredentials.loadedNodes = {}; - // @ts-expect-error overwriting a readonly property - loadNodesAndCredentials.knownNodes = {}; - - expect(() => nodeTypes.getByNameAndVersion(nodeTypeName)).toThrow( - 'Unrecognized node type: unknownNode', + expect(() => nodeTypes.getByNameAndVersion('n8n-nodes-base.unknownNode')).toThrow( + 'Unrecognized node type: n8n-nodes-base.unknownNode', ); }); it('should return a regular node-type without version', () => { - const nodeType = mock(); - - // @ts-expect-error overwriting a readonly property - loadNodesAndCredentials.loadedNodes = { - [nodeTypeName]: { type: nodeType }, - }; - - const result = nodeTypes.getByNameAndVersion(nodeTypeName); - - expect(result).toEqual(nodeType); + const result = nodeTypes.getByNameAndVersion('n8n-nodes-base.nonVersioned'); + expect(result).toBe(nonVersionedNode.type); }); it('should return a regular node-type with version', () => { - const nodeTypeV1 = mock(); - const nodeType = mock({ - nodeVersions: { 1: nodeTypeV1 }, - getNodeType: () => nodeTypeV1, - }); - - // @ts-expect-error overwriting a readonly property - loadNodesAndCredentials.loadedNodes = { - [nodeTypeName]: { type: nodeType }, - }; - - const result = nodeTypes.getByNameAndVersion(nodeTypeName); - - expect(result).toEqual(nodeTypeV1); + const result = nodeTypes.getByNameAndVersion('n8n-nodes-base.versioned'); + expect(result).toBe(v2Node); }); it('should throw when a node-type is requested as tool, but does not support being used as one', () => { - const nodeType = mock(); - - // @ts-expect-error overwriting a readonly property - loadNodesAndCredentials.loadedNodes = { - [nodeTypeName]: { type: nodeType }, - }; - - expect(() => nodeTypes.getByNameAndVersion(`${nodeTypeName}Tool`)).toThrow( + expect(() => nodeTypes.getByNameAndVersion('n8n-nodes-base.nonVersionedTool')).toThrow( 'Node cannot be used as a tool', ); }); it('should return the tool node-type when requested as tool', () => { - const nodeType = mock(); - // @ts-expect-error can't use a mock here - nodeType.description = { - name: nodeTypeName, - displayName: 'TestNode', - usableAsTool: true, - properties: [], - }; - - // @ts-expect-error overwriting a readonly property - loadNodesAndCredentials.loadedNodes = { - [nodeTypeName]: { type: nodeType }, - }; - - const result = nodeTypes.getByNameAndVersion(`${nodeTypeName}Tool`); - expect(result).not.toEqual(nodeType); + // @ts-expect-error don't mock convertNodeToAiTool for now + loadNodesAndCredentials.convertNodeToAiTool = + LoadNodesAndCredentials.prototype.convertNodeToAiTool; + const result = nodeTypes.getByNameAndVersion('n8n-nodes-base.testNodeTool'); + expect(result).not.toEqual(toolSupportingNode); expect(result.description.name).toEqual('n8n-nodes-base.testNodeTool'); 
expect(result.description.displayName).toEqual('TestNode Tool'); expect(result.description.codex?.categories).toContain('AI'); @@ -97,4 +116,47 @@ describe('NodeTypes', () => { expect(result.description.outputs).toEqual(['ai_tool']); }); }); + + describe('getWithSourcePath', () => { + it('should return description and source path for existing node', () => { + const result = nodeTypes.getWithSourcePath('n8n-nodes-base.nonVersioned', 1); + expect(result).toHaveProperty('description'); + expect(result).toHaveProperty('sourcePath'); + expect(result.sourcePath).toBe(nonVersionedNode.sourcePath); + }); + + it('should throw error for non-existent node', () => { + expect(() => nodeTypes.getWithSourcePath('n8n-nodes-base.nonExistent', 1)).toThrow( + 'Unrecognized node type: n8n-nodes-base.nonExistent', + ); + }); + }); + + describe('getKnownTypes', () => { + it('should return known node types', () => { + // @ts-expect-error readonly property + loadNodesAndCredentials.knownNodes = ['n8n-nodes-base.nonVersioned']; + const result = nodeTypes.getKnownTypes(); + expect(result).toEqual(['n8n-nodes-base.nonVersioned']); + }); + }); + + describe('getNodeTypeDescriptions', () => { + it('should return descriptions for valid node types', () => { + const nodeTypes = new NodeTypes(loadNodesAndCredentials); + const result = nodeTypes.getNodeTypeDescriptions([ + { name: 'n8n-nodes-base.nonVersioned', version: 1 }, + ]); + + expect(result).toHaveLength(1); + expect(result[0].name).toBe('n8n-nodes-base.nonVersioned'); + }); + + it('should throw error for invalid node type', () => { + const nodeTypes = new NodeTypes(loadNodesAndCredentials); + expect(() => + nodeTypes.getNodeTypeDescriptions([{ name: 'n8n-nodes-base.nonExistent', version: 1 }]), + ).toThrow('Unrecognized node type: n8n-nodes-base.nonExistent'); + }); + }); }); diff --git a/packages/cli/src/__tests__/wait-tracker.test.ts b/packages/cli/src/__tests__/wait-tracker.test.ts index 49e8517272..6721c31bae 100644 --- a/packages/cli/src/__tests__/wait-tracker.test.ts +++ b/packages/cli/src/__tests__/wait-tracker.test.ts @@ -1,7 +1,9 @@ import { mock } from 'jest-mock-extended'; import type { InstanceSettings } from 'n8n-core'; -import type { IWorkflowBase } from 'n8n-workflow'; +import type { IRun, IWorkflowBase } from 'n8n-workflow'; +import { createDeferredPromise } from 'n8n-workflow'; +import type { ActiveExecutions } from '@/active-executions'; import type { Project } from '@/databases/entities/project'; import type { ExecutionRepository } from '@/databases/repositories/execution.repository'; import type { IExecutionResponse } from '@/interfaces'; @@ -12,15 +14,16 @@ import { WaitTracker } from '@/wait-tracker'; import type { WorkflowRunner } from '@/workflow-runner'; import { mockLogger } from '@test/mocking'; -jest.useFakeTimers(); +jest.useFakeTimers({ advanceTimers: true }); describe('WaitTracker', () => { + const activeExecutions = mock(); const ownershipService = mock(); const workflowRunner = mock(); const executionRepository = mock(); const multiMainSetup = mock(); const orchestrationService = new OrchestrationService(mock(), multiMainSetup, mock()); - const instanceSettings = mock({ isLeader: true }); + const instanceSettings = mock({ isLeader: true, isMultiMain: false }); const project = mock({ id: 'projectId' }); const execution = mock({ @@ -30,6 +33,7 @@ describe('WaitTracker', () => { mode: 'manual', data: mock({ pushRef: 'push_ref', + parentExecution: undefined, }), }); execution.workflowData = mock({ id: 'abcd' }); @@ -40,6 +44,7 @@ 
describe('WaitTracker', () => { mockLogger(), executionRepository, ownershipService, + activeExecutions, workflowRunner, orchestrationService, instanceSettings, @@ -80,7 +85,9 @@ describe('WaitTracker', () => { let startExecutionSpy: jest.SpyInstance, [executionId: string]>; beforeEach(() => { - executionRepository.findSingleExecution.mockResolvedValue(execution); + executionRepository.findSingleExecution + .calledWith(execution.id) + .mockResolvedValue(execution); executionRepository.getWaitingExecutions.mockResolvedValue([execution]); ownershipService.getWorkflowProjectCached.mockResolvedValue(project); @@ -110,13 +117,17 @@ describe('WaitTracker', () => { }); describe('startExecution()', () => { - it('should query for execution to start', async () => { + beforeEach(() => { executionRepository.getWaitingExecutions.mockResolvedValue([]); waitTracker.init(); - executionRepository.findSingleExecution.mockResolvedValue(execution); + executionRepository.findSingleExecution.calledWith(execution.id).mockResolvedValue(execution); ownershipService.getWorkflowProjectCached.mockResolvedValue(project); + execution.data.parentExecution = undefined; + }); + + it('should query for execution to start', async () => { await waitTracker.startExecution(execution.id); expect(executionRepository.findSingleExecution).toHaveBeenCalledWith(execution.id, { @@ -137,6 +148,65 @@ describe('WaitTracker', () => { execution.id, ); }); + + it('should also resume parent execution once sub-workflow finishes', async () => { + const parentExecution = mock({ + id: 'parent_execution_id', + finished: false, + }); + parentExecution.workflowData = mock({ id: 'parent_workflow_id' }); + execution.data.parentExecution = { + executionId: parentExecution.id, + workflowId: parentExecution.workflowData.id, + }; + executionRepository.findSingleExecution + .calledWith(parentExecution.id) + .mockResolvedValue(parentExecution); + const postExecutePromise = createDeferredPromise(); + activeExecutions.getPostExecutePromise + .calledWith(execution.id) + .mockReturnValue(postExecutePromise.promise); + + await waitTracker.startExecution(execution.id); + + expect(executionRepository.findSingleExecution).toHaveBeenNthCalledWith(1, execution.id, { + includeData: true, + unflattenData: true, + }); + + expect(workflowRunner.run).toHaveBeenCalledTimes(1); + expect(workflowRunner.run).toHaveBeenNthCalledWith( + 1, + { + executionMode: execution.mode, + executionData: execution.data, + workflowData: execution.workflowData, + projectId: project.id, + pushRef: execution.data.pushRef, + }, + false, + false, + execution.id, + ); + + postExecutePromise.resolve(mock()); + await jest.advanceTimersByTimeAsync(100); + + expect(workflowRunner.run).toHaveBeenCalledTimes(2); + expect(workflowRunner.run).toHaveBeenNthCalledWith( + 2, + { + executionMode: parentExecution.mode, + executionData: parentExecution.data, + workflowData: parentExecution.workflowData, + projectId: project.id, + pushRef: parentExecution.data.pushRef, + }, + false, + false, + parentExecution.id, + ); + }); }); describe('single-main setup', () => { @@ -151,8 +221,6 @@ describe('WaitTracker', () => { describe('multi-main setup', () => { it('should start tracking if leader', () => { - jest.spyOn(orchestrationService, 'isSingleMainSetup', 'get').mockReturnValue(false); - executionRepository.getWaitingExecutions.mockResolvedValue([]); waitTracker.init(); @@ -165,11 +233,11 @@ describe('WaitTracker', () => { mockLogger(), executionRepository, ownershipService, + activeExecutions, workflowRunner, 
orchestrationService, - mock({ isLeader: false }), + mock({ isLeader: false, isMultiMain: false }), ); - jest.spyOn(orchestrationService, 'isSingleMainSetup', 'get').mockReturnValue(false); executionRepository.getWaitingExecutions.mockResolvedValue([]); diff --git a/packages/cli/src/__tests__/workflow-execute-additional-data.test.ts b/packages/cli/src/__tests__/workflow-execute-additional-data.test.ts index d0aeb3111f..e7d94d3e34 100644 --- a/packages/cli/src/__tests__/workflow-execute-additional-data.test.ts +++ b/packages/cli/src/__tests__/workflow-execute-additional-data.test.ts @@ -1,11 +1,11 @@ import { mock } from 'jest-mock-extended'; import type { IWorkflowBase } from 'n8n-workflow'; -import { - type IExecuteWorkflowInfo, - type IWorkflowExecuteAdditionalData, - type ExecuteWorkflowOptions, - type IRun, - type INodeExecutionData, +import type { + IExecuteWorkflowInfo, + IWorkflowExecuteAdditionalData, + ExecuteWorkflowOptions, + IRun, + INodeExecutionData, } from 'n8n-workflow'; import type PCancelable from 'p-cancelable'; import Container from 'typedi'; @@ -50,6 +50,7 @@ const getMockRun = ({ lastNodeOutput }: { lastNodeOutput: Array @@ -114,7 +115,9 @@ describe('WorkflowExecuteAdditionalData', () => { }); describe('executeWorkflow', () => { - const runWithData = getMockRun({ lastNodeOutput: [[{ json: { test: 1 } }]] }); + const runWithData = getMockRun({ + lastNodeOutput: [[{ json: { test: 1 } }]], + }); beforeEach(() => { workflowRepository.get.mockResolvedValue( @@ -159,6 +162,23 @@ describe('WorkflowExecuteAdditionalData', () => { expect(executionRepository.setRunning).toHaveBeenCalledWith(EXECUTION_ID); }); + + it('should return waitTill property when workflow execution is waiting', async () => { + const waitTill = new Date(); + runWithData.waitTill = waitTill; + + const response = await executeWorkflow( + mock(), + mock(), + mock({ loadedWorkflowData: undefined, doNotWaitToFinish: false }), + ); + + expect(response).toEqual({ + data: runWithData.data.resultData.runData[LAST_NODE_EXECUTED][0].data!.main, + executionId: EXECUTION_ID, + waitTill, + }); + }); }); describe('getRunData', () => { @@ -230,6 +250,10 @@ describe('WorkflowExecuteAdditionalData', () => { waitingExecution: {}, waitingExecutionSource: {}, }, + parentExecution: { + executionId: '123', + workflowId: '567', + }, resultData: { runData: {} }, startData: {}, }, diff --git a/packages/cli/src/abstract-server.ts b/packages/cli/src/abstract-server.ts index f440b2879a..f4a8a5b2cc 100644 --- a/packages/cli/src/abstract-server.ts +++ b/packages/cli/src/abstract-server.ts @@ -94,11 +94,8 @@ export abstract class AbstractServer { const { app } = this; // Augment errors sent to Sentry - const { - Handlers: { requestHandler, errorHandler }, - } = await import('@sentry/node'); - app.use(requestHandler()); - app.use(errorHandler()); + const { setupExpressErrorHandler } = await import('@sentry/node'); + setupExpressErrorHandler(app); } private setupCommonMiddlewares() { diff --git a/packages/cli/src/active-workflow-manager.ts b/packages/cli/src/active-workflow-manager.ts index 22cc0f5700..6ef3753af7 100644 --- a/packages/cli/src/active-workflow-manager.ts +++ b/packages/cli/src/active-workflow-manager.ts @@ -1,9 +1,8 @@ /* eslint-disable @typescript-eslint/no-unsafe-member-access */ - import { ActiveWorkflows, + ErrorReporter, InstanceSettings, - NodeExecuteFunctions, PollContext, TriggerContext, } from 'n8n-core'; @@ -25,7 +24,6 @@ import type { import { Workflow, WorkflowActivationError, - ErrorReporterProxy as 
ErrorReporter, WebhookPathTakenError, ApplicationError, } from 'n8n-workflow'; @@ -41,10 +39,12 @@ import { import type { WorkflowEntity } from '@/databases/entities/workflow-entity'; import { WorkflowRepository } from '@/databases/repositories/workflow.repository'; import { OnShutdown } from '@/decorators/on-shutdown'; +import { ExecutionService } from '@/executions/execution.service'; import { ExternalHooks } from '@/external-hooks'; import type { IWorkflowDb } from '@/interfaces'; import { Logger } from '@/logging/logger.service'; import { NodeTypes } from '@/node-types'; +import { Publisher } from '@/scaling/pubsub/publisher.service'; import { ActiveWorkflowsService } from '@/services/active-workflows.service'; import { OrchestrationService } from '@/services/orchestration.service'; import * as WebhookHelpers from '@/webhooks/webhook-helpers'; @@ -53,9 +53,6 @@ import * as WorkflowExecuteAdditionalData from '@/workflow-execute-additional-da import { WorkflowExecutionService } from '@/workflows/workflow-execution.service'; import { WorkflowStaticDataService } from '@/workflows/workflow-static-data.service'; -import { ExecutionService } from './executions/execution.service'; -import { Publisher } from './scaling/pubsub/publisher.service'; - interface QueuedActivation { activationMode: WorkflowActivateMode; lastTimeout: number; @@ -69,6 +66,7 @@ export class ActiveWorkflowManager { constructor( private readonly logger: Logger, + private readonly errorReporter: ErrorReporter, private readonly activeWorkflows: ActiveWorkflows, private readonly activeExecutions: ActiveExecutions, private readonly externalHooks: ExternalHooks, @@ -186,12 +184,7 @@ export class ActiveWorkflowManager { try { // TODO: this should happen in a transaction, that way we don't need to manually remove this in `catch` await this.webhookService.storeWebhook(webhook); - await workflow.createWebhookIfNotExists( - webhookData, - NodeExecuteFunctions, - mode, - activation, - ); + await this.webhookService.createWebhookIfNotExists(workflow, webhookData, mode, activation); } catch (error) { if (activation === 'init' && error.name === 'QueryFailedError') { // n8n does not remove the registered webhooks on exit. 
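// A minimal sketch of the dependency-injection pattern these hunks introduce: the static
// `ErrorReporterProxy` from 'n8n-workflow' is replaced by an injected `ErrorReporter`
// service from 'n8n-core'. `ExampleService` below is hypothetical; only the imports and
// the `errorReporter.error(...)` call mirror the changes shown in this diff.
import { ErrorReporter } from 'n8n-core';
import { Service } from 'typedi';

@Service()
class ExampleService {
	constructor(private readonly errorReporter: ErrorReporter) {}

	async run(task: () => Promise<void>) {
		try {
			await task();
		} catch (error) {
			// previously: ErrorReporterProxy.error(error);
			this.errorReporter.error(error);
		}
	}
}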
@@ -205,7 +198,7 @@ export class ActiveWorkflowManager { try { await this.clearWebhooks(workflow.id); } catch (error1) { - ErrorReporter.error(error1); + this.errorReporter.error(error1); this.logger.error( `Could not remove webhooks of workflow "${workflow.id}" because of error: "${error1.message}"`, ); @@ -261,7 +254,7 @@ export class ActiveWorkflowManager { const webhooks = WebhookHelpers.getWorkflowWebhooks(workflow, additionalData, undefined, true); for (const webhookData of webhooks) { - await workflow.deleteWebhook(webhookData, NodeExecuteFunctions, mode, 'update'); + await this.webhookService.deleteWebhook(workflow, webhookData, mode, 'update'); } await this.workflowStaticDataService.saveStaticData(workflow); @@ -439,7 +432,7 @@ export class ActiveWorkflowManager { this.logger.info(' => Started'); } } catch (error) { - ErrorReporter.error(error); + this.errorReporter.error(error); this.logger.info( ' => ERROR: Workflow could not be activated on first try, keep on trying if not an auth issue', ); @@ -511,7 +504,7 @@ export class ActiveWorkflowManager { existingWorkflow?: WorkflowEntity, { shouldPublish } = { shouldPublish: true }, ) { - if (this.orchestrationService.isMultiMainSetupEnabled && shouldPublish) { + if (this.instanceSettings.isMultiMain && shouldPublish) { void this.publisher.publishCommand({ command: 'add-webhooks-triggers-and-pollers', payload: { workflowId }, @@ -557,7 +550,7 @@ export class ActiveWorkflowManager { settings: dbWorkflow.settings, }); - const canBeActivated = workflow.checkIfWorkflowCanBeActivated(STARTING_NODES); + const canBeActivated = this.checkIfWorkflowCanBeActivated(workflow, STARTING_NODES); if (!canBeActivated) { throw new WorkflowActivationError( @@ -601,6 +594,48 @@ export class ActiveWorkflowManager { return shouldDisplayActivationMessage; } + /** + * A workflow can only be activated if it has a node which has either triggers + * or webhooks defined. + * + * @param {string[]} [ignoreNodeTypes] Node-types to ignore in the check + */ + checkIfWorkflowCanBeActivated(workflow: Workflow, ignoreNodeTypes?: string[]): boolean { + let node: INode; + let nodeType: INodeType | undefined; + + for (const nodeName of Object.keys(workflow.nodes)) { + node = workflow.nodes[nodeName]; + + if (node.disabled === true) { + // Deactivated nodes can not trigger a run so ignore + continue; + } + + if (ignoreNodeTypes !== undefined && ignoreNodeTypes.includes(node.type)) { + continue; + } + + nodeType = this.nodeTypes.getByNameAndVersion(node.type, node.typeVersion); + + if (nodeType === undefined) { + // Type is not known so check is not possible + continue; + } + + if ( + nodeType.poll !== undefined || + nodeType.trigger !== undefined || + nodeType.webhook !== undefined + ) { + // Is a trigger node. So workflow can be activated. + return true; + } + } + + return false; + } + /** * Count all triggers in the workflow, excluding Manual Trigger. 
*/ @@ -635,7 +670,7 @@ export class ActiveWorkflowManager { try { await this.add(workflowId, activationMode, workflowData); } catch (error) { - ErrorReporter.error(error); + this.errorReporter.error(error); let lastTimeout = this.queuedActivations[workflowId].lastTimeout; if (lastTimeout < WORKFLOW_REACTIVATE_MAX_TIMEOUT) { lastTimeout = Math.min(lastTimeout * 2, WORKFLOW_REACTIVATE_MAX_TIMEOUT); @@ -703,11 +738,11 @@ export class ActiveWorkflowManager { // TODO: this should happen in a transaction // maybe, see: https://github.com/n8n-io/n8n/pull/8904#discussion_r1530150510 async remove(workflowId: string) { - if (this.orchestrationService.isMultiMainSetupEnabled) { + if (this.instanceSettings.isMultiMain) { try { await this.clearWebhooks(workflowId); } catch (error) { - ErrorReporter.error(error); + this.errorReporter.error(error); this.logger.error( `Could not remove webhooks of workflow "${workflowId}" because of error: "${error.message}"`, ); @@ -724,7 +759,7 @@ export class ActiveWorkflowManager { try { await this.clearWebhooks(workflowId); } catch (error) { - ErrorReporter.error(error); + this.errorReporter.error(error); this.logger.error( `Could not remove webhooks of workflow "${workflowId}" because of error: "${error.message}"`, ); diff --git a/packages/cli/src/collaboration/collaboration.service.ts b/packages/cli/src/collaboration/collaboration.service.ts index cb2ca0d77a..ece93bd5b2 100644 --- a/packages/cli/src/collaboration/collaboration.service.ts +++ b/packages/cli/src/collaboration/collaboration.service.ts @@ -1,6 +1,7 @@ import type { PushPayload } from '@n8n/api-types'; +import { ErrorReporter } from 'n8n-core'; import type { Workflow } from 'n8n-workflow'; -import { ApplicationError, ErrorReporterProxy } from 'n8n-workflow'; +import { ApplicationError } from 'n8n-workflow'; import { Service } from 'typedi'; import { CollaborationState } from '@/collaboration/collaboration.state'; @@ -20,6 +21,7 @@ import { parseWorkflowMessage } from './collaboration.message'; @Service() export class CollaborationService { constructor( + private readonly errorReporter: ErrorReporter, private readonly push: Push, private readonly state: CollaborationState, private readonly userRepository: UserRepository, @@ -31,7 +33,7 @@ export class CollaborationService { try { await this.handleUserMessage(event.userId, event.msg); } catch (error) { - ErrorReporterProxy.error( + this.errorReporter.error( new ApplicationError('Error handling CollaborationService push message', { extra: { msg: event.msg, diff --git a/packages/cli/src/commands/base-command.ts b/packages/cli/src/commands/base-command.ts index b8f15d9f33..286fec1de6 100644 --- a/packages/cli/src/commands/base-command.ts +++ b/packages/cli/src/commands/base-command.ts @@ -6,13 +6,9 @@ import { InstanceSettings, ObjectStoreService, DataDeduplicationService, + ErrorReporter, } from 'n8n-core'; -import { - ApplicationError, - ensureError, - ErrorReporterProxy as ErrorReporter, - sleep, -} from 'n8n-workflow'; +import { ApplicationError, ensureError, sleep } from 'n8n-workflow'; import { Container } from 'typedi'; import type { AbstractServer } from '@/abstract-server'; @@ -22,7 +18,6 @@ import * as CrashJournal from '@/crash-journal'; import * as Db from '@/db'; import { getDataDeduplicationService } from '@/deduplication'; import { DeprecationService } from '@/deprecation/deprecation.service'; -import { initErrorHandling } from '@/error-reporting'; import { MessageEventBus } from '@/eventbus/message-event-bus/message-event-bus'; import { 
TelemetryEventRelay } from '@/events/relays/telemetry.event-relay'; import { initExpressionEvaluator } from '@/expression-evaluator'; @@ -39,6 +34,8 @@ import { WorkflowHistoryManager } from '@/workflows/workflow-history/workflow-hi export abstract class BaseCommand extends Command { protected logger = Container.get(Logger); + protected errorReporter: ErrorReporter; + protected externalHooks?: ExternalHooks; protected nodeTypes: NodeTypes; @@ -63,7 +60,11 @@ export abstract class BaseCommand extends Command { protected needsCommunityPackages = false; async init(): Promise { - await initErrorHandling(); + this.errorReporter = Container.get(ErrorReporter); + await this.errorReporter.init( + this.instanceSettings.instanceType, + this.globalConfig.sentry.backendDsn, + ); initExpressionEvaluator(); process.once('SIGTERM', this.onTerminationSignal('SIGTERM')); @@ -130,7 +131,7 @@ export abstract class BaseCommand extends Command { } protected async exitWithCrash(message: string, error: unknown) { - ErrorReporter.error(new Error(message, { cause: error }), { level: 'fatal' }); + this.errorReporter.error(new Error(message, { cause: error }), { level: 'fatal' }); await sleep(2000); process.exit(1); } diff --git a/packages/cli/src/commands/execute-batch.ts b/packages/cli/src/commands/execute-batch.ts index a70717c40b..0b19e25652 100644 --- a/packages/cli/src/commands/execute-batch.ts +++ b/packages/cli/src/commands/execute-batch.ts @@ -4,7 +4,7 @@ import fs from 'fs'; import { diff } from 'json-diff'; import pick from 'lodash/pick'; import type { IRun, ITaskData, IWorkflowExecutionDataProcess } from 'n8n-workflow'; -import { ApplicationError, jsonParse, ErrorReporterProxy } from 'n8n-workflow'; +import { ApplicationError, jsonParse } from 'n8n-workflow'; import os from 'os'; import { sep } from 'path'; import { Container } from 'typedi'; @@ -822,7 +822,7 @@ export class ExecuteBatch extends BaseCommand { } } } catch (e) { - ErrorReporterProxy.error(e, { + this.errorReporter.error(e, { extra: { workflowId: workflowData.id, }, diff --git a/packages/cli/src/commands/start.ts b/packages/cli/src/commands/start.ts index 42b5df13e6..63ec3d9240 100644 --- a/packages/cli/src/commands/start.ts +++ b/packages/cli/src/commands/start.ts @@ -100,7 +100,7 @@ export class Start extends BaseCommand { await this.activeWorkflowManager.removeAllTriggerAndPollerBasedWorkflows(); - if (Container.get(OrchestrationService).isMultiMainSetupEnabled) { + if (this.instanceSettings.isMultiMain) { await Container.get(OrchestrationService).shutdown(); } @@ -192,6 +192,9 @@ export class Start extends BaseCommand { await super.init(); this.activeWorkflowManager = Container.get(ActiveWorkflowManager); + this.instanceSettings.setMultiMainEnabled( + config.getEnv('executions.mode') === 'queue' && this.globalConfig.multiMainSetup.enabled, + ); await this.initLicense(); await this.initOrchestration(); @@ -253,7 +256,7 @@ export class Start extends BaseCommand { this.logger.scoped(['scaling', 'pubsub']).debug('Pubsub setup completed'); - if (!orchestrationService.isMultiMainSetupEnabled) return; + if (this.instanceSettings.isSingleMain) return; orchestrationService.multiMainSetup .on('leader-stepdown', async () => { diff --git a/packages/cli/src/config/schema.ts b/packages/cli/src/config/schema.ts index 1891d8193d..54fa07e7f5 100644 --- a/packages/cli/src/config/schema.ts +++ b/packages/cli/src/config/schema.ts @@ -405,11 +405,4 @@ export const schema = { doc: 'Set this to 1 to enable the new partial execution logic by default.', }, }, - 
- virtualSchemaView: { - doc: 'Whether to display the virtualized schema view', - format: Boolean, - default: false, - env: 'N8N_VIRTUAL_SCHEMA_VIEW', - }, }; diff --git a/packages/cli/src/config/types.ts b/packages/cli/src/config/types.ts index 78f2358f5d..33fff9b946 100644 --- a/packages/cli/src/config/types.ts +++ b/packages/cli/src/config/types.ts @@ -80,6 +80,7 @@ type ExceptionPaths = { processedDataManager: IProcessedDataConfig; 'userManagement.isInstanceOwnerSetUp': boolean; 'ui.banners.dismissed': string[] | undefined; + easyAIWorkflowOnboarded: boolean | undefined; }; // ----------------------------------- diff --git a/packages/cli/src/credential-types.ts b/packages/cli/src/credential-types.ts index 24e2d9f2ba..a6d3f29eb0 100644 --- a/packages/cli/src/credential-types.ts +++ b/packages/cli/src/credential-types.ts @@ -1,13 +1,6 @@ -import { loadClassInIsolation } from 'n8n-core'; -import { - ApplicationError, - type ICredentialType, - type ICredentialTypes, - type LoadedClass, -} from 'n8n-workflow'; +import type { ICredentialType, ICredentialTypes } from 'n8n-workflow'; import { Service } from 'typedi'; -import { RESPONSE_ERROR_MESSAGES } from '@/constants'; import { LoadNodesAndCredentials } from '@/load-nodes-and-credentials'; @Service() @@ -20,7 +13,7 @@ export class CredentialTypes implements ICredentialTypes { } getByName(credentialType: string): ICredentialType { - return this.getCredential(credentialType).type; + return this.loadNodesAndCredentials.getCredential(credentialType).type; } getSupportedNodes(type: string): string[] { @@ -39,21 +32,4 @@ export class CredentialTypes implements ICredentialTypes { } return extendsArr; } - - private getCredential(type: string): LoadedClass { - const { loadedCredentials, knownCredentials } = this.loadNodesAndCredentials; - if (type in loadedCredentials) { - return loadedCredentials[type]; - } - - if (type in knownCredentials) { - const { className, sourcePath } = knownCredentials[type]; - const loaded: ICredentialType = loadClassInIsolation(sourcePath, className); - loadedCredentials[type] = { sourcePath, type: loaded }; - return loadedCredentials[type]; - } - throw new ApplicationError(RESPONSE_ERROR_MESSAGES.NO_CREDENTIAL, { - tags: { credentialType: type }, - }); - } } diff --git a/packages/cli/src/databases/entities/test-definition.ee.ts b/packages/cli/src/databases/entities/test-definition.ee.ts index 77f8ca2bdc..a7cb393b5d 100644 --- a/packages/cli/src/databases/entities/test-definition.ee.ts +++ b/packages/cli/src/databases/entities/test-definition.ee.ts @@ -5,7 +5,12 @@ import { AnnotationTagEntity } from '@/databases/entities/annotation-tag-entity. 
import type { TestMetric } from '@/databases/entities/test-metric.ee'; import { WorkflowEntity } from '@/databases/entities/workflow-entity'; -import { WithTimestampsAndStringId } from './abstract-entity'; +import { jsonColumnType, WithTimestampsAndStringId } from './abstract-entity'; + +// Entity representing a node in a workflow under test, for which data should be mocked during test execution +export type MockedNodeItem = { + name: string; +}; /** * Entity representing a Test Definition @@ -27,6 +32,9 @@ export class TestDefinition extends WithTimestampsAndStringId { @Column('text') description: string; + @Column(jsonColumnType, { default: '[]' }) + mockedNodes: MockedNodeItem[]; + /** * Relation to the workflow under test */ diff --git a/packages/cli/src/databases/migrations/common/1733133775640-AddMockedNodesColumnToTestDefinition.ts b/packages/cli/src/databases/migrations/common/1733133775640-AddMockedNodesColumnToTestDefinition.ts new file mode 100644 index 0000000000..09ce45722c --- /dev/null +++ b/packages/cli/src/databases/migrations/common/1733133775640-AddMockedNodesColumnToTestDefinition.ts @@ -0,0 +1,22 @@ +import type { MigrationContext, ReversibleMigration } from '@/databases/types'; + +// We have to use raw query migration instead of schemaBuilder helpers, +// because the typeorm schema builder implements addColumns by a table recreate for sqlite +// which causes weird issues with the migration +export class AddMockedNodesColumnToTestDefinition1733133775640 implements ReversibleMigration { + async up({ escape, runQuery }: MigrationContext) { + const tableName = escape.tableName('test_definition'); + const mockedNodesColumnName = escape.columnName('mockedNodes'); + + await runQuery( + `ALTER TABLE ${tableName} ADD COLUMN ${mockedNodesColumnName} JSON DEFAULT '[]' NOT NULL`, + ); + } + + async down({ escape, runQuery }: MigrationContext) { + const tableName = escape.tableName('test_definition'); + const columnName = escape.columnName('mockedNodes'); + + await runQuery(`ALTER TABLE ${tableName} DROP COLUMN ${columnName}`); + } +} diff --git a/packages/cli/src/databases/migrations/mysqldb/index.ts b/packages/cli/src/databases/migrations/mysqldb/index.ts index d962042333..b977f6b013 100644 --- a/packages/cli/src/databases/migrations/mysqldb/index.ts +++ b/packages/cli/src/databases/migrations/mysqldb/index.ts @@ -73,6 +73,7 @@ import { CreateTestDefinitionTable1730386903556 } from '../common/1730386903556- import { AddDescriptionToTestDefinition1731404028106 } from '../common/1731404028106-AddDescriptionToTestDefinition'; import { CreateTestMetricTable1732271325258 } from '../common/1732271325258-CreateTestMetricTable'; import { CreateTestRun1732549866705 } from '../common/1732549866705-CreateTestRunTable'; +import { AddMockedNodesColumnToTestDefinition1733133775640 } from '../common/1733133775640-AddMockedNodesColumnToTestDefinition'; export const mysqlMigrations: Migration[] = [ InitialMigration1588157391238, @@ -148,4 +149,5 @@ export const mysqlMigrations: Migration[] = [ MigrateTestDefinitionKeyToString1731582748663, CreateTestMetricTable1732271325258, CreateTestRun1732549866705, + AddMockedNodesColumnToTestDefinition1733133775640, ]; diff --git a/packages/cli/src/databases/migrations/postgresdb/index.ts b/packages/cli/src/databases/migrations/postgresdb/index.ts index 012b18e31d..985e6964e1 100644 --- a/packages/cli/src/databases/migrations/postgresdb/index.ts +++ b/packages/cli/src/databases/migrations/postgresdb/index.ts @@ -73,6 +73,7 @@ import { 
CreateTestDefinitionTable1730386903556 } from '../common/1730386903556- import { AddDescriptionToTestDefinition1731404028106 } from '../common/1731404028106-AddDescriptionToTestDefinition'; import { CreateTestMetricTable1732271325258 } from '../common/1732271325258-CreateTestMetricTable'; import { CreateTestRun1732549866705 } from '../common/1732549866705-CreateTestRunTable'; +import { AddMockedNodesColumnToTestDefinition1733133775640 } from '../common/1733133775640-AddMockedNodesColumnToTestDefinition'; export const postgresMigrations: Migration[] = [ InitialMigration1587669153312, @@ -148,4 +149,5 @@ export const postgresMigrations: Migration[] = [ MigrateTestDefinitionKeyToString1731582748663, CreateTestMetricTable1732271325258, CreateTestRun1732549866705, + AddMockedNodesColumnToTestDefinition1733133775640, ]; diff --git a/packages/cli/src/databases/migrations/sqlite/index.ts b/packages/cli/src/databases/migrations/sqlite/index.ts index 7c8fcbf86f..34d548b684 100644 --- a/packages/cli/src/databases/migrations/sqlite/index.ts +++ b/packages/cli/src/databases/migrations/sqlite/index.ts @@ -70,6 +70,7 @@ import { UpdateProcessedDataValueColumnToText1729607673464 } from '../common/172 import { CreateTestDefinitionTable1730386903556 } from '../common/1730386903556-CreateTestDefinitionTable'; import { CreateTestMetricTable1732271325258 } from '../common/1732271325258-CreateTestMetricTable'; import { CreateTestRun1732549866705 } from '../common/1732549866705-CreateTestRunTable'; +import { AddMockedNodesColumnToTestDefinition1733133775640 } from '../common/1733133775640-AddMockedNodesColumnToTestDefinition'; const sqliteMigrations: Migration[] = [ InitialMigration1588102412422, @@ -142,6 +143,7 @@ const sqliteMigrations: Migration[] = [ MigrateTestDefinitionKeyToString1731582748663, CreateTestMetricTable1732271325258, CreateTestRun1732549866705, + AddMockedNodesColumnToTestDefinition1733133775640, ]; export { sqliteMigrations }; diff --git a/packages/cli/src/databases/repositories/execution.repository.ts b/packages/cli/src/databases/repositories/execution.repository.ts index 5bef675a79..fbcb7de445 100644 --- a/packages/cli/src/databases/repositories/execution.repository.ts +++ b/packages/cli/src/databases/repositories/execution.repository.ts @@ -21,12 +21,8 @@ import { import { DateUtils } from '@n8n/typeorm/util/DateUtils'; import { parse, stringify } from 'flatted'; import pick from 'lodash/pick'; -import { BinaryDataService } from 'n8n-core'; -import { - ExecutionCancelledError, - ErrorReporterProxy as ErrorReporter, - ApplicationError, -} from 'n8n-workflow'; +import { BinaryDataService, ErrorReporter } from 'n8n-core'; +import { ExecutionCancelledError, ApplicationError } from 'n8n-workflow'; import type { AnnotationVote, ExecutionStatus, @@ -125,6 +121,7 @@ export class ExecutionRepository extends Repository { dataSource: DataSource, private readonly globalConfig: GlobalConfig, private readonly logger: Logger, + private readonly errorReporter: ErrorReporter, private readonly executionDataRepository: ExecutionDataRepository, private readonly binaryDataService: BinaryDataService, ) { @@ -209,7 +206,7 @@ export class ExecutionRepository extends Repository { reportInvalidExecutions(executions: ExecutionEntity[]) { if (executions.length === 0) return; - ErrorReporter.error( + this.errorReporter.error( new ApplicationError('Found executions without executionData', { extra: { executionIds: executions.map(({ id }) => id) }, }), diff --git 
a/packages/cli/src/databases/repositories/installed-packages.repository.ts b/packages/cli/src/databases/repositories/installed-packages.repository.ts index 50f9e7ad4f..77faf96817 100644 --- a/packages/cli/src/databases/repositories/installed-packages.repository.ts +++ b/packages/cli/src/databases/repositories/installed-packages.repository.ts @@ -35,7 +35,7 @@ export class InstalledPackagesRepository extends Repository { for (const loadedNode of loadedNodes) { const installedNode = this.installedNodesRepository.create({ name: nodeTypes[loadedNode.name].type.description.displayName, - type: loadedNode.name, + type: `${packageName}.${loadedNode.name}`, latestVersion: loadedNode.version, package: { packageName }, }); diff --git a/packages/cli/src/databases/repositories/settings.repository.ts b/packages/cli/src/databases/repositories/settings.repository.ts index 8d87fcffff..aa4410d6d4 100644 --- a/packages/cli/src/databases/repositories/settings.repository.ts +++ b/packages/cli/src/databases/repositories/settings.repository.ts @@ -1,5 +1,5 @@ import { DataSource, Repository } from '@n8n/typeorm'; -import { ErrorReporterProxy as ErrorReporter } from 'n8n-workflow'; +import { ErrorReporter } from 'n8n-core'; import { Service } from 'typedi'; import config from '@/config'; @@ -9,7 +9,10 @@ import { Settings } from '../entities/settings'; @Service() export class SettingsRepository extends Repository { - constructor(dataSource: DataSource) { + constructor( + dataSource: DataSource, + private readonly errorReporter: ErrorReporter, + ) { super(Settings, dataSource.manager); } @@ -49,7 +52,7 @@ export class SettingsRepository extends Repository { config.set(key, value); return { success: true }; } catch (error) { - ErrorReporter.error(error); + this.errorReporter.error(error); } return { success: false }; } diff --git a/packages/cli/src/databases/repositories/shared-workflow.repository.ts b/packages/cli/src/databases/repositories/shared-workflow.repository.ts index 31eef22e2a..8f4bedcb15 100644 --- a/packages/cli/src/databases/repositories/shared-workflow.repository.ts +++ b/packages/cli/src/databases/repositories/shared-workflow.repository.ts @@ -211,4 +211,13 @@ export class SharedWorkflowRepository extends Repository { }, }); } + + async getAllRelationsForWorkflows(workflowIds: string[]) { + return await this.find({ + where: { + workflowId: In(workflowIds), + }, + relations: ['project'], + }); + } } diff --git a/packages/cli/src/databases/repositories/workflow.repository.ts b/packages/cli/src/databases/repositories/workflow.repository.ts index 0317124472..5dcd369def 100644 --- a/packages/cli/src/databases/repositories/workflow.repository.ts +++ b/packages/cli/src/databases/repositories/workflow.repository.ts @@ -95,7 +95,8 @@ export class WorkflowRepository extends Repository { .execute(); } - async getMany(sharedWorkflowIds: string[], options?: ListQuery.Options) { + async getMany(sharedWorkflowIds: string[], originalOptions: ListQuery.Options = {}) { + const options = structuredClone(originalOptions); if (sharedWorkflowIds.length === 0) return { workflows: [], count: 0 }; if (typeof options?.filter?.projectId === 'string' && options.filter.projectId !== '') { diff --git a/packages/cli/src/databases/subscribers/user-subscriber.ts b/packages/cli/src/databases/subscribers/user-subscriber.ts index 2f9e698890..1c55572b14 100644 --- a/packages/cli/src/databases/subscribers/user-subscriber.ts +++ b/packages/cli/src/databases/subscribers/user-subscriber.ts @@ -1,6 +1,7 @@ import type { 
EntitySubscriberInterface, UpdateEvent } from '@n8n/typeorm'; import { EventSubscriber } from '@n8n/typeorm'; -import { ApplicationError, ErrorReporterProxy } from 'n8n-workflow'; +import { ErrorReporter } from 'n8n-core'; +import { ApplicationError } from 'n8n-workflow'; import { Container } from 'typedi'; import { Logger } from '@/logging/logger.service'; @@ -11,6 +12,8 @@ import { UserRepository } from '../repositories/user.repository'; @EventSubscriber() export class UserSubscriber implements EntitySubscriberInterface { + private readonly eventReporter = Container.get(ErrorReporter); + listenTo() { return User; } @@ -47,7 +50,7 @@ export class UserSubscriber implements EntitySubscriberInterface { const message = "Could not update the personal project's name"; Container.get(Logger).warn(message, event.entity); const exception = new ApplicationError(message); - ErrorReporterProxy.warn(exception, event.entity); + this.eventReporter.warn(exception, event.entity); return; } @@ -69,7 +72,7 @@ export class UserSubscriber implements EntitySubscriberInterface { const message = "Could not update the personal project's name"; Container.get(Logger).warn(message, event.entity); const exception = new ApplicationError(message); - ErrorReporterProxy.warn(exception, event.entity); + this.eventReporter.warn(exception, event.entity); } } } diff --git a/packages/cli/src/db.ts b/packages/cli/src/db.ts index 13147b4106..e1c2b0e402 100644 --- a/packages/cli/src/db.ts +++ b/packages/cli/src/db.ts @@ -2,11 +2,8 @@ import type { EntityManager } from '@n8n/typeorm'; // eslint-disable-next-line n8n-local-rules/misplaced-n8n-typeorm-import import { DataSource as Connection } from '@n8n/typeorm'; -import { - DbConnectionTimeoutError, - ensureError, - ErrorReporterProxy as ErrorReporter, -} from 'n8n-workflow'; +import { ErrorReporter } from 'n8n-core'; +import { DbConnectionTimeoutError, ensureError } from 'n8n-workflow'; import { Container } from 'typedi'; import { inTest } from '@/constants'; @@ -38,7 +35,7 @@ if (!inTest) { connectionState.connected = true; return; } catch (error) { - ErrorReporter.error(error); + Container.get(ErrorReporter).error(error); } finally { pingTimer = setTimeout(pingDBFn, 2000); } diff --git a/packages/cli/src/decorators/__tests__/on-shutdown.test.ts b/packages/cli/src/decorators/__tests__/on-shutdown.test.ts index 28e70dac47..774ae2ef48 100644 --- a/packages/cli/src/decorators/__tests__/on-shutdown.test.ts +++ b/packages/cli/src/decorators/__tests__/on-shutdown.test.ts @@ -8,7 +8,7 @@ describe('OnShutdown', () => { let shutdownService: ShutdownService; beforeEach(() => { - shutdownService = new ShutdownService(mock()); + shutdownService = new ShutdownService(mock(), mock()); Container.set(ShutdownService, shutdownService); jest.spyOn(shutdownService, 'register'); }); diff --git a/packages/cli/src/environments/source-control/source-control-import.service.ee.ts b/packages/cli/src/environments/source-control/source-control-import.service.ee.ts index b5012d2762..b24fe74530 100644 --- a/packages/cli/src/environments/source-control/source-control-import.service.ee.ts +++ b/packages/cli/src/environments/source-control/source-control-import.service.ee.ts @@ -1,13 +1,8 @@ // eslint-disable-next-line n8n-local-rules/misplaced-n8n-typeorm-import import { In } from '@n8n/typeorm'; import glob from 'fast-glob'; -import { Credentials, InstanceSettings } from 'n8n-core'; -import { - ApplicationError, - jsonParse, - ErrorReporterProxy as ErrorReporter, - ensureError, -} from 'n8n-workflow'; +import 
{ Credentials, ErrorReporter, InstanceSettings } from 'n8n-core'; +import { ApplicationError, jsonParse, ensureError } from 'n8n-workflow'; import { readFile as fsReadFile } from 'node:fs/promises'; import path from 'path'; import { Container, Service } from 'typedi'; @@ -56,6 +51,7 @@ export class SourceControlImportService { constructor( private readonly logger: Logger, + private readonly errorReporter: ErrorReporter, private readonly variablesService: VariablesService, private readonly activeWorkflowManager: ActiveWorkflowManager, private readonly tagRepository: TagRepository, @@ -104,7 +100,7 @@ export class SourceControlImportService { if (local.updatedAt instanceof Date) { updatedAt = local.updatedAt; } else { - ErrorReporter.warn('updatedAt is not a Date', { + this.errorReporter.warn('updatedAt is not a Date', { extra: { type: typeof local.updatedAt, value: local.updatedAt, diff --git a/packages/cli/src/environments/variables/variables.controller.ee.ts b/packages/cli/src/environments/variables/variables.controller.ee.ts index 5da4221a3d..a38906b800 100644 --- a/packages/cli/src/environments/variables/variables.controller.ee.ts +++ b/packages/cli/src/environments/variables/variables.controller.ee.ts @@ -1,4 +1,7 @@ +import { VariableListRequestDto } from '@n8n/api-types'; + import { Delete, Get, GlobalScope, Licensed, Patch, Post, RestController } from '@/decorators'; +import { Query } from '@/decorators/args'; import { BadRequestError } from '@/errors/response-errors/bad-request.error'; import { NotFoundError } from '@/errors/response-errors/not-found.error'; import { VariableCountLimitReachedError } from '@/errors/variable-count-limit-reached.error'; @@ -13,8 +16,8 @@ export class VariablesController { @Get('/') @GlobalScope('variable:list') - async getVariables() { - return await this.variablesService.getAllCached(); + async getVariables(_req: unknown, _res: unknown, @Query query: VariableListRequestDto) { + return await this.variablesService.getAllCached(query.state); } @Post('/') diff --git a/packages/cli/src/environments/variables/variables.service.ee.ts b/packages/cli/src/environments/variables/variables.service.ee.ts index 31cf725099..38ad5703ea 100644 --- a/packages/cli/src/environments/variables/variables.service.ee.ts +++ b/packages/cli/src/environments/variables/variables.service.ee.ts @@ -18,13 +18,22 @@ export class VariablesService { private readonly eventService: EventService, ) {} - async getAllCached(): Promise { - const variables = await this.cacheService.get('variables', { + async getAllCached(state?: 'empty'): Promise { + let variables = await this.cacheService.get('variables', { async refreshFn() { return await Container.get(VariablesService).findAll(); }, }); - return (variables as Array>).map((v) => this.variablesRepository.create(v)); + + if (variables === undefined) { + return []; + } + + if (state === 'empty') { + variables = variables.filter((v) => v.value === ''); + } + + return variables.map((v) => this.variablesRepository.create(v)); } async getCount(): Promise { diff --git a/packages/cli/src/error-reporting.ts b/packages/cli/src/error-reporting.ts deleted file mode 100644 index fd2ce078cd..0000000000 --- a/packages/cli/src/error-reporting.ts +++ /dev/null @@ -1,121 +0,0 @@ -import { GlobalConfig } from '@n8n/config'; -// eslint-disable-next-line n8n-local-rules/misplaced-n8n-typeorm-import -import { QueryFailedError } from '@n8n/typeorm'; -import { AxiosError } from 'axios'; -import { createHash } from 'crypto'; -import { InstanceSettings } from 
'n8n-core'; -import { ErrorReporterProxy, ApplicationError } from 'n8n-workflow'; -import Container from 'typedi'; - -let initialized = false; - -export const initErrorHandling = async () => { - if (initialized) return; - - process.on('uncaughtException', (error) => { - ErrorReporterProxy.error(error); - }); - - const dsn = Container.get(GlobalConfig).sentry.backendDsn; - if (!dsn) { - initialized = true; - return; - } - - // Collect longer stacktraces - Error.stackTraceLimit = 50; - - const { - N8N_VERSION: release, - ENVIRONMENT: environment, - DEPLOYMENT_NAME: serverName, - } = process.env; - - const { init, captureException, setTag } = await import('@sentry/node'); - - const { RewriteFrames } = await import('@sentry/integrations'); - const { Integrations } = await import('@sentry/node'); - - const enabledIntegrations = [ - 'InboundFilters', - 'FunctionToString', - 'LinkedErrors', - 'OnUnhandledRejection', - 'ContextLines', - ]; - const seenErrors = new Set(); - - init({ - dsn, - release, - environment, - enableTracing: false, - serverName, - beforeBreadcrumb: () => null, - integrations: (integrations) => [ - ...integrations.filter(({ name }) => enabledIntegrations.includes(name)), - new RewriteFrames({ root: process.cwd() }), - new Integrations.RequestData({ - include: { - cookies: false, - data: false, - headers: false, - query_string: false, - url: true, - user: false, - }, - }), - ], - async beforeSend(event, { originalException }) { - if (!originalException) return null; - - if (originalException instanceof Promise) { - originalException = await originalException.catch((error) => error as Error); - } - - if (originalException instanceof AxiosError) return null; - - if ( - originalException instanceof QueryFailedError && - ['SQLITE_FULL', 'SQLITE_IOERR'].some((errMsg) => originalException.message.includes(errMsg)) - ) { - return null; - } - - if (originalException instanceof ApplicationError) { - const { level, extra, tags } = originalException; - if (level === 'warning') return null; - event.level = level; - if (extra) event.extra = { ...event.extra, ...extra }; - if (tags) event.tags = { ...event.tags, ...tags }; - } - - if ( - originalException instanceof Error && - 'cause' in originalException && - originalException.cause instanceof Error && - 'level' in originalException.cause && - originalException.cause.level === 'warning' - ) { - // handle underlying errors propagating from dependencies like ai-assistant-sdk - return null; - } - - if (originalException instanceof Error && originalException.stack) { - const eventHash = createHash('sha1').update(originalException.stack).digest('base64'); - if (seenErrors.has(eventHash)) return null; - seenErrors.add(eventHash); - } - - return event; - }, - }); - - setTag('server_type', Container.get(InstanceSettings).instanceType); - - ErrorReporterProxy.init({ - report: (error, options) => captureException(error, options), - }); - - initialized = true; -}; diff --git a/packages/cli/src/evaluation/test-definition.schema.ts b/packages/cli/src/evaluation/test-definition.schema.ts index 8f71ee6858..7760ae9dac 100644 --- a/packages/cli/src/evaluation/test-definition.schema.ts +++ b/packages/cli/src/evaluation/test-definition.schema.ts @@ -16,5 +16,6 @@ export const testDefinitionPatchRequestBodySchema = z description: z.string().optional(), evaluationWorkflowId: z.string().min(1).optional(), annotationTagId: z.string().min(1).optional(), + mockedNodes: z.array(z.object({ name: z.string() })).optional(), }) .strict(); diff --git 
a/packages/cli/src/evaluation/test-definition.service.ee.ts b/packages/cli/src/evaluation/test-definition.service.ee.ts index 55b7339ebe..e9a31e7eee 100644 --- a/packages/cli/src/evaluation/test-definition.service.ee.ts +++ b/packages/cli/src/evaluation/test-definition.service.ee.ts @@ -1,6 +1,6 @@ import { Service } from 'typedi'; -import type { TestDefinition } from '@/databases/entities/test-definition.ee'; +import type { MockedNodeItem, TestDefinition } from '@/databases/entities/test-definition.ee'; import { AnnotationTagRepository } from '@/databases/repositories/annotation-tag.repository.ee'; import { TestDefinitionRepository } from '@/databases/repositories/test-definition.repository.ee'; import { BadRequestError } from '@/errors/response-errors/bad-request.error'; @@ -31,6 +31,7 @@ export class TestDefinitionService { evaluationWorkflowId?: string; annotationTagId?: string; id?: string; + mockedNodes?: MockedNodeItem[]; }) { const entity: TestDefinitionLike = {}; @@ -64,6 +65,10 @@ export class TestDefinitionService { }; } + if (attrs.mockedNodes) { + entity.mockedNodes = attrs.mockedNodes; + } + return entity; } @@ -107,6 +112,24 @@ export class TestDefinitionService { } } + // If there are mocked nodes, validate them + if (attrs.mockedNodes && attrs.mockedNodes.length > 0) { + const existingTestDefinition = await this.testDefinitionRepository.findOneOrFail({ + where: { + id, + }, + relations: ['workflow'], + }); + + const existingNodeNames = new Set(existingTestDefinition.workflow.nodes.map((n) => n.name)); + + attrs.mockedNodes.forEach((node) => { + if (!existingNodeNames.has(node.name)) { + throw new BadRequestError(`Pinned node not found in the workflow: ${node.name}`); + } + }); + } + // Update the test definition const queryResult = await this.testDefinitionRepository.update(id, this.toEntityLike(attrs)); diff --git a/packages/cli/src/evaluation/test-definitions.types.ee.ts b/packages/cli/src/evaluation/test-definitions.types.ee.ts index 1beb415276..b7441a2763 100644 --- a/packages/cli/src/evaluation/test-definitions.types.ee.ts +++ b/packages/cli/src/evaluation/test-definitions.types.ee.ts @@ -1,3 +1,4 @@ +import type { MockedNodeItem } from '@/databases/entities/test-definition.ee'; import type { AuthenticatedRequest, ListQuery } from '@/requests'; // ---------------------------------- @@ -26,7 +27,12 @@ export declare namespace TestDefinitionsRequest { type Patch = AuthenticatedRequest< RouteParams.TestId, {}, - { name?: string; evaluationWorkflowId?: string; annotationTagId?: string } + { + name?: string; + evaluationWorkflowId?: string; + annotationTagId?: string; + mockedNodes?: MockedNodeItem[]; + } >; type Delete = AuthenticatedRequest; diff --git a/packages/cli/src/evaluation/test-runner/__tests__/create-pin-data.ee.test.ts b/packages/cli/src/evaluation/test-runner/__tests__/create-pin-data.ee.test.ts index 6da88f9c20..685c15552b 100644 --- a/packages/cli/src/evaluation/test-runner/__tests__/create-pin-data.ee.test.ts +++ b/packages/cli/src/evaluation/test-runner/__tests__/create-pin-data.ee.test.ts @@ -13,7 +13,9 @@ const executionDataJson = JSON.parse( describe('createPinData', () => { test('should create pin data from past execution data', () => { - const pinData = createPinData(wfUnderTestJson, executionDataJson); + const mockedNodes = ['When clicking ‘Test workflow’'].map((name) => ({ name })); + + const pinData = createPinData(wfUnderTestJson, mockedNodes, executionDataJson); expect(pinData).toEqual( expect.objectContaining({ @@ -21,4 +23,34 @@ 
describe('createPinData', () => { }), ); }); + + test('should not create pin data for non-existing mocked nodes', () => { + const mockedNodes = ['Non-existing node'].map((name) => ({ name })); + + const pinData = createPinData(wfUnderTestJson, mockedNodes, executionDataJson); + + expect(pinData).toEqual({}); + }); + + test('should create pin data for all mocked nodes', () => { + const mockedNodes = ['When clicking ‘Test workflow’', 'Edit Fields', 'Code'].map((name) => ({ + name, + })); + + const pinData = createPinData(wfUnderTestJson, mockedNodes, executionDataJson); + + expect(pinData).toEqual( + expect.objectContaining({ + 'When clicking ‘Test workflow’': expect.anything(), + 'Edit Fields': expect.anything(), + Code: expect.anything(), + }), + ); + }); + + test('should return empty object if no mocked nodes are provided', () => { + const pinData = createPinData(wfUnderTestJson, [], executionDataJson); + + expect(pinData).toEqual({}); + }); }); diff --git a/packages/cli/src/evaluation/test-runner/__tests__/get-start-node.ee.test.ts b/packages/cli/src/evaluation/test-runner/__tests__/get-start-node.ee.test.ts index 20f75fcf57..107cea80c6 100644 --- a/packages/cli/src/evaluation/test-runner/__tests__/get-start-node.ee.test.ts +++ b/packages/cli/src/evaluation/test-runner/__tests__/get-start-node.ee.test.ts @@ -1,7 +1,7 @@ import { readFileSync } from 'fs'; import path from 'path'; -import { getPastExecutionStartNode } from '../utils.ee'; +import { getPastExecutionTriggerNode } from '../utils.ee'; const executionDataJson = JSON.parse( readFileSync(path.join(__dirname, './mock-data/execution-data.json'), { encoding: 'utf-8' }), @@ -21,19 +21,19 @@ const executionDataMultipleTriggersJson2 = JSON.parse( describe('getPastExecutionStartNode', () => { test('should return the start node of the past execution', () => { - const startNode = getPastExecutionStartNode(executionDataJson); + const startNode = getPastExecutionTriggerNode(executionDataJson); expect(startNode).toEqual('When clicking ‘Test workflow’'); }); test('should return the start node of the past execution with multiple triggers', () => { - const startNode = getPastExecutionStartNode(executionDataMultipleTriggersJson); + const startNode = getPastExecutionTriggerNode(executionDataMultipleTriggersJson); expect(startNode).toEqual('When clicking ‘Test workflow’'); }); test('should return the start node of the past execution with multiple triggers - chat trigger', () => { - const startNode = getPastExecutionStartNode(executionDataMultipleTriggersJson2); + const startNode = getPastExecutionTriggerNode(executionDataMultipleTriggersJson2); expect(startNode).toEqual('When chat message received'); }); diff --git a/packages/cli/src/evaluation/test-runner/__tests__/test-runner.service.ee.test.ts b/packages/cli/src/evaluation/test-runner/__tests__/test-runner.service.ee.test.ts index cdb8e848d9..e03fb8dc47 100644 --- a/packages/cli/src/evaluation/test-runner/__tests__/test-runner.service.ee.test.ts +++ b/packages/cli/src/evaluation/test-runner/__tests__/test-runner.service.ee.test.ts @@ -163,6 +163,7 @@ describe('TestRunnerService', () => { mock({ workflowId: 'workflow-under-test-id', evaluationWorkflowId: 'evaluation-workflow-id', + mockedNodes: [], }), ); @@ -219,6 +220,7 @@ describe('TestRunnerService', () => { mock({ workflowId: 'workflow-under-test-id', evaluationWorkflowId: 'evaluation-workflow-id', + mockedNodes: [{ name: 'When clicking ‘Test workflow’' }], }), ); diff --git a/packages/cli/src/evaluation/test-runner/test-runner.service.ee.ts 
b/packages/cli/src/evaluation/test-runner/test-runner.service.ee.ts index 5aaaf25558..11903581cd 100644 --- a/packages/cli/src/evaluation/test-runner/test-runner.service.ee.ts +++ b/packages/cli/src/evaluation/test-runner/test-runner.service.ee.ts @@ -11,7 +11,7 @@ import { Service } from 'typedi'; import { ActiveExecutions } from '@/active-executions'; import type { ExecutionEntity } from '@/databases/entities/execution-entity'; -import type { TestDefinition } from '@/databases/entities/test-definition.ee'; +import type { MockedNodeItem, TestDefinition } from '@/databases/entities/test-definition.ee'; import type { User } from '@/databases/entities/user'; import type { WorkflowEntity } from '@/databases/entities/workflow-entity'; import { ExecutionRepository } from '@/databases/repositories/execution.repository'; @@ -22,7 +22,7 @@ import { getRunData } from '@/workflow-execute-additional-data'; import { WorkflowRunner } from '@/workflow-runner'; import { EvaluationMetrics } from './evaluation-metrics.ee'; -import { createPinData, getPastExecutionStartNode } from './utils.ee'; +import { createPinData, getPastExecutionTriggerNode } from './utils.ee'; /** * This service orchestrates the running of test cases. @@ -30,9 +30,7 @@ import { createPinData, getPastExecutionStartNode } from './utils.ee'; * past executions, creates pin data from them, * and runs the workflow-under-test with the pin data. * After the workflow-under-test finishes, it runs the evaluation workflow - * with the original and new run data. - * TODO: Node pinning - * TODO: Collect metrics + * with the original and new run data, and collects the metrics. */ @Service() export class TestRunnerService { @@ -52,13 +50,14 @@ export class TestRunnerService { private async runTestCase( workflow: WorkflowEntity, pastExecutionData: IRunExecutionData, + mockedNodes: MockedNodeItem[], userId: string, ): Promise { // Create pin data from the past execution data - const pinData = createPinData(workflow, pastExecutionData); + const pinData = createPinData(workflow, mockedNodes, pastExecutionData); // Determine the start node of the past execution - const pastExecutionStartNode = getPastExecutionStartNode(pastExecutionData); + const pastExecutionStartNode = getPastExecutionTriggerNode(pastExecutionData); // Prepare the data to run the workflow const data: IWorkflowExecutionDataProcess = { @@ -196,7 +195,12 @@ export class TestRunnerService { const executionData = parse(pastExecution.executionData.data) as IRunExecutionData; // Run the test case and wait for it to finish - const testCaseExecution = await this.runTestCase(workflow, executionData, user.id); + const testCaseExecution = await this.runTestCase( + workflow, + executionData, + test.mockedNodes, + user.id, + ); // In case of a permission check issue, the test case execution will be undefined. // Skip them and continue with the next test case diff --git a/packages/cli/src/evaluation/test-runner/utils.ee.ts b/packages/cli/src/evaluation/test-runner/utils.ee.ts index a6a4dc5ec2..e608ad6b4a 100644 --- a/packages/cli/src/evaluation/test-runner/utils.ee.ts +++ b/packages/cli/src/evaluation/test-runner/utils.ee.ts @@ -1,21 +1,29 @@ import type { IRunExecutionData, IPinData } from 'n8n-workflow'; +import type { MockedNodeItem } from '@/databases/entities/test-definition.ee'; import type { WorkflowEntity } from '@/databases/entities/workflow-entity'; /** * Extracts the execution data from the past execution * and creates a pin data object from it for the given workflow. 
- * For now, it only pins trigger nodes. + * It uses a list of mocked nodes defined in a test definition + * to decide which nodes to pin. */ -export function createPinData(workflow: WorkflowEntity, executionData: IRunExecutionData) { - const triggerNodes = workflow.nodes.filter((node) => /trigger$/i.test(node.type)); - +export function createPinData( + workflow: WorkflowEntity, + mockedNodes: MockedNodeItem[], + executionData: IRunExecutionData, +) { const pinData = {} as IPinData; - for (const triggerNode of triggerNodes) { - const triggerData = executionData.resultData.runData[triggerNode.name]; - if (triggerData?.[0]?.data?.main?.[0]) { - pinData[triggerNode.name] = triggerData[0]?.data?.main?.[0]; + const workflowNodeNames = new Set(workflow.nodes.map((node) => node.name)); + + for (const mockedNode of mockedNodes) { + if (workflowNodeNames.has(mockedNode.name)) { + const nodeData = executionData.resultData.runData[mockedNode.name]; + if (nodeData?.[0]?.data?.main?.[0]) { + pinData[mockedNode.name] = nodeData[0]?.data?.main?.[0]; + } } } @@ -26,7 +34,7 @@ export function createPinData(workflow: WorkflowEntity, executionData: IRunExecu * Returns the start node of the past execution. * The start node is the node that has no source and has run data. */ -export function getPastExecutionStartNode(executionData: IRunExecutionData) { +export function getPastExecutionTriggerNode(executionData: IRunExecutionData) { return Object.keys(executionData.resultData.runData).find((nodeName) => { const data = executionData.resultData.runData[nodeName]; return !data[0].source || data[0].source.length === 0 || data[0].source[0] === null; diff --git a/packages/cli/src/eventbus/message-event-bus-destination/message-event-bus-destination-sentry.ee.ts b/packages/cli/src/eventbus/message-event-bus-destination/message-event-bus-destination-sentry.ee.ts index 5678cbb59c..35aef63cb1 100644 --- a/packages/cli/src/eventbus/message-event-bus-destination/message-event-bus-destination-sentry.ee.ts +++ b/packages/cli/src/eventbus/message-event-bus-destination/message-event-bus-destination-sentry.ee.ts @@ -48,7 +48,7 @@ export class MessageEventBusDestinationSentry environment, release: N8N_VERSION, transport: Sentry.makeNodeTransport, - integrations: Sentry.defaultIntegrations, + integrations: Sentry.getDefaultIntegrations({}), stackParser: Sentry.defaultStackParser, }); } diff --git a/packages/cli/src/events/__tests__/telemetry-event-relay.test.ts b/packages/cli/src/events/__tests__/telemetry-event-relay.test.ts index 58d694e556..4448dbc41e 100644 --- a/packages/cli/src/events/__tests__/telemetry-event-relay.test.ts +++ b/packages/cli/src/events/__tests__/telemetry-event-relay.test.ts @@ -1,5 +1,6 @@ import type { GlobalConfig } from '@n8n/config'; import { mock } from 'jest-mock-extended'; +import { InstanceSettings } from 'n8n-core'; import type { IWorkflowBase } from 'n8n-workflow'; import { N8N_VERSION } from '@/constants'; @@ -14,6 +15,7 @@ import type { IWorkflowDb } from '@/interfaces'; import type { License } from '@/license'; import type { NodeTypes } from '@/node-types'; import type { Telemetry } from '@/telemetry'; +import { mockInstance } from '@test/mocking'; const flushPromises = async () => await new Promise((resolve) => setImmediate(resolve)); @@ -41,6 +43,7 @@ describe('TelemetryEventRelay', () => { outputs: ['console'], }, }); + const instanceSettings = mockInstance(InstanceSettings, { isDocker: false, n8nFolder: '/test' }); const workflowRepository = mock(); const nodeTypes = mock(); const 
sharedWorkflowRepository = mock(); @@ -55,6 +58,7 @@ describe('TelemetryEventRelay', () => { telemetry, license, globalConfig, + instanceSettings, workflowRepository, nodeTypes, sharedWorkflowRepository, @@ -65,11 +69,8 @@ describe('TelemetryEventRelay', () => { }); beforeEach(() => { - globalConfig.diagnostics.enabled = true; - }); - - afterEach(() => { jest.clearAllMocks(); + globalConfig.diagnostics.enabled = true; }); describe('init', () => { @@ -80,6 +81,7 @@ describe('TelemetryEventRelay', () => { telemetry, license, globalConfig, + instanceSettings, workflowRepository, nodeTypes, sharedWorkflowRepository, @@ -101,6 +103,7 @@ describe('TelemetryEventRelay', () => { telemetry, license, globalConfig, + instanceSettings, workflowRepository, nodeTypes, sharedWorkflowRepository, @@ -942,7 +945,36 @@ describe('TelemetryEventRelay', () => { await flushPromises(); - // expect(telemetry.identify).toHaveBeenCalled(); + expect(telemetry.identify).toHaveBeenCalledWith( + expect.objectContaining({ + version_cli: N8N_VERSION, + metrics: { + metrics_category_cache: false, + metrics_category_default: true, + metrics_category_logs: false, + metrics_category_queue: false, + metrics_category_routes: false, + metrics_enabled: true, + }, + n8n_binary_data_mode: 'default', + n8n_deployment_type: 'default', + saml_enabled: false, + smtp_set_up: true, + system_info: { + is_docker: false, + cpus: expect.objectContaining({ + count: expect.any(Number), + model: expect.any(String), + speed: expect.any(Number), + }), + memory: expect.any(Number), + os: expect.objectContaining({ + type: expect.any(String), + version: expect.any(String), + }), + }, + }), + ); expect(telemetry.track).toHaveBeenCalledWith( 'Instance started', expect.objectContaining({ diff --git a/packages/cli/src/events/relays/telemetry.event-relay.ts b/packages/cli/src/events/relays/telemetry.event-relay.ts index 0a352087e5..a34646f100 100644 --- a/packages/cli/src/events/relays/telemetry.event-relay.ts +++ b/packages/cli/src/events/relays/telemetry.event-relay.ts @@ -1,5 +1,6 @@ import { GlobalConfig } from '@n8n/config'; import { snakeCase } from 'change-case'; +import { InstanceSettings } from 'n8n-core'; import type { ExecutionStatus, INodesGraphResult, ITelemetryTrackProperties } from 'n8n-workflow'; import { TelemetryHelpers } from 'n8n-workflow'; import os from 'node:os'; @@ -28,6 +29,7 @@ export class TelemetryEventRelay extends EventRelay { private readonly telemetry: Telemetry, private readonly license: License, private readonly globalConfig: GlobalConfig, + private readonly instanceSettings: InstanceSettings, private readonly workflowRepository: WorkflowRepository, private readonly nodeTypes: NodeTypes, private readonly sharedWorkflowRepository: SharedWorkflowRepository, @@ -760,6 +762,7 @@ export class TelemetryEventRelay extends EventRelay { model: cpus[0].model, speed: cpus[0].speed, }, + is_docker: this.instanceSettings.isDocker, }, execution_variables: { executions_mode: config.getEnv('executions.mode'), diff --git a/packages/cli/src/execution-lifecycle-hooks/__tests__/save-execution-progress.test.ts b/packages/cli/src/execution-lifecycle-hooks/__tests__/save-execution-progress.test.ts index d89f2fb734..eedbf27c9e 100644 --- a/packages/cli/src/execution-lifecycle-hooks/__tests__/save-execution-progress.test.ts +++ b/packages/cli/src/execution-lifecycle-hooks/__tests__/save-execution-progress.test.ts @@ -1,9 +1,5 @@ -import { - ErrorReporterProxy, - type IRunExecutionData, - type ITaskData, - type IWorkflowBase, -} from 
'n8n-workflow'; +import { ErrorReporter } from 'n8n-core'; +import type { IRunExecutionData, ITaskData, IWorkflowBase } from 'n8n-workflow'; import { ExecutionRepository } from '@/databases/repositories/execution.repository'; import { saveExecutionProgress } from '@/execution-lifecycle-hooks/save-execution-progress'; @@ -13,7 +9,7 @@ import { Logger } from '@/logging/logger.service'; import { mockInstance } from '@test/mocking'; mockInstance(Logger); - +const errorReporter = mockInstance(ErrorReporter); const executionRepository = mockInstance(ExecutionRepository); afterEach(() => { @@ -63,8 +59,6 @@ test('should update execution when saving progress is enabled', async () => { progress: true, }); - const reporterSpy = jest.spyOn(ErrorReporterProxy, 'error'); - executionRepository.findSingleExecution.mockResolvedValue({} as IExecutionResponse); await saveExecutionProgress(...commonArgs); @@ -83,7 +77,7 @@ test('should update execution when saving progress is enabled', async () => { status: 'running', }); - expect(reporterSpy).not.toHaveBeenCalled(); + expect(errorReporter.error).not.toHaveBeenCalled(); }); test('should report error on failure', async () => { @@ -92,8 +86,6 @@ test('should report error on failure', async () => { progress: true, }); - const reporterSpy = jest.spyOn(ErrorReporterProxy, 'error'); - const error = new Error('Something went wrong'); executionRepository.findSingleExecution.mockImplementation(() => { @@ -103,5 +95,5 @@ test('should report error on failure', async () => { await saveExecutionProgress(...commonArgs); expect(executionRepository.updateExistingExecution).not.toHaveBeenCalled(); - expect(reporterSpy).toHaveBeenCalledWith(error); + expect(errorReporter.error).toHaveBeenCalledWith(error); }); diff --git a/packages/cli/src/execution-lifecycle-hooks/save-execution-progress.ts b/packages/cli/src/execution-lifecycle-hooks/save-execution-progress.ts index ca9899e1ec..c1de2646c0 100644 --- a/packages/cli/src/execution-lifecycle-hooks/save-execution-progress.ts +++ b/packages/cli/src/execution-lifecycle-hooks/save-execution-progress.ts @@ -1,5 +1,5 @@ +import { ErrorReporter } from 'n8n-core'; import type { IRunExecutionData, ITaskData, IWorkflowBase } from 'n8n-workflow'; -import { ErrorReporterProxy as ErrorReporter } from 'n8n-workflow'; import { Container } from 'typedi'; import { ExecutionRepository } from '@/databases/repositories/execution.repository'; @@ -85,7 +85,7 @@ export async function saveExecutionProgress( } catch (e) { const error = e instanceof Error ? e : new Error(`${e}`); - ErrorReporter.error(error); + Container.get(ErrorReporter).error(error); // TODO: Improve in the future! // Errors here might happen because of database access // For busy machines, we may get "Database is locked" errors. 
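The hunks above swap the static `ErrorReporterProxy` from `n8n-workflow` for the injectable `ErrorReporter` service from `n8n-core`, obtained either through constructor injection or via `Container.get()` in free functions such as `saveExecutionProgress`. A minimal sketch of that pattern, not taken from this diff (the class and function names below are illustrative only):

import { ErrorReporter } from 'n8n-core';
import { Container, Service } from 'typedi';

// Constructor injection, the pattern used by the DI-managed services in this diff:
@Service()
class ExampleConsumer {
  constructor(private readonly errorReporter: ErrorReporter) {}

  safeRun(work: () => void) {
    try {
      work();
    } catch (e) {
      // Normalize non-Error throwables before reporting, as saveExecutionProgress does above.
      const error = e instanceof Error ? e : new Error(`${e}`);
      this.errorReporter.error(error);
    }
  }
}

// Outside DI-managed classes, the same singleton is resolved from the typedi container:
export const reportFailure = (error: Error) => Container.get(ErrorReporter).error(error);
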
diff --git a/packages/cli/src/expression-evaluator.ts b/packages/cli/src/expression-evaluator.ts index 9a91b4864f..434c78e114 100644 --- a/packages/cli/src/expression-evaluator.ts +++ b/packages/cli/src/expression-evaluator.ts @@ -1,4 +1,6 @@ -import { ErrorReporterProxy, ExpressionEvaluatorProxy } from 'n8n-workflow'; +import { ErrorReporter } from 'n8n-core'; +import { ExpressionEvaluatorProxy } from 'n8n-workflow'; +import Container from 'typedi'; import config from '@/config'; @@ -6,7 +8,7 @@ export const initExpressionEvaluator = () => { ExpressionEvaluatorProxy.setEvaluator(config.getEnv('expression.evaluator')); ExpressionEvaluatorProxy.setDifferEnabled(config.getEnv('expression.reportDifference')); ExpressionEvaluatorProxy.setDiffReporter((expr) => { - ErrorReporterProxy.warn('Expression difference', { + Container.get(ErrorReporter).warn('Expression difference', { extra: { expression: expr, }, diff --git a/packages/cli/src/license.ts b/packages/cli/src/license.ts index 8f1bd26e64..2a3ae6fd6d 100644 --- a/packages/cli/src/license.ts +++ b/packages/cli/src/license.ts @@ -9,7 +9,6 @@ import { SettingsRepository } from '@/databases/repositories/settings.repository import { OnShutdown } from '@/decorators/on-shutdown'; import { Logger } from '@/logging/logger.service'; import { LicenseMetricsService } from '@/metrics/license-metrics.service'; -import { OrchestrationService } from '@/services/orchestration.service'; import { LICENSE_FEATURES, @@ -35,7 +34,6 @@ export class License { constructor( private readonly logger: Logger, private readonly instanceSettings: InstanceSettings, - private readonly orchestrationService: OrchestrationService, private readonly settingsRepository: SettingsRepository, private readonly licenseMetricsService: LicenseMetricsService, private readonly globalConfig: GlobalConfig, @@ -138,23 +136,24 @@ export class License { this.logger.debug('License feature change detected', _features); if (config.getEnv('executions.mode') === 'queue' && this.globalConfig.multiMainSetup.enabled) { - const isMultiMainLicensed = _features[LICENSE_FEATURES.MULTIPLE_MAIN_INSTANCES] as - | boolean - | undefined; + const isMultiMainLicensed = + (_features[LICENSE_FEATURES.MULTIPLE_MAIN_INSTANCES] as boolean | undefined) ?? false; - this.orchestrationService.setMultiMainSetupLicensed(isMultiMainLicensed ?? false); + this.instanceSettings.setMultiMainLicensed(isMultiMainLicensed); - if (this.orchestrationService.isMultiMainSetupEnabled && this.instanceSettings.isFollower) { - this.logger.debug( - '[Multi-main setup] Instance is follower, skipping sending of "reload-license" command...', - ); + if (this.instanceSettings.isMultiMain && !this.instanceSettings.isLeader) { + this.logger + .scoped(['scaling', 'multi-main-setup', 'license']) + .debug('Instance is not leader, skipping sending of "reload-license" command...'); return; } - if (this.orchestrationService.isMultiMainSetupEnabled && !isMultiMainLicensed) { - this.logger.debug( - '[Multi-main setup] License changed with no support for multi-main setup - no new followers will be allowed to init. To restore multi-main setup, please upgrade to a license that supports this feature.', - ); + if (this.globalConfig.multiMainSetup.enabled && !isMultiMainLicensed) { + this.logger + .scoped(['scaling', 'multi-main-setup', 'license']) + .debug( + 'License changed with no support for multi-main setup - no new followers will be allowed to init. 
To restore multi-main setup, please upgrade to a license that supports this feature.', + ); } } diff --git a/packages/cli/src/load-nodes-and-credentials.ts b/packages/cli/src/load-nodes-and-credentials.ts index 22273fb894..9fc3750329 100644 --- a/packages/cli/src/load-nodes-and-credentials.ts +++ b/packages/cli/src/load-nodes-and-credentials.ts @@ -4,10 +4,13 @@ import fsPromises from 'fs/promises'; import type { Class, DirectoryLoader, Types } from 'n8n-core'; import { CUSTOM_EXTENSION_ENV, + ErrorReporter, InstanceSettings, CustomDirectoryLoader, PackageDirectoryLoader, LazyPackageDirectoryLoader, + UnrecognizedCredentialTypeError, + UnrecognizedNodeTypeError, } from 'n8n-core'; import type { KnownNodesAndCredentials, @@ -15,8 +18,13 @@ import type { INodeTypeDescription, INodeTypeData, ICredentialTypeData, + LoadedClass, + ICredentialType, + INodeType, + IVersionedNodeType, + INodeProperties, } from 'n8n-workflow'; -import { NodeHelpers, ApplicationError, ErrorReporterProxy as ErrorReporter } from 'n8n-workflow'; +import { ApplicationError, NodeConnectionType } from 'n8n-workflow'; import path from 'path'; import picocolors from 'picocolors'; import { Container, Service } from 'typedi'; @@ -57,6 +65,7 @@ export class LoadNodesAndCredentials { constructor( private readonly logger: Logger, + private readonly errorReporter: ErrorReporter, private readonly instanceSettings: InstanceSettings, private readonly globalConfig: GlobalConfig, ) {} @@ -149,7 +158,7 @@ export class LoadNodesAndCredentials { ); } catch (error) { this.logger.error((error as Error).message); - ErrorReporter.error(error); + this.errorReporter.error(error); } } } @@ -285,7 +294,7 @@ export class LoadNodesAndCredentials { for (const usableNode of usableNodes) { const description: INodeTypeBaseDescription | INodeTypeDescription = structuredClone(usableNode); - const wrapped = NodeHelpers.convertNodeToAiTool({ description }).description; + const wrapped = this.convertNodeToAiTool({ description }).description; this.types.nodes.push(wrapped); this.known.nodes[wrapped.name] = structuredClone(this.known.nodes[usableNode.name]); @@ -307,13 +316,18 @@ export class LoadNodesAndCredentials { for (const loader of Object.values(this.loaders)) { // list of node & credential types that will be sent to the frontend - const { known, types, directory } = loader; - this.types.nodes = this.types.nodes.concat(types.nodes); + const { known, types, directory, packageName } = loader; + this.types.nodes = this.types.nodes.concat( + types.nodes.map(({ name, ...rest }) => ({ + ...rest, + name: `${packageName}.${name}`, + })), + ); this.types.credentials = this.types.credentials.concat(types.credentials); // Nodes and credentials that have been loaded immediately for (const nodeTypeName in loader.nodeTypes) { - this.loaded.nodes[nodeTypeName] = loader.nodeTypes[nodeTypeName]; + this.loaded.nodes[`${packageName}.${nodeTypeName}`] = loader.nodeTypes[nodeTypeName]; } for (const credentialTypeName in loader.credentialTypes) { @@ -322,7 +336,7 @@ export class LoadNodesAndCredentials { for (const type in known.nodes) { const { className, sourcePath } = known.nodes[type]; - this.known.nodes[type] = { + this.known.nodes[`${packageName}.${type}`] = { className, sourcePath: path.join(directory, sourcePath), }; @@ -356,6 +370,159 @@ export class LoadNodesAndCredentials { } } + getNode(fullNodeType: string): LoadedClass { + const [packageName, nodeType] = fullNodeType.split('.'); + const { loaders } = this; + const loader = loaders[packageName]; + if (!loader) 
{ + throw new UnrecognizedNodeTypeError(packageName, nodeType); + } + return loader.getNode(nodeType); + } + + getCredential(credentialType: string): LoadedClass { + const { loadedCredentials } = this; + + for (const loader of Object.values(this.loaders)) { + if (credentialType in loader.known.credentials) { + const loaded = loader.getCredential(credentialType); + loadedCredentials[credentialType] = loaded; + } + } + + if (credentialType in loadedCredentials) { + return loadedCredentials[credentialType]; + } + + throw new UnrecognizedCredentialTypeError(credentialType); + } + + /** + * Modifies the description of the passed in object, such that it can be used + * as an AI Agent Tool. + * Returns the modified item (not copied) + */ + convertNodeToAiTool< + T extends object & { description: INodeTypeDescription | INodeTypeBaseDescription }, + >(item: T): T { + // quick helper function for type-guard down below + function isFullDescription(obj: unknown): obj is INodeTypeDescription { + return typeof obj === 'object' && obj !== null && 'properties' in obj; + } + + if (isFullDescription(item.description)) { + const isVectorStore = item.description.group.includes('vector-store'); + + item.description.name += 'Tool'; + if (!isVectorStore) { + item.description.inputs = []; + } + item.description.outputs = [NodeConnectionType.AiTool]; + item.description.displayName += ' Tool'; + delete item.description.usableAsTool; + + const hasResource = item.description.properties.some((prop) => prop.name === 'resource'); + const hasOperation = item.description.properties.some((prop) => prop.name === 'operation'); + + if (!item.description.properties.map((prop) => prop.name).includes('toolDescription')) { + const descriptionType: INodeProperties = { + displayName: 'Tool Description', + name: 'descriptionType', + type: 'options', + noDataExpression: true, + options: [ + { + name: 'Set Automatically', + value: 'auto', + description: 'Automatically set based on resource and operation', + }, + { + name: 'Set Manually', + value: 'manual', + description: 'Manually set the description', + }, + ], + default: 'auto', + }; + + if (isVectorStore) { + const metadataProp: INodeProperties = { + displayName: 'Include metadata', + name: 'includeDocumentMetadata', + type: 'boolean', + default: false, + description: 'Whether or not to include document metadata', + }; + + item.description.properties.unshift(metadataProp); + } + + const descProp: INodeProperties = { + displayName: 'Description', + name: 'toolDescription', + type: 'string', + default: item.description.description, + required: true, + typeOptions: { rows: 2 }, + description: + 'Explain to the LLM what this tool does, a good, specific description would allow LLMs to produce expected results much more often', + placeholder: `e.g. ${item.description.description}`, + }; + + const noticeProp: INodeProperties = { + displayName: + "Use the expression {{ $fromAI('placeholder_name') }} for any data to be filled by the model", + name: 'notice', + type: 'notice', + default: '', + }; + + item.description.properties.unshift(descProp); + + if (isVectorStore) { + const nameProp: INodeProperties = { + displayName: 'Name', + name: 'toolName', + type: 'string', + default: '', + required: true, + description: 'Name of the vector store', + placeholder: 'e.g. 
company_knowledge_base', + validateType: 'string-alphanumeric', + }; + + item.description.properties.unshift(nameProp); + } + + // If node has resource or operation we can determine pre-populate tool description based on it + // so we add the descriptionType property as the first property + if (hasResource || hasOperation) { + item.description.properties.unshift(descriptionType); + + descProp.displayOptions = { + show: { + descriptionType: ['manual'], + }, + }; + } + + item.description.properties.unshift(noticeProp); + } + } + + const resources = item.description.codex?.resources ?? {}; + + item.description.codex = { + categories: ['AI'], + subcategories: { + AI: ['Tools'], + Tools: ['Other Tools'], + }, + resources, + }; + return item; + } + async setupHotReload() { const { default: debounce } = await import('lodash/debounce'); // eslint-disable-next-line import/no-extraneous-dependencies diff --git a/packages/cli/src/manual-execution.service.ts b/packages/cli/src/manual-execution.service.ts new file mode 100644 index 0000000000..65174d20b5 --- /dev/null +++ b/packages/cli/src/manual-execution.service.ts @@ -0,0 +1,124 @@ +import * as a from 'assert/strict'; +import { + DirectedGraph, + filterDisabledNodes, + recreateNodeExecutionStack, + WorkflowExecute, +} from 'n8n-core'; +import type { + IPinData, + IRun, + IRunExecutionData, + IWorkflowExecuteAdditionalData, + IWorkflowExecutionDataProcess, + Workflow, +} from 'n8n-workflow'; +import type PCancelable from 'p-cancelable'; +import { Service } from 'typedi'; + +import { Logger } from '@/logging/logger.service'; + +@Service() +export class ManualExecutionService { + constructor(private readonly logger: Logger) {} + + getExecutionStartNode(data: IWorkflowExecutionDataProcess, workflow: Workflow) { + let startNode; + if ( + data.startNodes?.length === 1 && + Object.keys(data.pinData ?? {}).includes(data.startNodes[0].name) + ) { + startNode = workflow.getNode(data.startNodes[0].name) ?? undefined; + } + + return startNode; + } + + // eslint-disable-next-line @typescript-eslint/promise-function-async + runManually( + data: IWorkflowExecutionDataProcess, + workflow: Workflow, + additionalData: IWorkflowExecuteAdditionalData, + executionId: string, + pinData?: IPinData, + ): PCancelable { + if (data.triggerToStartFrom?.data && data.startNodes && !data.destinationNode) { + this.logger.debug( + `Execution ID ${executionId} had triggerToStartFrom. Starting from that trigger.`, + { executionId }, + ); + const startNodes = data.startNodes.map((startNode) => { + const node = workflow.getNode(startNode.name); + a.ok(node, `Could not find a node named "${startNode.name}" in the workflow.`); + return node; + }); + const runData = { [data.triggerToStartFrom.name]: [data.triggerToStartFrom.data] }; + + const { nodeExecutionStack, waitingExecution, waitingExecutionSource } = + recreateNodeExecutionStack( + filterDisabledNodes(DirectedGraph.fromWorkflow(workflow)), + new Set(startNodes), + runData, + data.pinData ?? 
{}, + ); + const executionData: IRunExecutionData = { + resultData: { runData, pinData }, + executionData: { + contextData: {}, + metadata: {}, + nodeExecutionStack, + waitingExecution, + waitingExecutionSource, + }, + }; + + const workflowExecute = new WorkflowExecute(additionalData, 'manual', executionData); + return workflowExecute.processRunExecutionData(workflow); + } else if ( + data.runData === undefined || + data.startNodes === undefined || + data.startNodes.length === 0 + ) { + // Full Execution + // TODO: When the old partial execution logic is removed this block can + // be removed and the previous one can be merged into + // `workflowExecute.runPartialWorkflow2`. + // Partial executions then require either a destination node from which + // everything else can be derived, or a triggerToStartFrom with + // triggerData. + this.logger.debug(`Execution ID ${executionId} will run executing all nodes.`, { + executionId, + }); + // Execute all nodes + + const startNode = this.getExecutionStartNode(data, workflow); + + // Can execute without webhook so go on + const workflowExecute = new WorkflowExecute(additionalData, data.executionMode); + return workflowExecute.run(workflow, startNode, data.destinationNode, data.pinData); + } else { + // Partial Execution + this.logger.debug(`Execution ID ${executionId} is a partial execution.`, { executionId }); + // Execute only the nodes between start and destination nodes + const workflowExecute = new WorkflowExecute(additionalData, data.executionMode); + + if (data.partialExecutionVersion === '1') { + return workflowExecute.runPartialWorkflow2( + workflow, + data.runData, + data.pinData, + data.dirtyNodeNames, + data.destinationNode, + ); + } else { + return workflowExecute.runPartialWorkflow( + workflow, + data.runData, + data.startNodes, + data.destinationNode, + data.pinData, + ); + } + } + } +} diff --git a/packages/cli/src/node-types.ts b/packages/cli/src/node-types.ts index 2008ef57e3..91470d3471 100644 --- a/packages/cli/src/node-types.ts +++ b/packages/cli/src/node-types.ts @@ -1,26 +1,16 @@ import type { NeededNodeType } from '@n8n/task-runner'; import type { Dirent } from 'fs'; import { readdir } from 'fs/promises'; -import { loadClassInIsolation } from 'n8n-core'; -import type { - INodeType, - INodeTypeDescription, - INodeTypes, - IVersionedNodeType, - LoadedClass, -} from 'n8n-workflow'; +import type { INodeType, INodeTypeDescription, INodeTypes, IVersionedNodeType } from 'n8n-workflow'; import { ApplicationError, NodeHelpers } from 'n8n-workflow'; import { join, dirname } from 'path'; import { Service } from 'typedi'; -import { UnrecognizedNodeTypeError } from './errors/unrecognized-node-type.error'; import { LoadNodesAndCredentials } from './load-nodes-and-credentials'; @Service() export class NodeTypes implements INodeTypes { - constructor(private loadNodesAndCredentials: LoadNodesAndCredentials) { - loadNodesAndCredentials.addPostProcessor(async () => this.applySpecialNodeParameters()); - } + constructor(private loadNodesAndCredentials: LoadNodesAndCredentials) {} /** * Variant of `getByNameAndVersion` that includes the node's source path, used to locate a node's translations. 
@@ -29,19 +19,14 @@ export class NodeTypes implements INodeTypes { nodeTypeName: string, version: number, ): { description: INodeTypeDescription } & { sourcePath: string } { - const nodeType = this.getNode(nodeTypeName); - - if (!nodeType) { - throw new ApplicationError('Unknown node type', { tags: { nodeTypeName } }); - } - + const nodeType = this.loadNodesAndCredentials.getNode(nodeTypeName); const { description } = NodeHelpers.getVersionedNodeType(nodeType.type, version); return { description: { ...description }, sourcePath: nodeType.sourcePath }; } getByName(nodeType: string): INodeType | IVersionedNodeType { - return this.getNode(nodeType).type; + return this.loadNodesAndCredentials.getNode(nodeType).type; } getByNameAndVersion(nodeType: string, version?: number): INodeType { @@ -54,7 +39,7 @@ export class NodeTypes implements INodeTypes { nodeType = nodeType.replace(/Tool$/, ''); } - const node = this.getNode(nodeType); + const node = this.loadNodesAndCredentials.getNode(nodeType); const versionedNodeType = NodeHelpers.getVersionedNodeType(node.type, version); if (!toolRequested) return versionedNodeType; @@ -76,41 +61,15 @@ export class NodeTypes implements INodeTypes { const clonedNode = Object.create(versionedNodeType, { description: { value: clonedDescription }, }) as INodeType; - const tool = NodeHelpers.convertNodeToAiTool(clonedNode); + const tool = this.loadNodesAndCredentials.convertNodeToAiTool(clonedNode); loadedNodes[nodeType + 'Tool'] = { sourcePath: '', type: tool }; return tool; } - /* Some nodeTypes need to get special parameters applied like the polling nodes the polling times */ - applySpecialNodeParameters() { - for (const nodeTypeData of Object.values(this.loadNodesAndCredentials.loadedNodes)) { - const nodeType = NodeHelpers.getVersionedNodeType(nodeTypeData.type); - NodeHelpers.applySpecialNodeParameters(nodeType); - } - } - getKnownTypes() { return this.loadNodesAndCredentials.knownNodes; } - private getNode(type: string): LoadedClass { - const { loadedNodes, knownNodes } = this.loadNodesAndCredentials; - if (type in loadedNodes) { - return loadedNodes[type]; - } - - if (type in knownNodes) { - const { className, sourcePath } = knownNodes[type]; - const loaded: INodeType = loadClassInIsolation(sourcePath, className); - NodeHelpers.applySpecialNodeParameters(loaded); - - loadedNodes[type] = { sourcePath, type: loaded }; - return loadedNodes[type]; - } - - throw new UnrecognizedNodeTypeError(type); - } - async getNodeTranslationPath({ nodeSourcePath, longNodeType, @@ -155,14 +114,12 @@ export class NodeTypes implements INodeTypes { getNodeTypeDescriptions(nodeTypes: NeededNodeType[]): INodeTypeDescription[] { return nodeTypes.map(({ name: nodeTypeName, version: nodeTypeVersion }) => { - const nodeType = this.getNode(nodeTypeName); - - if (!nodeType) throw new ApplicationError(`Unknown node type: ${nodeTypeName}`); - + const nodeType = this.loadNodesAndCredentials.getNode(nodeTypeName); const { description } = NodeHelpers.getVersionedNodeType(nodeType.type, nodeTypeVersion); const descriptionCopy = { ...description }; + // TODO: do we still need this? descriptionCopy.name = descriptionCopy.name.startsWith('n8n-nodes') ? 
descriptionCopy.name : `n8n-nodes-base.${descriptionCopy.name}`; // nodes-base nodes are unprefixed diff --git a/packages/cli/src/push/abstract.push.ts b/packages/cli/src/push/abstract.push.ts index 24cafa8121..83a859fc75 100644 --- a/packages/cli/src/push/abstract.push.ts +++ b/packages/cli/src/push/abstract.push.ts @@ -1,4 +1,5 @@ import type { PushPayload, PushType } from '@n8n/api-types'; +import { ErrorReporter } from 'n8n-core'; import { assert, jsonStringify } from 'n8n-workflow'; import { Service } from 'typedi'; @@ -27,7 +28,10 @@ export abstract class AbstractPush extends TypedEmitter this.pingAll(), 60 * 1000); @@ -66,7 +70,7 @@ export abstract class AbstractPush extends TypedEmitter(type: Type, data: PushPayload, pushRefs: string[]) { - this.logger.debug(`Send data of type "${type}" to editor-UI`, { + this.logger.debug(`Pushed to frontend: ${type}`, { dataType: type, pushRefs: pushRefs.join(', '), }); diff --git a/packages/cli/src/push/index.ts b/packages/cli/src/push/index.ts index bfbfb43a51..3b29d85242 100644 --- a/packages/cli/src/push/index.ts +++ b/packages/cli/src/push/index.ts @@ -2,6 +2,7 @@ import type { PushPayload, PushType } from '@n8n/api-types'; import type { Application } from 'express'; import { ServerResponse } from 'http'; import type { Server } from 'http'; +import { InstanceSettings } from 'n8n-core'; import type { Socket } from 'net'; import { Container, Service } from 'typedi'; import { parse as parseUrl } from 'url'; @@ -13,7 +14,6 @@ import type { User } from '@/databases/entities/user'; import { OnShutdown } from '@/decorators/on-shutdown'; import { BadRequestError } from '@/errors/response-errors/bad-request.error'; import { Publisher } from '@/scaling/pubsub/publisher.service'; -import { OrchestrationService } from '@/services/orchestration.service'; import { TypedEmitter } from '@/typed-emitter'; import { SSEPush } from './sse.push'; @@ -41,7 +41,7 @@ export class Push extends TypedEmitter { private backend = useWebSockets ? Container.get(WebSocketPush) : Container.get(SSEPush); constructor( - private readonly orchestrationService: OrchestrationService, + private readonly instanceSettings: InstanceSettings, private readonly publisher: Publisher, ) { super(); @@ -92,7 +92,7 @@ export class Push extends TypedEmitter { * the webhook. If so, the handler process commands the creator process to * relay the former's execution lifecycle events to the creator's frontend. 
*/ - if (this.orchestrationService.isMultiMainSetupEnabled && !this.backend.hasPushRef(pushRef)) { + if (this.instanceSettings.isMultiMain && !this.backend.hasPushRef(pushRef)) { void this.publisher.publishCommand({ command: 'relay-execution-lifecycle-event', payload: { type, args: data, pushRef }, diff --git a/packages/cli/src/push/websocket.push.ts b/packages/cli/src/push/websocket.push.ts index a2ea39c500..97e45028b2 100644 --- a/packages/cli/src/push/websocket.push.ts +++ b/packages/cli/src/push/websocket.push.ts @@ -1,4 +1,4 @@ -import { ApplicationError, ErrorReporterProxy } from 'n8n-workflow'; +import { ApplicationError } from 'n8n-workflow'; import { Service } from 'typedi'; import type WebSocket from 'ws'; @@ -24,7 +24,7 @@ export class WebSocketPush extends AbstractPush { this.onMessageReceived(pushRef, JSON.parse(buffer.toString('utf8'))); } catch (error) { - ErrorReporterProxy.error( + this.errorReporter.error( new ApplicationError('Error parsing push message', { extra: { userId, diff --git a/packages/cli/src/response-helper.ts b/packages/cli/src/response-helper.ts index 2b993c266c..0e70aa312f 100644 --- a/packages/cli/src/response-helper.ts +++ b/packages/cli/src/response-helper.ts @@ -1,10 +1,7 @@ /* eslint-disable @typescript-eslint/no-unsafe-assignment */ import type { Request, Response } from 'express'; -import { - ErrorReporterProxy as ErrorReporter, - FORM_TRIGGER_PATH_IDENTIFIER, - NodeApiError, -} from 'n8n-workflow'; +import { ErrorReporter } from 'n8n-core'; +import { FORM_TRIGGER_PATH_IDENTIFIER, NodeApiError } from 'n8n-workflow'; import { Readable } from 'node:stream'; import picocolors from 'picocolors'; import Container from 'typedi'; @@ -141,7 +138,7 @@ export const isUniqueConstraintError = (error: Error) => export function reportError(error: Error) { if (!(error instanceof ResponseError) || error.httpStatusCode > 404) { - ErrorReporter.error(error); + Container.get(ErrorReporter).error(error); } } diff --git a/packages/cli/src/runners/__tests__/task-broker.test.ts b/packages/cli/src/runners/__tests__/task-broker.test.ts index 8e86f189e8..1f5030ada8 100644 --- a/packages/cli/src/runners/__tests__/task-broker.test.ts +++ b/packages/cli/src/runners/__tests__/task-broker.test.ts @@ -6,6 +6,7 @@ import { ApplicationError, type INodeTypeBaseDescription } from 'n8n-workflow'; import { Time } from '@/constants'; import { TaskRejectError } from '../errors'; +import { TaskRunnerTimeoutError } from '../errors/task-runner-timeout.error'; import type { RunnerLifecycleEvents } from '../runner-lifecycle-events'; import { TaskBroker } from '../task-broker.service'; import type { TaskOffer, TaskRequest, TaskRunner } from '../task-broker.service'; @@ -721,7 +722,7 @@ describe('TaskBroker', () => { beforeAll(() => { jest.useFakeTimers(); - config = mock({ taskTimeout: 30 }); + config = mock({ taskTimeout: 30, mode: 'internal' }); taskBroker = new TaskBroker(mock(), config, runnerLifecycleEvents); }); @@ -800,7 +801,7 @@ describe('TaskBroker', () => { expect(taskBroker.getTasks().get(taskId)).toBeUndefined(); }); - it('on timeout, we should emit `runner:timed-out-during-task` event and send error to requester', async () => { + it('[internal mode] on timeout, we should emit `runner:timed-out-during-task` event and send error to requester', async () => { jest.spyOn(global, 'clearTimeout'); const taskId = 'task1'; @@ -839,5 +840,50 @@ describe('TaskBroker', () => { expect(taskBroker.getTasks().get(taskId)).toBeUndefined(); }); + + it('[external mode] on timeout, we should instruct 
the runner to cancel and send error to requester', async () => { + const config = mock({ taskTimeout: 30, mode: 'external' }); + taskBroker = new TaskBroker(mock(), config, runnerLifecycleEvents); + + jest.spyOn(global, 'clearTimeout'); + + const taskId = 'task1'; + const runnerId = 'runner1'; + const requesterId = 'requester1'; + const runner = mock({ id: runnerId }); + const runnerCallback = jest.fn(); + const requesterCallback = jest.fn(); + + taskBroker.registerRunner(runner, runnerCallback); + taskBroker.registerRequester(requesterId, requesterCallback); + + taskBroker.setTasks({ + [taskId]: { id: taskId, runnerId, requesterId, taskType: 'test' }, + }); + + await taskBroker.sendTaskSettings(taskId, {}); + runnerCallback.mockClear(); + + jest.runAllTimers(); + + await Promise.resolve(); // for timeout callback + await Promise.resolve(); // for sending messages to runner and requester + await Promise.resolve(); // for task cleanup and removal + + expect(runnerCallback).toHaveBeenLastCalledWith({ + type: 'broker:taskcancel', + taskId, + reason: 'Task execution timed out', + }); + + expect(requesterCallback).toHaveBeenCalledWith({ + type: 'broker:taskerror', + taskId, + error: expect.any(TaskRunnerTimeoutError), + }); + + expect(clearTimeout).toHaveBeenCalled(); + expect(taskBroker.getTasks().get(taskId)).toBeUndefined(); + }); }); }); diff --git a/packages/cli/src/runners/__tests__/task-runner-process-restart-loop-detector.test.ts b/packages/cli/src/runners/__tests__/task-runner-process-restart-loop-detector.test.ts new file mode 100644 index 0000000000..61cfb8b8e8 --- /dev/null +++ b/packages/cli/src/runners/__tests__/task-runner-process-restart-loop-detector.test.ts @@ -0,0 +1,57 @@ +import { TaskRunnersConfig } from '@n8n/config'; +import { mock } from 'jest-mock-extended'; + +import type { Logger } from '@/logging/logger.service'; +import type { TaskRunnerAuthService } from '@/runners/auth/task-runner-auth.service'; +import { TaskRunnerRestartLoopError } from '@/runners/errors/task-runner-restart-loop-error'; +import { RunnerLifecycleEvents } from '@/runners/runner-lifecycle-events'; +import { TaskRunnerProcess } from '@/runners/task-runner-process'; +import { TaskRunnerProcessRestartLoopDetector } from '@/runners/task-runner-process-restart-loop-detector'; + +describe('TaskRunnerProcessRestartLoopDetector', () => { + const mockLogger = mock(); + const mockAuthService = mock(); + const runnerConfig = new TaskRunnersConfig(); + const taskRunnerProcess = new TaskRunnerProcess( + mockLogger, + runnerConfig, + mockAuthService, + new RunnerLifecycleEvents(), + ); + + it('should detect a restart loop if process exits 5 times within 5s', () => { + const restartLoopDetector = new TaskRunnerProcessRestartLoopDetector(taskRunnerProcess); + let emittedError: TaskRunnerRestartLoopError | undefined = undefined; + restartLoopDetector.on('restart-loop-detected', (error) => { + emittedError = error; + }); + + taskRunnerProcess.emit('exit'); + taskRunnerProcess.emit('exit'); + taskRunnerProcess.emit('exit'); + taskRunnerProcess.emit('exit'); + taskRunnerProcess.emit('exit'); + + expect(emittedError).toBeInstanceOf(TaskRunnerRestartLoopError); + }); + + it('should not detect a restart loop if process exits less than 5 times within 5s', () => { + jest.useFakeTimers(); + const restartLoopDetector = new TaskRunnerProcessRestartLoopDetector(taskRunnerProcess); + let emittedError: TaskRunnerRestartLoopError | undefined = undefined; + restartLoopDetector.on('restart-loop-detected', (error) => { + 
emittedError = error; + }); + + taskRunnerProcess.emit('exit'); + taskRunnerProcess.emit('exit'); + taskRunnerProcess.emit('exit'); + taskRunnerProcess.emit('exit'); + + jest.advanceTimersByTime(5010); + + taskRunnerProcess.emit('exit'); + + expect(emittedError).toBeUndefined(); + }); +}); diff --git a/packages/cli/src/runners/errors/task-runner-restart-loop-error.ts b/packages/cli/src/runners/errors/task-runner-restart-loop-error.ts new file mode 100644 index 0000000000..b788d83808 --- /dev/null +++ b/packages/cli/src/runners/errors/task-runner-restart-loop-error.ts @@ -0,0 +1,14 @@ +import { ApplicationError } from 'n8n-workflow'; + +export class TaskRunnerRestartLoopError extends ApplicationError { + constructor( + public readonly howManyTimes: number, + public readonly timePeriodMs: number, + ) { + const message = `Task runner has restarted ${howManyTimes} times within ${timePeriodMs / 1000} seconds. This is an abnormally high restart rate that suggests a bug or other issue is preventing your runner process from starting up. If this issues persists, please file a report at: https://github.com/n8n-io/n8n/issues`; + + super(message, { + level: 'fatal', + }); + } +} diff --git a/packages/cli/src/runners/errors/task-runner-timeout.error.ts b/packages/cli/src/runners/errors/task-runner-timeout.error.ts index 88f3533028..1d9d463e3a 100644 --- a/packages/cli/src/runners/errors/task-runner-timeout.error.ts +++ b/packages/cli/src/runners/errors/task-runner-timeout.error.ts @@ -1,15 +1,23 @@ +import type { TaskRunnerMode } from '@n8n/config/src/configs/runners.config'; import { ApplicationError } from 'n8n-workflow'; export class TaskRunnerTimeoutError extends ApplicationError { description: string; - constructor(taskTimeout: number, isSelfHosted: boolean) { + constructor({ + taskTimeout, + isSelfHosted, + mode, + }: { taskTimeout: number; isSelfHosted: boolean; mode: TaskRunnerMode }) { super( `Task execution timed out after ${taskTimeout} ${taskTimeout === 1 ? 'second' : 'seconds'}`, ); - const subtitle = - 'The task runner was taking too long on this task, so it was suspected of being unresponsive and restarted, and the task was aborted. You can try the following:'; + const subtitles = { + internal: + 'The task runner was taking too long on this task, so it was suspected of being unresponsive and restarted, and the task was aborted.', + external: 'The task runner was taking too long on this task, so the task was aborted.', + }; const fixes = { optimizeScript: @@ -27,7 +35,7 @@ export class TaskRunnerTimeoutError extends ApplicationError { .map((suggestion, index) => `${index + 1}. ${suggestion}`) .join('
\n'); - const description = `${subtitle}\n\n${suggestionsText}`; + const description = `${mode === 'internal' ? subtitles.internal : subtitles.external} You can try the following:\n\n
${suggestionsText}`; this.description = description; } diff --git a/packages/cli/src/runners/task-broker.service.ts b/packages/cli/src/runners/task-broker.service.ts index 80e918b47a..e52992d38e 100644 --- a/packages/cli/src/runners/task-broker.service.ts +++ b/packages/cli/src/runners/task-broker.service.ts @@ -459,14 +459,25 @@ export class TaskBroker { const task = this.tasks.get(taskId); if (!task) return; - this.runnerLifecycleEvents.emit('runner:timed-out-during-task'); + if (this.taskRunnersConfig.mode === 'internal') { + this.runnerLifecycleEvents.emit('runner:timed-out-during-task'); + } else if (this.taskRunnersConfig.mode === 'external') { + await this.messageRunner(task.runnerId, { + type: 'broker:taskcancel', + taskId, + reason: 'Task execution timed out', + }); + } + + const { taskTimeout, mode } = this.taskRunnersConfig; await this.taskErrorHandler( taskId, - new TaskRunnerTimeoutError( - this.taskRunnersConfig.taskTimeout, - config.getEnv('deployment.type') !== 'cloud', - ), + new TaskRunnerTimeoutError({ + taskTimeout, + isSelfHosted: config.getEnv('deployment.type') !== 'cloud', + mode, + }), ); } diff --git a/packages/cli/src/runners/task-managers/task-manager.ts b/packages/cli/src/runners/task-managers/task-manager.ts index 66f07f7b0a..fd62dc2673 100644 --- a/packages/cli/src/runners/task-managers/task-manager.ts +++ b/packages/cli/src/runners/task-managers/task-manager.ts @@ -1,5 +1,6 @@ import type { TaskResultData, RequesterMessage, BrokerMessage, TaskData } from '@n8n/task-runner'; import { RPC_ALLOW_LIST } from '@n8n/task-runner'; +import { createResultOk, createResultError } from 'n8n-workflow'; import type { EnvProviderState, IExecuteFunctions, @@ -15,7 +16,6 @@ import type { IWorkflowExecuteAdditionalData, Result, } from 'n8n-workflow'; -import { createResultOk, createResultError } from 'n8n-workflow'; import { nanoid } from 'nanoid'; import { Service } from 'typedi'; @@ -158,6 +158,11 @@ export abstract class TaskManager { }); } + const { staticData: incomingStaticData } = resultData; + + // if the runner sent back static data, then it changed, so update it + if (incomingStaticData) workflow.overrideStaticData(incomingStaticData); + return createResultOk(resultData.result as TData); } catch (e: unknown) { return createResultError(e as TError); diff --git a/packages/cli/src/runners/task-runner-module.ts b/packages/cli/src/runners/task-runner-module.ts index 1502dd1f07..434daa066a 100644 --- a/packages/cli/src/runners/task-runner-module.ts +++ b/packages/cli/src/runners/task-runner-module.ts @@ -1,9 +1,14 @@ import { TaskRunnersConfig } from '@n8n/config'; +import { ErrorReporter } from 'n8n-core'; +import { sleep } from 'n8n-workflow'; import * as a from 'node:assert/strict'; import Container, { Service } from 'typedi'; import { OnShutdown } from '@/decorators/on-shutdown'; +import { Logger } from '@/logging/logger.service'; +import type { TaskRunnerRestartLoopError } from '@/runners/errors/task-runner-restart-loop-error'; import type { TaskRunnerProcess } from '@/runners/task-runner-process'; +import { TaskRunnerProcessRestartLoopDetector } from '@/runners/task-runner-process-restart-loop-detector'; import { MissingAuthTokenError } from './errors/missing-auth-token.error'; import { TaskRunnerWsServer } from './runner-ws-server'; @@ -25,7 +30,15 @@ export class TaskRunnerModule { private taskRunnerProcess: TaskRunnerProcess | undefined; - constructor(private readonly runnerConfig: TaskRunnersConfig) {} + private taskRunnerProcessRestartLoopDetector: 
TaskRunnerProcessRestartLoopDetector | undefined; + + constructor( + private readonly logger: Logger, + private readonly errorReporter: ErrorReporter, + private readonly runnerConfig: TaskRunnersConfig, + ) { + this.logger = this.logger.scoped('task-runner'); + } async start() { a.ok(this.runnerConfig.enabled, 'Task runner is disabled'); @@ -83,6 +96,14 @@ export class TaskRunnerModule { const { TaskRunnerProcess } = await import('@/runners/task-runner-process'); this.taskRunnerProcess = Container.get(TaskRunnerProcess); + this.taskRunnerProcessRestartLoopDetector = new TaskRunnerProcessRestartLoopDetector( + this.taskRunnerProcess, + ); + this.taskRunnerProcessRestartLoopDetector.on( + 'restart-loop-detected', + this.onRunnerRestartLoopDetected, + ); + await this.taskRunnerProcess.start(); const { InternalTaskRunnerDisconnectAnalyzer } = await import( @@ -92,4 +113,13 @@ export class TaskRunnerModule { Container.get(InternalTaskRunnerDisconnectAnalyzer), ); } + + private onRunnerRestartLoopDetected = async (error: TaskRunnerRestartLoopError) => { + this.logger.error(error.message); + this.errorReporter.error(error); + + // Allow some time for the error to be flushed + await sleep(1000); + process.exit(1); + }; } diff --git a/packages/cli/src/runners/task-runner-process-restart-loop-detector.ts b/packages/cli/src/runners/task-runner-process-restart-loop-detector.ts new file mode 100644 index 0000000000..5431cde195 --- /dev/null +++ b/packages/cli/src/runners/task-runner-process-restart-loop-detector.ts @@ -0,0 +1,73 @@ +import { Time } from '@/constants'; +import { TaskRunnerRestartLoopError } from '@/runners/errors/task-runner-restart-loop-error'; +import type { TaskRunnerProcess } from '@/runners/task-runner-process'; +import { TypedEmitter } from '@/typed-emitter'; + +const MAX_RESTARTS = 5; +const RESTARTS_WINDOW = 2 * Time.seconds.toMilliseconds; + +type TaskRunnerProcessRestartLoopDetectorEventMap = { + 'restart-loop-detected': TaskRunnerRestartLoopError; +}; + +/** + * A class to monitor the task runner process for restart loops + */ +export class TaskRunnerProcessRestartLoopDetector extends TypedEmitter { + /** + * How many times the process needs to restart for it to be detected + * being in a loop. + */ + private readonly maxCount = MAX_RESTARTS; + + /** + * The time interval in which the process needs to restart `maxCount` times + * to be detected as being in a loop. 
+ */ + private readonly restartsWindow = RESTARTS_WINDOW; + + private numRestarts = 0; + + /** Time when the first restart of a loop happened within a time window */ + private firstRestartedAt = Date.now(); + + constructor(private readonly taskRunnerProcess: TaskRunnerProcess) { + super(); + + this.taskRunnerProcess.on('exit', () => { + this.increment(); + + if (this.isMaxCountExceeded()) { + this.emit( + 'restart-loop-detected', + new TaskRunnerRestartLoopError(this.numRestarts, this.msSinceFirstIncrement()), + ); + } + }); + } + + /** + * Increments the counter + */ + private increment() { + const now = Date.now(); + if (now > this.firstRestartedAt + this.restartsWindow) { + this.reset(); + } + + this.numRestarts++; + } + + private reset() { + this.numRestarts = 0; + this.firstRestartedAt = Date.now(); + } + + private isMaxCountExceeded() { + return this.numRestarts >= this.maxCount; + } + + private msSinceFirstIncrement() { + return Date.now() - this.firstRestartedAt; + } +} diff --git a/packages/cli/src/runners/task-runner-server.ts b/packages/cli/src/runners/task-runner-server.ts index 18a48cf39d..62039faf74 100644 --- a/packages/cli/src/runners/task-runner-server.ts +++ b/packages/cli/src/runners/task-runner-server.ts @@ -133,11 +133,8 @@ export class TaskRunnerServer { // Augment errors sent to Sentry if (this.globalConfig.sentry.backendDsn) { - const { - Handlers: { requestHandler, errorHandler }, - } = await import('@sentry/node'); - app.use(requestHandler()); - app.use(errorHandler()); + const { setupExpressErrorHandler } = await import('@sentry/node'); + setupExpressErrorHandler(app); } } diff --git a/packages/cli/src/scaling/__tests__/job-processor.service.test.ts b/packages/cli/src/scaling/__tests__/job-processor.service.test.ts index 6a3fa5caa4..73264e6382 100644 --- a/packages/cli/src/scaling/__tests__/job-processor.service.test.ts +++ b/packages/cli/src/scaling/__tests__/job-processor.service.test.ts @@ -12,7 +12,14 @@ describe('JobProcessor', () => { executionRepository.findSingleExecution.mockResolvedValue( mock({ status: 'crashed' }), ); - const jobProcessor = new JobProcessor(mock(), executionRepository, mock(), mock(), mock()); + const jobProcessor = new JobProcessor( + mock(), + mock(), + executionRepository, + mock(), + mock(), + mock(), + ); const result = await jobProcessor.processJob(mock()); diff --git a/packages/cli/src/scaling/__tests__/scaling.service.test.ts b/packages/cli/src/scaling/__tests__/scaling.service.test.ts index 0b5f80da48..b400bf6dfb 100644 --- a/packages/cli/src/scaling/__tests__/scaling.service.test.ts +++ b/packages/cli/src/scaling/__tests__/scaling.service.test.ts @@ -5,7 +5,6 @@ import { InstanceSettings } from 'n8n-core'; import { ApplicationError } from 'n8n-workflow'; import Container from 'typedi'; -import type { OrchestrationService } from '@/services/orchestration.service'; import { mockInstance, mockLogger } from '@test/mocking'; import { JOB_TYPE_NAME, QUEUE_NAME } from '../constants'; @@ -47,7 +46,6 @@ describe('ScalingService', () => { }); const instanceSettings = Container.get(InstanceSettings); - const orchestrationService = mock({ isMultiMainSetupEnabled: false }); const jobProcessor = mock(); let scalingService: ScalingService; @@ -77,11 +75,12 @@ describe('ScalingService', () => { scalingService = new ScalingService( mockLogger(), mock(), + mock(), jobProcessor, globalConfig, mock(), instanceSettings, - orchestrationService, + mock(), mock(), ); diff --git a/packages/cli/src/scaling/job-processor.ts 
b/packages/cli/src/scaling/job-processor.ts index 6bf2524304..51b86c3922 100644 --- a/packages/cli/src/scaling/job-processor.ts +++ b/packages/cli/src/scaling/job-processor.ts @@ -1,12 +1,7 @@ import type { RunningJobSummary } from '@n8n/api-types'; -import { InstanceSettings, WorkflowExecute } from 'n8n-core'; +import { ErrorReporter, InstanceSettings, WorkflowExecute } from 'n8n-core'; import type { ExecutionStatus, IExecuteResponsePromiseData, IRun } from 'n8n-workflow'; -import { - BINARY_ENCODING, - ApplicationError, - Workflow, - ErrorReporterProxy as ErrorReporter, -} from 'n8n-workflow'; +import { BINARY_ENCODING, ApplicationError, Workflow } from 'n8n-workflow'; import type PCancelable from 'p-cancelable'; import { Service } from 'typedi'; @@ -35,6 +30,7 @@ export class JobProcessor { constructor( private readonly logger: Logger, + private readonly errorReporter: ErrorReporter, private readonly executionRepository: ExecutionRepository, private readonly workflowRepository: WorkflowRepository, private readonly nodeTypes: NodeTypes, @@ -155,7 +151,7 @@ export class JobProcessor { workflowExecute = new WorkflowExecute(additionalData, execution.mode, execution.data); workflowRun = workflowExecute.processRunExecutionData(workflow); } else { - ErrorReporter.info(`Worker found execution ${executionId} without data`); + this.errorReporter.info(`Worker found execution ${executionId} without data`); // Execute all nodes // Can execute without webhook so go on workflowExecute = new WorkflowExecute(additionalData, execution.mode); diff --git a/packages/cli/src/scaling/pubsub/publisher.service.ts b/packages/cli/src/scaling/pubsub/publisher.service.ts index 248a455e3e..fc007f76c0 100644 --- a/packages/cli/src/scaling/pubsub/publisher.service.ts +++ b/packages/cli/src/scaling/pubsub/publisher.service.ts @@ -4,6 +4,7 @@ import { Service } from 'typedi'; import config from '@/config'; import { Logger } from '@/logging/logger.service'; +import type { LogMetadata } from '@/logging/types'; import { RedisClientService } from '@/services/redis-client.service'; import type { PubSub } from './pubsub.types'; @@ -45,7 +46,7 @@ export class Publisher { // #region Publishing /** Publish a command into the `n8n.commands` channel. */ - async publishCommand(msg: Omit) { + async publishCommand(msg: PubSub.Command) { // @TODO: Once this class is only ever used in scaling mode, remove next line. if (config.getEnv('executions.mode') !== 'queue') return; @@ -59,7 +60,18 @@ export class Publisher { }), ); - this.logger.debug(`Published ${msg.command} to command channel`); + let msgName = msg.command; + + const metadata: LogMetadata = { msg: msg.command, channel: 'n8n.commands' }; + + if (msg.command === 'relay-execution-lifecycle-event') { + const { args, type } = msg.payload; + msgName += ` (${type})`; + metadata.type = type; + metadata.executionId = args.executionId; + } + + this.logger.debug(`Published pubsub msg: ${msgName}`, metadata); } /** Publish a response to a command into the `n8n.worker-response` channel. */ diff --git a/packages/cli/src/scaling/pubsub/pubsub.types.ts b/packages/cli/src/scaling/pubsub/pubsub.types.ts index eec0110201..501185d07e 100644 --- a/packages/cli/src/scaling/pubsub/pubsub.types.ts +++ b/packages/cli/src/scaling/pubsub/pubsub.types.ts @@ -23,10 +23,14 @@ export namespace PubSub { // ---------------------------------- type _ToCommand = { - senderId: string; - targets?: string[]; command: CommandKey; + /** Host ID of the sender, added during publishing. 
*/ + senderId?: string; + + /** Host IDs of the receivers. */ + targets?: string[]; + /** Whether the command should be sent to the sender as well. */ selfSend?: boolean; diff --git a/packages/cli/src/scaling/pubsub/subscriber.service.ts b/packages/cli/src/scaling/pubsub/subscriber.service.ts index ed673fc4e4..248c1198d2 100644 --- a/packages/cli/src/scaling/pubsub/subscriber.service.ts +++ b/packages/cli/src/scaling/pubsub/subscriber.service.ts @@ -7,6 +7,7 @@ import { Service } from 'typedi'; import config from '@/config'; import { EventService } from '@/events/event.service'; import { Logger } from '@/logging/logger.service'; +import type { LogMetadata } from '@/logging/types'; import { RedisClientService } from '@/services/redis-client.service'; import type { PubSub } from './pubsub.types'; @@ -72,7 +73,7 @@ export class Subscriber { }); if (!msg) { - this.logger.error(`Received malformed message via channel ${channel}`, { + this.logger.error('Received malformed pubsub message', { msg: str, channel, }); @@ -89,12 +90,18 @@ export class Subscriber { return null; } - const msgName = 'command' in msg ? msg.command : msg.response; + let msgName = 'command' in msg ? msg.command : msg.response; - this.logger.debug(`Received message ${msgName} via channel ${channel}`, { - msg, - channel, - }); + const metadata: LogMetadata = { msg: msgName, channel }; + + if ('command' in msg && msg.command === 'relay-execution-lifecycle-event') { + const { args, type } = msg.payload; + msgName += ` (${type})`; + metadata.type = type; + metadata.executionId = args.executionId; + } + + this.logger.debug(`Received pubsub msg: ${msgName}`, metadata); return msg; } diff --git a/packages/cli/src/scaling/scaling.service.ts b/packages/cli/src/scaling/scaling.service.ts index f7731e26c2..ebc8e4499c 100644 --- a/packages/cli/src/scaling/scaling.service.ts +++ b/packages/cli/src/scaling/scaling.service.ts @@ -1,13 +1,6 @@ import { GlobalConfig } from '@n8n/config'; -import { InstanceSettings } from 'n8n-core'; -import { - ApplicationError, - BINARY_ENCODING, - sleep, - jsonStringify, - ErrorReporterProxy, - ensureError, -} from 'n8n-workflow'; +import { ErrorReporter, InstanceSettings } from 'n8n-core'; +import { ApplicationError, BINARY_ENCODING, sleep, jsonStringify, ensureError } from 'n8n-workflow'; import type { IExecuteResponsePromiseData } from 'n8n-workflow'; import { strict } from 'node:assert'; import Container, { Service } from 'typedi'; @@ -43,6 +36,7 @@ export class ScalingService { constructor( private readonly logger: Logger, + private readonly errorReporter: ErrorReporter, private readonly activeExecutions: ActiveExecutions, private readonly jobProcessor: JobProcessor, private readonly globalConfig: GlobalConfig, @@ -72,9 +66,11 @@ export class ScalingService { this.registerListeners(); - if (this.instanceSettings.isLeader) this.scheduleQueueRecovery(); + const { isLeader, isMultiMain } = this.instanceSettings; - if (this.orchestrationService.isMultiMainSetupEnabled) { + if (isLeader) this.scheduleQueueRecovery(); + + if (isMultiMain) { this.orchestrationService.multiMainSetup .on('leader-takeover', () => this.scheduleQueueRecovery()) .on('leader-stepdown', () => this.stopQueueRecovery()); @@ -119,7 +115,7 @@ export class ScalingService { await job.progress(msg); - ErrorReporterProxy.error(error, { executionId }); + this.errorReporter.error(error, { executionId }); throw error; } @@ -133,7 +129,7 @@ export class ScalingService { } private async stopMain() { - if 
(this.orchestrationService.isSingleMainSetup) { + if (this.instanceSettings.isSingleMain) { await this.queue.pause(true, true); // no more jobs will be picked up this.logger.debug('Queue paused'); } @@ -379,7 +375,7 @@ export class ScalingService { return ( this.globalConfig.endpoints.metrics.includeQueueMetrics && this.instanceSettings.instanceType === 'main' && - !this.orchestrationService.isMultiMainSetupEnabled + this.instanceSettings.isSingleMain ); } diff --git a/packages/cli/src/server.ts b/packages/cli/src/server.ts index 06d2dbe4f8..74a1311444 100644 --- a/packages/cli/src/server.ts +++ b/packages/cli/src/server.ts @@ -32,7 +32,6 @@ import { setupPushServer, setupPushHandler, Push } from '@/push'; import type { APIRequest } from '@/requests'; import * as ResponseHelper from '@/response-helper'; import type { FrontendService } from '@/services/frontend.service'; -import { OrchestrationService } from '@/services/orchestration.service'; import '@/controllers/active-workflows.controller'; import '@/controllers/annotation-tags.controller.ee'; @@ -79,7 +78,6 @@ export class Server extends AbstractServer { constructor( private readonly loadNodesAndCredentials: LoadNodesAndCredentials, - private readonly orchestrationService: OrchestrationService, private readonly postHogClient: PostHogClient, private readonly eventService: EventService, private readonly instanceSettings: InstanceSettings, @@ -111,7 +109,7 @@ export class Server extends AbstractServer { } private async registerAdditionalControllers() { - if (!inProduction && this.orchestrationService.isMultiMainSetupEnabled) { + if (!inProduction && this.instanceSettings.isMultiMain) { await import('@/controllers/debug.controller'); } diff --git a/packages/cli/src/services/__tests__/credentials-tester.service.test.ts b/packages/cli/src/services/__tests__/credentials-tester.service.test.ts index 4da925c532..60c49b7773 100644 --- a/packages/cli/src/services/__tests__/credentials-tester.service.test.ts +++ b/packages/cli/src/services/__tests__/credentials-tester.service.test.ts @@ -8,7 +8,13 @@ import { CredentialsTester } from '@/services/credentials-tester.service'; describe('CredentialsTester', () => { const credentialTypes = mock(); const nodeTypes = mock(); - const credentialsTester = new CredentialsTester(mock(), credentialTypes, nodeTypes, mock()); + const credentialsTester = new CredentialsTester( + mock(), + mock(), + credentialTypes, + nodeTypes, + mock(), + ); beforeEach(() => { jest.clearAllMocks(); diff --git a/packages/cli/src/services/credentials-tester.service.ts b/packages/cli/src/services/credentials-tester.service.ts index 30504e464b..4a999d6541 100644 --- a/packages/cli/src/services/credentials-tester.service.ts +++ b/packages/cli/src/services/credentials-tester.service.ts @@ -4,7 +4,7 @@ /* eslint-disable @typescript-eslint/no-unsafe-return */ /* eslint-disable @typescript-eslint/no-unsafe-call */ import get from 'lodash/get'; -import { NodeExecuteFunctions } from 'n8n-core'; +import { ErrorReporter, NodeExecuteFunctions, RoutingNode } from 'n8n-core'; import type { ICredentialsDecrypted, ICredentialTestFunction, @@ -23,14 +23,7 @@ import type { ICredentialTestFunctions, IDataObject, } from 'n8n-workflow'; -import { - VersionedNodeType, - NodeHelpers, - RoutingNode, - Workflow, - ErrorReporterProxy as ErrorReporter, - ApplicationError, -} from 'n8n-workflow'; +import { VersionedNodeType, NodeHelpers, Workflow, ApplicationError } from 'n8n-workflow'; import { Service } from 'typedi'; import { CredentialTypes } from 
'@/credential-types'; @@ -75,6 +68,7 @@ const mockNodeTypes: INodeTypes = { export class CredentialsTester { constructor( private readonly logger: Logger, + private readonly errorReporter: ErrorReporter, private readonly credentialTypes: CredentialTypes, private readonly nodeTypes: NodeTypes, private readonly credentialsHelper: CredentialsHelper, @@ -312,11 +306,10 @@ export class CredentialsTester { runIndex, nodeTypeCopy, { node, data: {}, source: null }, - NodeExecuteFunctions, credentialsDecrypted, ); } catch (error) { - ErrorReporter.error(error); + this.errorReporter.error(error); // Do not fail any requests to allow custom error messages and // make logic easier if (error.cause?.response) { diff --git a/packages/cli/src/services/dynamic-node-parameters.service.ts b/packages/cli/src/services/dynamic-node-parameters.service.ts index eb6ecc5f67..a20d63b5fa 100644 --- a/packages/cli/src/services/dynamic-node-parameters.service.ts +++ b/packages/cli/src/services/dynamic-node-parameters.service.ts @@ -1,4 +1,4 @@ -import { LoadOptionsContext, NodeExecuteFunctions } from 'n8n-core'; +import { LoadOptionsContext, RoutingNode } from 'n8n-core'; import type { ILoadOptions, ILoadOptionsFunctions, @@ -18,7 +18,7 @@ import type { NodeParameterValueType, IDataObject, } from 'n8n-workflow'; -import { Workflow, RoutingNode, ApplicationError } from 'n8n-workflow'; +import { Workflow, ApplicationError } from 'n8n-workflow'; import { Service } from 'typedi'; import { NodeTypes } from '@/node-types'; @@ -105,13 +105,11 @@ export class DynamicNodeParametersService { main: [[{ json: {} }]], }; - const optionsData = await routingNode.runNode( - inputData, - runIndex, - tempNode, - { node, source: null, data: {} }, - NodeExecuteFunctions, - ); + const optionsData = await routingNode.runNode(inputData, runIndex, tempNode, { + node, + source: null, + data: {}, + }); if (optionsData?.length === 0) { return []; diff --git a/packages/cli/src/services/frontend.service.ts b/packages/cli/src/services/frontend.service.ts index 79d04b2263..1645e98304 100644 --- a/packages/cli/src/services/frontend.service.ts +++ b/packages/cli/src/services/frontend.service.ts @@ -5,7 +5,6 @@ import { mkdir } from 'fs/promises'; import uniq from 'lodash/uniq'; import { InstanceSettings } from 'n8n-core'; import type { ICredentialType, INodeTypeBaseDescription } from 'n8n-workflow'; -import fs from 'node:fs'; import path from 'path'; import { Container, Service } from 'typedi'; @@ -83,7 +82,7 @@ export class FrontendService { this.settings = { inE2ETests, - isDocker: this.isDocker(), + isDocker: this.instanceSettings.isDocker, databaseType: this.globalConfig.database.type, previewMode: process.env.N8N_PREVIEW_MODE === 'true', endpointForm: this.globalConfig.endpoints.form, @@ -231,7 +230,7 @@ export class FrontendService { blockFileAccessToN8nFiles: this.securityConfig.blockFileAccessToN8nFiles, }, betaFeatures: this.frontendConfig.betaFeatures, - virtualSchemaView: config.getEnv('virtualSchemaView'), + easyAIWorkflowOnboarded: false, }; } @@ -274,6 +273,11 @@ export class FrontendService { } this.settings.banners.dismissed = dismissedBanners; + try { + this.settings.easyAIWorkflowOnboarded = config.getEnv('easyAIWorkflowOnboarded') ?? 
false; + } catch { + this.settings.easyAIWorkflowOnboarded = false; + } const isS3Selected = config.getEnv('binaryDataManager.mode') === 's3'; const isS3Available = config.getEnv('binaryDataManager.availableModes').includes('s3'); @@ -387,20 +391,4 @@ export class FrontendService { } } } - - /** - * Whether this instance is running inside a Docker container. - * - * Based on: https://github.com/sindresorhus/is-docker - */ - private isDocker() { - try { - return ( - fs.existsSync('/.dockerenv') || - fs.readFileSync('/proc/self/cgroup', 'utf8').includes('docker') - ); - } catch { - return false; - } - } } diff --git a/packages/cli/src/services/orchestration.service.ts b/packages/cli/src/services/orchestration.service.ts index 19da88e412..adf03d97bf 100644 --- a/packages/cli/src/services/orchestration.service.ts +++ b/packages/cli/src/services/orchestration.service.ts @@ -22,29 +22,6 @@ export class OrchestrationService { isInitialized = false; - private isMultiMainSetupLicensed = false; - - setMultiMainSetupLicensed(newState: boolean) { - this.isMultiMainSetupLicensed = newState; - } - - get isMultiMainSetupEnabled() { - return ( - config.getEnv('executions.mode') === 'queue' && - this.globalConfig.multiMainSetup.enabled && - this.instanceSettings.instanceType === 'main' && - this.isMultiMainSetupLicensed - ); - } - - get isSingleMainSetup() { - return !this.isMultiMainSetupEnabled; - } - - sanityCheck() { - return this.isInitialized && config.get('executions.mode') === 'queue'; - } - async init() { if (this.isInitialized) return; @@ -56,7 +33,7 @@ export class OrchestrationService { this.subscriber = Container.get(Subscriber); } - if (this.isMultiMainSetupEnabled) { + if (this.instanceSettings.isMultiMain) { await this.multiMainSetup.init(); } else { this.instanceSettings.markAsLeader(); @@ -69,7 +46,7 @@ export class OrchestrationService { async shutdown() { if (!this.isInitialized) return; - if (this.isMultiMainSetupEnabled) await this.multiMainSetup.shutdown(); + if (this.instanceSettings.isMultiMain) await this.multiMainSetup.shutdown(); this.publisher.shutdown(); this.subscriber.shutdown(); diff --git a/packages/cli/src/services/pruning/__tests__/pruning.service.test.ts b/packages/cli/src/services/pruning/__tests__/pruning.service.test.ts index 42fe73dd5e..050d1b82d9 100644 --- a/packages/cli/src/services/pruning/__tests__/pruning.service.test.ts +++ b/packages/cli/src/services/pruning/__tests__/pruning.service.test.ts @@ -17,11 +17,10 @@ describe('PruningService', () => { it('should start pruning on main instance that is the leader', () => { const pruningService = new PruningService( mockLogger(), - mock({ isLeader: true }), + mock({ isLeader: true, isMultiMain: true }), mock(), mock(), mock({ - isMultiMainSetupEnabled: true, multiMainSetup: mock(), }), mock(), @@ -36,11 +35,10 @@ describe('PruningService', () => { it('should not start pruning on main instance that is a follower', () => { const pruningService = new PruningService( mockLogger(), - mock({ isLeader: false }), + mock({ isLeader: false, isMultiMain: true }), mock(), mock(), mock({ - isMultiMainSetupEnabled: true, multiMainSetup: mock(), }), mock(), @@ -55,11 +53,10 @@ describe('PruningService', () => { it('should register leadership events if main on multi-main setup', () => { const pruningService = new PruningService( mockLogger(), - mock({ isLeader: true }), + mock({ isLeader: true, isMultiMain: true }), mock(), mock(), mock({ - isMultiMainSetupEnabled: true, multiMainSetup: mock({ on: jest.fn() }), }), mock(), @@ -85,11 
+82,10 @@ describe('PruningService', () => { it('should return `true` based on config if leader main', () => { const pruningService = new PruningService( mockLogger(), - mock({ isLeader: true, instanceType: 'main' }), + mock({ isLeader: true, instanceType: 'main', isMultiMain: true }), mock(), mock(), mock({ - isMultiMainSetupEnabled: true, multiMainSetup: mock(), }), mock({ pruneData: true }), @@ -101,11 +97,10 @@ describe('PruningService', () => { it('should return `false` based on config if leader main', () => { const pruningService = new PruningService( mockLogger(), - mock({ isLeader: true, instanceType: 'main' }), + mock({ isLeader: true, instanceType: 'main', isMultiMain: true }), mock(), mock(), mock({ - isMultiMainSetupEnabled: true, multiMainSetup: mock(), }), mock({ pruneData: false }), @@ -117,11 +112,10 @@ describe('PruningService', () => { it('should return `false` if non-main even if config is enabled', () => { const pruningService = new PruningService( mockLogger(), - mock({ isLeader: false, instanceType: 'worker' }), + mock({ isLeader: false, instanceType: 'worker', isMultiMain: true }), mock(), mock(), mock({ - isMultiMainSetupEnabled: true, multiMainSetup: mock(), }), mock({ pruneData: true }), @@ -133,11 +127,15 @@ describe('PruningService', () => { it('should return `false` if follower main even if config is enabled', () => { const pruningService = new PruningService( mockLogger(), - mock({ isLeader: false, isFollower: true, instanceType: 'main' }), + mock({ + isLeader: false, + isFollower: true, + instanceType: 'main', + isMultiMain: true, + }), mock(), mock(), mock({ - isMultiMainSetupEnabled: true, multiMainSetup: mock(), }), mock({ pruneData: true }), @@ -151,11 +149,10 @@ describe('PruningService', () => { it('should not start pruning if service is disabled', () => { const pruningService = new PruningService( mockLogger(), - mock({ isLeader: true, instanceType: 'main' }), + mock({ isLeader: true, instanceType: 'main', isMultiMain: true }), mock(), mock(), mock({ - isMultiMainSetupEnabled: true, multiMainSetup: mock(), }), mock({ pruneData: false }), @@ -179,11 +176,10 @@ describe('PruningService', () => { it('should start pruning if service is enabled and DB is migrated', () => { const pruningService = new PruningService( mockLogger(), - mock({ isLeader: true, instanceType: 'main' }), + mock({ isLeader: true, instanceType: 'main', isMultiMain: true }), mock(), mock(), mock({ - isMultiMainSetupEnabled: true, multiMainSetup: mock(), }), mock({ pruneData: true }), diff --git a/packages/cli/src/services/pruning/pruning.service.ts b/packages/cli/src/services/pruning/pruning.service.ts index 3006d3fbd9..a7bc56725d 100644 --- a/packages/cli/src/services/pruning/pruning.service.ts +++ b/packages/cli/src/services/pruning/pruning.service.ts @@ -51,7 +51,7 @@ export class PruningService { if (this.instanceSettings.isLeader) this.startPruning(); - if (this.orchestrationService.isMultiMainSetupEnabled) { + if (this.instanceSettings.isMultiMain) { this.orchestrationService.multiMainSetup.on('leader-takeover', () => this.startPruning()); this.orchestrationService.multiMainSetup.on('leader-stepdown', () => this.stopPruning()); } diff --git a/packages/cli/src/shutdown/__tests__/shutdown.service.test.ts b/packages/cli/src/shutdown/__tests__/shutdown.service.test.ts index 9c2f5b4887..26d6471584 100644 --- a/packages/cli/src/shutdown/__tests__/shutdown.service.test.ts +++ b/packages/cli/src/shutdown/__tests__/shutdown.service.test.ts @@ -1,5 +1,6 @@ import { mock } from 
'jest-mock-extended'; -import { ApplicationError, ErrorReporterProxy } from 'n8n-workflow'; +import type { ErrorReporter } from 'n8n-core'; +import { ApplicationError } from 'n8n-workflow'; import Container from 'typedi'; import type { ServiceClass } from '@/shutdown/shutdown.service'; @@ -13,14 +14,13 @@ describe('ShutdownService', () => { let shutdownService: ShutdownService; let mockComponent: MockComponent; let onShutdownSpy: jest.SpyInstance; - let mockErrorReporterProxy: jest.SpyInstance; + const errorReporter = mock(); beforeEach(() => { - shutdownService = new ShutdownService(mock()); + shutdownService = new ShutdownService(mock(), errorReporter); mockComponent = new MockComponent(); Container.set(MockComponent, mockComponent); onShutdownSpy = jest.spyOn(mockComponent, 'onShutdown'); - mockErrorReporterProxy = jest.spyOn(ErrorReporterProxy, 'error').mockImplementation(() => {}); }); describe('shutdown', () => { @@ -83,8 +83,8 @@ describe('ShutdownService', () => { shutdownService.shutdown(); await shutdownService.waitForShutdown(); - expect(mockErrorReporterProxy).toHaveBeenCalledTimes(1); - const error = mockErrorReporterProxy.mock.calls[0][0]; + expect(errorReporter.error).toHaveBeenCalledTimes(1); + const error = errorReporter.error.mock.calls[0][0] as ApplicationError; expect(error).toBeInstanceOf(ApplicationError); expect(error.message).toBe('Failed to shutdown gracefully'); expect(error.extra).toEqual({ diff --git a/packages/cli/src/shutdown/shutdown.service.ts b/packages/cli/src/shutdown/shutdown.service.ts index 1bedc3a7d4..8ff8570757 100644 --- a/packages/cli/src/shutdown/shutdown.service.ts +++ b/packages/cli/src/shutdown/shutdown.service.ts @@ -1,5 +1,5 @@ -import type { Class } from 'n8n-core'; -import { ApplicationError, ErrorReporterProxy, assert } from 'n8n-workflow'; +import { type Class, ErrorReporter } from 'n8n-core'; +import { ApplicationError, assert } from 'n8n-workflow'; import { Container, Service } from 'typedi'; import { LOWEST_SHUTDOWN_PRIORITY, HIGHEST_SHUTDOWN_PRIORITY } from '@/constants'; @@ -31,7 +31,10 @@ export class ShutdownService { private shutdownPromise: Promise | undefined; - constructor(private readonly logger: Logger) {} + constructor( + private readonly logger: Logger, + private readonly errorReporter: ErrorReporter, + ) {} /** Registers given listener to be notified when the application is shutting down */ register(priority: number, handler: ShutdownHandler) { @@ -108,7 +111,7 @@ export class ShutdownService { await method.call(service); } catch (error) { assert(error instanceof Error); - ErrorReporterProxy.error(new ComponentShutdownError(name, error)); + this.errorReporter.error(new ComponentShutdownError(name, error)); } } } diff --git a/packages/cli/src/user-management/email/node-mailer.ts b/packages/cli/src/user-management/email/node-mailer.ts index 661c3fed7f..a35ab77318 100644 --- a/packages/cli/src/user-management/email/node-mailer.ts +++ b/packages/cli/src/user-management/email/node-mailer.ts @@ -1,6 +1,6 @@ import { GlobalConfig } from '@n8n/config'; import { pick } from 'lodash'; -import { ErrorReporterProxy as ErrorReporter } from 'n8n-workflow'; +import { ErrorReporter } from 'n8n-core'; import path from 'node:path'; import type { Transporter } from 'nodemailer'; import { createTransport } from 'nodemailer'; @@ -20,6 +20,7 @@ export class NodeMailer { constructor( globalConfig: GlobalConfig, private readonly logger: Logger, + private readonly errorReporter: ErrorReporter, ) { const smtpConfig = 
globalConfig.userManagement.emails.smtp; const transportConfig: SMTPConnection.Options = pick(smtpConfig, ['host', 'port', 'secure']); @@ -66,7 +67,7 @@ export class NodeMailer { `Email sent successfully to the following recipients: ${mailData.emailRecipients.toString()}`, ); } catch (error) { - ErrorReporter.error(error); + this.errorReporter.error(error); this.logger.error('Failed to send email', { recipients: mailData.emailRecipients, error: error as Error, diff --git a/packages/cli/src/wait-tracker.ts b/packages/cli/src/wait-tracker.ts index 7035db3cbe..f42905ace1 100644 --- a/packages/cli/src/wait-tracker.ts +++ b/packages/cli/src/wait-tracker.ts @@ -2,6 +2,7 @@ import { InstanceSettings } from 'n8n-core'; import { ApplicationError, type IWorkflowExecutionDataProcess } from 'n8n-workflow'; import { Service } from 'typedi'; +import { ActiveExecutions } from '@/active-executions'; import { ExecutionRepository } from '@/databases/repositories/execution.repository'; import { Logger } from '@/logging/logger.service'; import { OrchestrationService } from '@/services/orchestration.service'; @@ -23,6 +24,7 @@ export class WaitTracker { private readonly logger: Logger, private readonly executionRepository: ExecutionRepository, private readonly ownershipService: OwnershipService, + private readonly activeExecutions: ActiveExecutions, private readonly workflowRunner: WorkflowRunner, private readonly orchestrationService: OrchestrationService, private readonly instanceSettings: InstanceSettings, @@ -38,12 +40,11 @@ export class WaitTracker { * @important Requires `OrchestrationService` to be initialized. */ init() { - const { isLeader } = this.instanceSettings; - const { isMultiMainSetupEnabled } = this.orchestrationService; + const { isLeader, isMultiMain } = this.instanceSettings; if (isLeader) this.startTracking(); - if (isMultiMainSetupEnabled) { + if (isMultiMain) { this.orchestrationService.multiMainSetup .on('leader-takeover', () => this.startTracking()) .on('leader-stepdown', () => this.stopTracking()); @@ -133,6 +134,14 @@ export class WaitTracker { // Start the execution again await this.workflowRunner.run(data, false, false, executionId); + + const { parentExecution } = fullExecutionData.data; + if (parentExecution) { + // on child execution completion, resume parent execution + void this.activeExecutions.getPostExecutePromise(executionId).then(() => { + void this.startExecution(parentExecution.executionId); + }); + } } stopTracking() { diff --git a/packages/cli/src/webhooks/__tests__/test-webhook-registrations.service.test.ts b/packages/cli/src/webhooks/__tests__/test-webhook-registrations.service.test.ts index 0642a5eaa5..b3b4515d68 100644 --- a/packages/cli/src/webhooks/__tests__/test-webhook-registrations.service.test.ts +++ b/packages/cli/src/webhooks/__tests__/test-webhook-registrations.service.test.ts @@ -1,7 +1,7 @@ import { mock } from 'jest-mock-extended'; +import type { InstanceSettings } from 'n8n-core'; import type { CacheService } from '@/services/cache/cache.service'; -import type { OrchestrationService } from '@/services/orchestration.service'; import type { TestWebhookRegistration } from '@/webhooks/test-webhook-registrations.service'; import { TestWebhookRegistrationsService } from '@/webhooks/test-webhook-registrations.service'; @@ -9,7 +9,7 @@ describe('TestWebhookRegistrationsService', () => { const cacheService = mock(); const registrations = new TestWebhookRegistrationsService( cacheService, - mock({ isMultiMainSetupEnabled: false }), + mock({ isMultiMain: false 
}), ); const registration = mock({ diff --git a/packages/cli/src/webhooks/__tests__/test-webhooks.test.ts b/packages/cli/src/webhooks/__tests__/test-webhooks.test.ts index 50f5bc2f12..a07d4fc0fd 100644 --- a/packages/cli/src/webhooks/__tests__/test-webhooks.test.ts +++ b/packages/cli/src/webhooks/__tests__/test-webhooks.test.ts @@ -18,6 +18,7 @@ import type { } from '@/webhooks/test-webhook-registrations.service'; import { TestWebhooks } from '@/webhooks/test-webhooks'; import * as WebhookHelpers from '@/webhooks/webhook-helpers'; +import type { WebhookService } from '@/webhooks/webhook.service'; import type { WebhookRequest } from '@/webhooks/webhook.types'; import * as AdditionalData from '@/workflow-execute-additional-data'; @@ -38,13 +39,20 @@ const webhook = mock({ userId, }); -const registrations = mock(); - -let testWebhooks: TestWebhooks; - describe('TestWebhooks', () => { + const registrations = mock(); + const webhookService = mock(); + + const testWebhooks = new TestWebhooks( + mock(), + mock(), + registrations, + mock(), + mock(), + webhookService, + ); + beforeAll(() => { - testWebhooks = new TestWebhooks(mock(), mock(), registrations, mock(), mock()); jest.useFakeTimers(); }); @@ -68,7 +76,7 @@ describe('TestWebhooks', () => { const needsWebhook = await testWebhooks.needsWebhook(args); const [registerOrder] = registrations.register.mock.invocationCallOrder; - const [createOrder] = workflow.createWebhookIfNotExists.mock.invocationCallOrder; + const [createOrder] = webhookService.createWebhookIfNotExists.mock.invocationCallOrder; expect(registerOrder).toBeLessThan(createOrder); expect(needsWebhook).toBe(true); @@ -132,11 +140,11 @@ describe('TestWebhooks', () => { // ASSERT const [registerOrder] = registrations.register.mock.invocationCallOrder; - const [createOrder] = workflow.createWebhookIfNotExists.mock.invocationCallOrder; + const [createOrder] = webhookService.createWebhookIfNotExists.mock.invocationCallOrder; expect(registerOrder).toBeLessThan(createOrder); expect(registrations.register.mock.calls[0][0].webhook.node).toBe(webhook2.node); - expect(workflow.createWebhookIfNotExists.mock.calls[0][0].node).toBe(webhook2.node); + expect(webhookService.createWebhookIfNotExists.mock.calls[0][1].node).toBe(webhook2.node); expect(needsWebhook).toBe(true); }); }); diff --git a/packages/cli/src/webhooks/__tests__/waiting-forms.test.ts b/packages/cli/src/webhooks/__tests__/waiting-forms.test.ts index bec6f95d7f..f342095b77 100644 --- a/packages/cli/src/webhooks/__tests__/waiting-forms.test.ts +++ b/packages/cli/src/webhooks/__tests__/waiting-forms.test.ts @@ -6,7 +6,7 @@ import { WaitingForms } from '@/webhooks/waiting-forms'; describe('WaitingForms', () => { const executionRepository = mock(); - const waitingWebhooks = new WaitingForms(mock(), mock(), executionRepository); + const waitingWebhooks = new WaitingForms(mock(), mock(), executionRepository, mock()); beforeEach(() => { jest.restoreAllMocks(); diff --git a/packages/cli/src/webhooks/__tests__/waiting-webhooks.test.ts b/packages/cli/src/webhooks/__tests__/waiting-webhooks.test.ts index 892d87e773..72fe654c55 100644 --- a/packages/cli/src/webhooks/__tests__/waiting-webhooks.test.ts +++ b/packages/cli/src/webhooks/__tests__/waiting-webhooks.test.ts @@ -10,7 +10,7 @@ import type { WaitingWebhookRequest } from '@/webhooks/webhook.types'; describe('WaitingWebhooks', () => { const executionRepository = mock(); - const waitingWebhooks = new WaitingWebhooks(mock(), mock(), executionRepository); + const waitingWebhooks = new 
WaitingWebhooks(mock(), mock(), executionRepository, mock()); beforeEach(() => { jest.restoreAllMocks(); diff --git a/packages/cli/src/webhooks/__tests__/webhook.service.test.ts b/packages/cli/src/webhooks/__tests__/webhook.service.test.ts index 534c36bca0..46ffb82dc6 100644 --- a/packages/cli/src/webhooks/__tests__/webhook.service.test.ts +++ b/packages/cli/src/webhooks/__tests__/webhook.service.test.ts @@ -1,11 +1,14 @@ +import { mock } from 'jest-mock-extended'; +import type { INode, INodeType, IWebhookData, IWorkflowExecuteAdditionalData } from 'n8n-workflow'; +import { Workflow } from 'n8n-workflow'; import { v4 as uuid } from 'uuid'; import config from '@/config'; import { WebhookEntity } from '@/databases/entities/webhook-entity'; -import { WebhookRepository } from '@/databases/repositories/webhook.repository'; -import { CacheService } from '@/services/cache/cache.service'; +import type { WebhookRepository } from '@/databases/repositories/webhook.repository'; +import type { NodeTypes } from '@/node-types'; +import type { CacheService } from '@/services/cache/cache.service'; import { WebhookService } from '@/webhooks/webhook.service'; -import { mockInstance } from '@test/mocking'; const createWebhook = (method: string, path: string, webhookId?: string, pathSegments?: number) => Object.assign(new WebhookEntity(), { @@ -16,9 +19,11 @@ const createWebhook = (method: string, path: string, webhookId?: string, pathSeg }) as WebhookEntity; describe('WebhookService', () => { - const webhookRepository = mockInstance(WebhookRepository); - const cacheService = mockInstance(CacheService); - const webhookService = new WebhookService(webhookRepository, cacheService); + const webhookRepository = mock(); + const cacheService = mock(); + const nodeTypes = mock(); + const webhookService = new WebhookService(mock(), webhookRepository, cacheService, nodeTypes); + const additionalData = mock(); beforeEach(() => { config.load(config.default); @@ -188,4 +193,171 @@ describe('WebhookService', () => { expect(webhookRepository.upsert).toHaveBeenCalledWith(mockWebhook, ['method', 'webhookPath']); }); }); + + describe('getNodeWebhooks()', () => { + const workflow = new Workflow({ + id: 'test-workflow', + nodes: [], + connections: {}, + active: true, + nodeTypes, + }); + + test('should return empty array if node is disabled', async () => { + const node = { disabled: true } as INode; + + const webhooks = webhookService.getNodeWebhooks(workflow, node, additionalData); + + expect(webhooks).toEqual([]); + }); + + test('should return webhooks for node with webhook definitions', async () => { + const node = { + name: 'Webhook', + type: 'n8n-nodes-base.webhook', + disabled: false, + } as INode; + + const nodeType = { + description: { + webhooks: [ + { + name: 'default', + httpMethod: 'GET', + path: '/webhook', + isFullPath: false, + restartWebhook: false, + }, + ], + }, + } as INodeType; + + nodeTypes.getByNameAndVersion.mockReturnValue(nodeType); + + const webhooks = webhookService.getNodeWebhooks(workflow, node, additionalData); + + expect(webhooks).toHaveLength(1); + expect(webhooks[0]).toMatchObject({ + httpMethod: 'GET', + node: 'Webhook', + workflowId: 'test-workflow', + }); + }); + }); + + describe('createWebhookIfNotExists()', () => { + const workflow = new Workflow({ + id: 'test-workflow', + nodes: [ + mock({ + name: 'Webhook', + type: 'n8n-nodes-base.webhook', + typeVersion: 1, + parameters: {}, + }), + ], + connections: {}, + active: false, + nodeTypes, + }); + + const webhookData = mock({ + node: 'Webhook', 
+ webhookDescription: { + name: 'default', + httpMethod: 'GET', + path: '/webhook', + }, + }); + + const defaultWebhookMethods = { + checkExists: jest.fn(), + create: jest.fn(), + }; + + const nodeType = mock({ + webhookMethods: { default: defaultWebhookMethods }, + }); + + test('should create webhook if it does not exist', async () => { + defaultWebhookMethods.checkExists.mockResolvedValue(false); + defaultWebhookMethods.create.mockResolvedValue(true); + nodeTypes.getByNameAndVersion.mockReturnValue(nodeType); + + await webhookService.createWebhookIfNotExists(workflow, webhookData, 'trigger', 'init'); + + expect(defaultWebhookMethods.checkExists).toHaveBeenCalled(); + expect(defaultWebhookMethods.create).toHaveBeenCalled(); + }); + + test('should not create webhook if it already exists', async () => { + defaultWebhookMethods.checkExists.mockResolvedValue(true); + nodeTypes.getByNameAndVersion.mockReturnValue(nodeType); + + await webhookService.createWebhookIfNotExists(workflow, webhookData, 'trigger', 'init'); + + expect(defaultWebhookMethods.checkExists).toHaveBeenCalled(); + expect(defaultWebhookMethods.create).not.toHaveBeenCalled(); + }); + + test('should handle case when webhook methods are not defined', async () => { + nodeTypes.getByNameAndVersion.mockReturnValue({} as INodeType); + + await webhookService.createWebhookIfNotExists(workflow, webhookData, 'trigger', 'init'); + // Test passes if no error is thrown when webhook methods are undefined + }); + }); + + describe('deleteWebhook()', () => { + test('should call runWebhookMethod with delete', async () => { + const workflow = mock(); + const webhookData = mock(); + const runWebhookMethodSpy = jest.spyOn(webhookService as any, 'runWebhookMethod'); + + await webhookService.deleteWebhook(workflow, webhookData, 'trigger', 'init'); + + expect(runWebhookMethodSpy).toHaveBeenCalledWith( + 'delete', + workflow, + webhookData, + 'trigger', + 'init', + ); + }); + }); + + describe('runWebhook()', () => { + const workflow = mock(); + const webhookData = mock(); + const node = mock(); + const responseData = { workflowData: [] }; + + test('should throw error if node does not have webhooks', async () => { + const nodeType = {} as INodeType; + nodeTypes.getByNameAndVersion.mockReturnValue(nodeType); + + await expect( + webhookService.runWebhook(workflow, webhookData, node, additionalData, 'trigger', null), + ).rejects.toThrow('Node does not have any webhooks defined'); + }); + + test('should execute webhook and return response data', async () => { + const nodeType = mock({ + webhook: jest.fn().mockResolvedValue(responseData), + }); + nodeTypes.getByNameAndVersion.mockReturnValue(nodeType); + + const result = await webhookService.runWebhook( + workflow, + webhookData, + node, + additionalData, + 'trigger', + null, + ); + + expect(result).toEqual(responseData); + expect(nodeType.webhook).toHaveBeenCalled(); + }); + }); }); diff --git a/packages/cli/src/webhooks/live-webhooks.ts b/packages/cli/src/webhooks/live-webhooks.ts index 458701caee..6d6fc9161d 100644 --- a/packages/cli/src/webhooks/live-webhooks.ts +++ b/packages/cli/src/webhooks/live-webhooks.ts @@ -1,5 +1,5 @@ import type { Response } from 'express'; -import { Workflow, NodeHelpers, CHAT_TRIGGER_NODE_TYPE } from 'n8n-workflow'; +import { Workflow, CHAT_TRIGGER_NODE_TYPE } from 'n8n-workflow'; import type { INode, IWebhookData, IHttpRequestMethods } from 'n8n-workflow'; import { Service } from 'typedi'; @@ -114,11 +114,9 @@ export class LiveWebhooks implements IWebhookManager { const 
additionalData = await WorkflowExecuteAdditionalData.getBase(); - const webhookData = NodeHelpers.getNodeWebhooks( - workflow, - workflow.getNode(webhook.node) as INode, - additionalData, - ).find((w) => w.httpMethod === httpMethod && w.path === webhook.webhookPath) as IWebhookData; + const webhookData = this.webhookService + .getNodeWebhooks(workflow, workflow.getNode(webhook.node) as INode, additionalData) + .find((w) => w.httpMethod === httpMethod && w.path === webhook.webhookPath) as IWebhookData; // Get the node which has the webhook defined to know where to start from and to // get additional data diff --git a/packages/cli/src/webhooks/test-webhook-registrations.service.ts b/packages/cli/src/webhooks/test-webhook-registrations.service.ts index 6a3e205f58..e25b3102db 100644 --- a/packages/cli/src/webhooks/test-webhook-registrations.service.ts +++ b/packages/cli/src/webhooks/test-webhook-registrations.service.ts @@ -1,10 +1,10 @@ +import { InstanceSettings } from 'n8n-core'; import type { IWebhookData } from 'n8n-workflow'; import { Service } from 'typedi'; import { TEST_WEBHOOK_TIMEOUT, TEST_WEBHOOK_TIMEOUT_BUFFER } from '@/constants'; import type { IWorkflowDb } from '@/interfaces'; import { CacheService } from '@/services/cache/cache.service'; -import { OrchestrationService } from '@/services/orchestration.service'; export type TestWebhookRegistration = { pushRef?: string; @@ -17,7 +17,7 @@ export type TestWebhookRegistration = { export class TestWebhookRegistrationsService { constructor( private readonly cacheService: CacheService, - private readonly orchestrationService: OrchestrationService, + private readonly instanceSettings: InstanceSettings, ) {} private readonly cacheKey = 'test-webhooks'; @@ -27,7 +27,7 @@ export class TestWebhookRegistrationsService { await this.cacheService.setHash(this.cacheKey, { [hashKey]: registration }); - if (!this.orchestrationService.isMultiMainSetupEnabled) return; + if (this.instanceSettings.isSingleMain) return; /** * Multi-main setup: In a manual webhook execution, the main process that diff --git a/packages/cli/src/webhooks/test-webhooks.ts b/packages/cli/src/webhooks/test-webhooks.ts index 2bdf94b312..ad642a17c3 100644 --- a/packages/cli/src/webhooks/test-webhooks.ts +++ b/packages/cli/src/webhooks/test-webhooks.ts @@ -1,5 +1,5 @@ import type express from 'express'; -import * as NodeExecuteFunctions from 'n8n-core'; +import { InstanceSettings } from 'n8n-core'; import { WebhookPathTakenError, Workflow } from 'n8n-workflow'; import type { IWebhookData, @@ -17,7 +17,6 @@ import type { IWorkflowDb } from '@/interfaces'; import { NodeTypes } from '@/node-types'; import { Push } from '@/push'; import { Publisher } from '@/scaling/pubsub/publisher.service'; -import { OrchestrationService } from '@/services/orchestration.service'; import { removeTrailingSlash } from '@/utils'; import type { TestWebhookRegistration } from '@/webhooks/test-webhook-registrations.service'; import { TestWebhookRegistrationsService } from '@/webhooks/test-webhook-registrations.service'; @@ -25,6 +24,7 @@ import * as WebhookHelpers from '@/webhooks/webhook-helpers'; import * as WorkflowExecuteAdditionalData from '@/workflow-execute-additional-data'; import type { WorkflowRequest } from '@/workflows/workflow.request'; +import { WebhookService } from './webhook.service'; import type { IWebhookResponseCallbackData, IWebhookManager, @@ -42,8 +42,9 @@ export class TestWebhooks implements IWebhookManager { private readonly push: Push, private readonly nodeTypes: NodeTypes, 
private readonly registrations: TestWebhookRegistrationsService, - private readonly orchestrationService: OrchestrationService, + private readonly instanceSettings: InstanceSettings, private readonly publisher: Publisher, + private readonly webhookService: WebhookService, ) {} private timeouts: { [webhookKey: string]: NodeJS.Timeout } = {}; @@ -155,7 +156,7 @@ export class TestWebhooks implements IWebhookManager { * the handler process commands the creator process to clear its test webhooks. */ if ( - this.orchestrationService.isMultiMainSetupEnabled && + this.instanceSettings.isMultiMain && pushRef && !this.push.getBackend().hasPushRef(pushRef) ) { @@ -314,7 +315,7 @@ export class TestWebhooks implements IWebhookManager { */ await this.registrations.register(registration); - await workflow.createWebhookIfNotExists(webhook, NodeExecuteFunctions, 'manual', 'manual'); + await this.webhookService.createWebhookIfNotExists(workflow, webhook, 'manual', 'manual'); cacheableWebhook.staticData = workflow.staticData; @@ -431,7 +432,7 @@ export class TestWebhooks implements IWebhookManager { if (staticData) workflow.staticData = staticData; - await workflow.deleteWebhook(webhook, NodeExecuteFunctions, 'internal', 'update'); + await this.webhookService.deleteWebhook(workflow, webhook, 'internal', 'update'); } await this.registrations.deregisterAll(); diff --git a/packages/cli/src/webhooks/waiting-webhooks.ts b/packages/cli/src/webhooks/waiting-webhooks.ts index 3176bbdf2d..6355709189 100644 --- a/packages/cli/src/webhooks/waiting-webhooks.ts +++ b/packages/cli/src/webhooks/waiting-webhooks.ts @@ -3,7 +3,6 @@ import { FORM_NODE_TYPE, type INodes, type IWorkflowBase, - NodeHelpers, SEND_AND_WAIT_OPERATION, WAIT_NODE_TYPE, Workflow, @@ -19,6 +18,7 @@ import { NodeTypes } from '@/node-types'; import * as WebhookHelpers from '@/webhooks/webhook-helpers'; import * as WorkflowExecuteAdditionalData from '@/workflow-execute-additional-data'; +import { WebhookService } from './webhook.service'; import type { IWebhookResponseCallbackData, IWebhookManager, @@ -38,6 +38,7 @@ export class WaitingWebhooks implements IWebhookManager { protected readonly logger: Logger, protected readonly nodeTypes: NodeTypes, private readonly executionRepository: ExecutionRepository, + private readonly webhookService: WebhookService, ) {} // TODO: implement `getWebhookMethods` for CORS support @@ -164,17 +165,15 @@ export class WaitingWebhooks implements IWebhookManager { } const additionalData = await WorkflowExecuteAdditionalData.getBase(); - const webhookData = NodeHelpers.getNodeWebhooks( - workflow, - workflowStartNode, - additionalData, - ).find( - (webhook) => - webhook.httpMethod === req.method && - webhook.path === (suffix ?? '') && - webhook.webhookDescription.restartWebhook === true && - (webhook.webhookDescription.isForm || false) === this.includeForms, - ); + const webhookData = this.webhookService + .getNodeWebhooks(workflow, workflowStartNode, additionalData) + .find( + (webhook) => + webhook.httpMethod === req.method && + webhook.path === (suffix ?? '') && + webhook.webhookDescription.restartWebhook === true && + (webhook.webhookDescription.isForm || false) === this.includeForms, + ); if (webhookData === undefined) { // If no data got found it means that the execution can not be started via a webhook. 
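
For context on the refactor in the surrounding hunks: call sites that previously went through `NodeHelpers.getNodeWebhooks` and `workflow.createWebhookIfNotExists(..., NodeExecuteFunctions, ...)` now delegate to the injectable `WebhookService`. The snippet below is a minimal usage sketch, not part of the diff; it assumes the `getNodeWebhooks` signature and `IWebhookData` shape shown in `webhook.service.ts`, and the names `workflow`, `node`, and `additionalData` are placeholders for values already in scope at a real call site.

```ts
// Illustrative sketch only: webhook lookup via the DI-managed WebhookService,
// mirroring the pattern this diff introduces in live-webhooks.ts and
// waiting-webhooks.ts (replacing NodeHelpers.getNodeWebhooks at call sites).
import { Container } from 'typedi';
import type {
  IHttpRequestMethods,
  INode,
  IWebhookData,
  IWorkflowExecuteAdditionalData,
  Workflow,
} from 'n8n-workflow';

import { WebhookService } from '@/webhooks/webhook.service';

function findNodeWebhook(
  workflow: Workflow,
  node: INode,
  additionalData: IWorkflowExecuteAdditionalData,
  httpMethod: IHttpRequestMethods,
  path: string,
): IWebhookData | undefined {
  // The service resolves node types internally (it injects NodeTypes),
  // so callers no longer need to pass NodeExecuteFunctions around.
  return Container.get(WebhookService)
    .getNodeWebhooks(workflow, node, additionalData)
    .find((w) => w.httpMethod === httpMethod && w.path === path);
}
```

One apparent benefit of this design, judging from the diff, is that webhook setup/teardown (`createWebhookIfNotExists`, `deleteWebhook`, `runWebhook`) no longer lives on `Workflow` itself, which lets the CLI package mock `WebhookService` directly in tests such as `test-webhooks.test.ts` above.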
diff --git a/packages/cli/src/webhooks/webhook-helpers.ts b/packages/cli/src/webhooks/webhook-helpers.ts index 0dd8f576a4..6657089881 100644 --- a/packages/cli/src/webhooks/webhook-helpers.ts +++ b/packages/cli/src/webhooks/webhook-helpers.ts @@ -9,7 +9,7 @@ import { GlobalConfig } from '@n8n/config'; import type express from 'express'; import get from 'lodash/get'; -import { BinaryDataService, NodeExecuteFunctions } from 'n8n-core'; +import { BinaryDataService, ErrorReporter } from 'n8n-core'; import type { IBinaryData, IBinaryKeyData, @@ -33,11 +33,8 @@ import { ApplicationError, BINARY_ENCODING, createDeferredPromise, - ErrorReporterProxy as ErrorReporter, - ErrorReporterProxy, ExecutionCancelledError, FORM_NODE_TYPE, - NodeHelpers, NodeOperationError, } from 'n8n-workflow'; import { finished } from 'stream/promises'; @@ -48,16 +45,18 @@ import type { Project } from '@/databases/entities/project'; import { InternalServerError } from '@/errors/response-errors/internal-server.error'; import { NotFoundError } from '@/errors/response-errors/not-found.error'; import { UnprocessableRequestError } from '@/errors/response-errors/unprocessable.error'; -import type { IExecutionDb, IWorkflowDb } from '@/interfaces'; +import type { IWorkflowDb } from '@/interfaces'; import { Logger } from '@/logging/logger.service'; import { parseBody } from '@/middlewares'; import { OwnershipService } from '@/services/ownership.service'; import { WorkflowStatisticsService } from '@/services/workflow-statistics.service'; +import { WaitTracker } from '@/wait-tracker'; import { createMultiFormDataParser } from '@/webhooks/webhook-form-data'; import * as WorkflowExecuteAdditionalData from '@/workflow-execute-additional-data'; import * as WorkflowHelpers from '@/workflow-helpers'; import { WorkflowRunner } from '@/workflow-runner'; +import { WebhookService } from './webhook.service'; import type { IWebhookResponseCallbackData, WebhookRequest } from './webhook.types'; /** @@ -89,7 +88,12 @@ export function getWorkflowWebhooks( } returnData.push.apply( returnData, - NodeHelpers.getNodeWebhooks(workflow, node, additionalData, ignoreRestartWebhooks), + Container.get(WebhookService).getNodeWebhooks( + workflow, + node, + additionalData, + ignoreRestartWebhooks, + ), ); } @@ -255,11 +259,11 @@ export async function executeWebhook( } try { - webhookResultData = await workflow.runWebhook( + webhookResultData = await Container.get(WebhookService).runWebhook( + workflow, webhookData, workflowStartNode, additionalData, - NodeExecuteFunctions, executionMode, runExecutionData ?? 
null, ); @@ -279,7 +283,7 @@ export async function executeWebhook( errorMessage = err.message; } - ErrorReporterProxy.error(err, { + Container.get(ErrorReporter).error(err, { extra: { nodeName: workflowStartNode.name, nodeType: workflowStartNode.type, @@ -520,7 +524,7 @@ export async function executeWebhook( didSendResponse = true; }) .catch(async (error) => { - ErrorReporter.error(error); + Container.get(ErrorReporter).error(error); Container.get(Logger).error( `Error with Webhook-Response for execution "${executionId}": "${error.message}"`, { executionId, workflowId: workflow.id }, @@ -548,11 +552,21 @@ export async function executeWebhook( { executionId }, ); + const activeExecutions = Container.get(ActiveExecutions); + + // Get a promise which resolves when the workflow did execute and send then response + const executePromise = activeExecutions.getPostExecutePromise(executionId); + + const { parentExecution } = runExecutionData; + if (parentExecution) { + // on child execution completion, resume parent execution + void executePromise.then(() => { + const waitTracker = Container.get(WaitTracker); + void waitTracker.startExecution(parentExecution.executionId); + }); + } + if (!didSendResponse) { - // Get a promise which resolves when the workflow did execute and send then response - const executePromise = Container.get(ActiveExecutions).getPostExecutePromise( - executionId, - ) as Promise; executePromise // eslint-disable-next-line complexity .then(async (data) => { diff --git a/packages/cli/src/webhooks/webhook.service.ts b/packages/cli/src/webhooks/webhook.service.ts index 8e72f0abf9..80b12b04cd 100644 --- a/packages/cli/src/webhooks/webhook.service.ts +++ b/packages/cli/src/webhooks/webhook.service.ts @@ -1,8 +1,23 @@ -import type { IHttpRequestMethods } from 'n8n-workflow'; +import { HookContext, WebhookContext } from 'n8n-core'; +import { ApplicationError, Node, NodeHelpers } from 'n8n-workflow'; +import type { + IHttpRequestMethods, + INode, + IRunExecutionData, + IWebhookData, + IWebhookResponseData, + IWorkflowExecuteAdditionalData, + WebhookSetupMethodNames, + Workflow, + WorkflowActivateMode, + WorkflowExecuteMode, +} from 'n8n-workflow'; import { Service } from 'typedi'; import type { WebhookEntity } from '@/databases/entities/webhook-entity'; import { WebhookRepository } from '@/databases/repositories/webhook.repository'; +import { Logger } from '@/logging/logger.service'; +import { NodeTypes } from '@/node-types'; import { CacheService } from '@/services/cache/cache.service'; type Method = NonNullable; @@ -10,8 +25,10 @@ type Method = NonNullable; @Service() export class WebhookService { constructor( - private webhookRepository: WebhookRepository, - private cacheService: CacheService, + private readonly logger: Logger, + private readonly webhookRepository: WebhookRepository, + private readonly cacheService: CacheService, + private readonly nodeTypes: NodeTypes, ) {} async populateCache() { @@ -118,4 +135,210 @@ export class WebhookService { .find({ select: ['method'], where: { webhookPath: path } }) .then((rows) => rows.map((r) => r.method)); } + + /** + * Returns all the webhooks which should be created for the give node + */ + getNodeWebhooks( + workflow: Workflow, + node: INode, + additionalData: IWorkflowExecuteAdditionalData, + ignoreRestartWebhooks = false, + ): IWebhookData[] { + if (node.disabled === true) { + // Node is disabled so webhooks will also not be enabled + return []; + } + + const nodeType = this.nodeTypes.getByNameAndVersion(node.type, 
node.typeVersion); + + if (nodeType.description.webhooks === undefined) { + // Node does not have any webhooks so return + return []; + } + + const workflowId = workflow.id || '__UNSAVED__'; + const mode = 'internal'; + + const returnData: IWebhookData[] = []; + for (const webhookDescription of nodeType.description.webhooks) { + if (ignoreRestartWebhooks && webhookDescription.restartWebhook === true) { + continue; + } + + let nodeWebhookPath = workflow.expression.getSimpleParameterValue( + node, + webhookDescription.path, + mode, + {}, + ); + if (nodeWebhookPath === undefined) { + this.logger.error( + `No webhook path could be found for node "${node.name}" in workflow "${workflowId}".`, + ); + continue; + } + + nodeWebhookPath = nodeWebhookPath.toString(); + + if (nodeWebhookPath.startsWith('/')) { + nodeWebhookPath = nodeWebhookPath.slice(1); + } + if (nodeWebhookPath.endsWith('/')) { + nodeWebhookPath = nodeWebhookPath.slice(0, -1); + } + + const isFullPath: boolean = workflow.expression.getSimpleParameterValue( + node, + webhookDescription.isFullPath, + 'internal', + {}, + undefined, + false, + ) as boolean; + const restartWebhook: boolean = workflow.expression.getSimpleParameterValue( + node, + webhookDescription.restartWebhook, + 'internal', + {}, + undefined, + false, + ) as boolean; + const path = NodeHelpers.getNodeWebhookPath( + workflowId, + node, + nodeWebhookPath, + isFullPath, + restartWebhook, + ); + + const webhookMethods = workflow.expression.getSimpleParameterValue( + node, + webhookDescription.httpMethod, + mode, + {}, + undefined, + 'GET', + ); + + if (webhookMethods === undefined) { + this.logger.error( + `The webhook "${path}" for node "${node.name}" in workflow "${workflowId}" could not be added because the httpMethod is not defined.`, + ); + continue; + } + + let webhookId: string | undefined; + if ((path.startsWith(':') || path.includes('/:')) && node.webhookId) { + webhookId = node.webhookId; + } + + String(webhookMethods) + .split(',') + .forEach((httpMethod) => { + if (!httpMethod) return; + returnData.push({ + httpMethod: httpMethod.trim() as IHttpRequestMethods, + node: node.name, + path, + webhookDescription, + workflowId, + workflowExecuteAdditionalData: additionalData, + webhookId, + }); + }); + } + + return returnData; + } + + async createWebhookIfNotExists( + workflow: Workflow, + webhookData: IWebhookData, + mode: WorkflowExecuteMode, + activation: WorkflowActivateMode, + ): Promise { + const webhookExists = await this.runWebhookMethod( + 'checkExists', + workflow, + webhookData, + mode, + activation, + ); + if (!webhookExists) { + // If webhook does not exist yet create it + await this.runWebhookMethod('create', workflow, webhookData, mode, activation); + } + } + + async deleteWebhook( + workflow: Workflow, + webhookData: IWebhookData, + mode: WorkflowExecuteMode, + activation: WorkflowActivateMode, + ) { + await this.runWebhookMethod('delete', workflow, webhookData, mode, activation); + } + + private async runWebhookMethod( + method: WebhookSetupMethodNames, + workflow: Workflow, + webhookData: IWebhookData, + mode: WorkflowExecuteMode, + activation: WorkflowActivateMode, + ): Promise { + const node = workflow.getNode(webhookData.node); + + if (!node) return; + + const nodeType = this.nodeTypes.getByNameAndVersion(node.type, node.typeVersion); + + const webhookFn = nodeType.webhookMethods?.[webhookData.webhookDescription.name]?.[method]; + if (webhookFn === undefined) return; + + const context = new HookContext( + workflow, + node, + 
webhookData.workflowExecuteAdditionalData, + mode, + activation, + webhookData, + ); + + return (await webhookFn.call(context)) as boolean; + } + + /** + * Executes the webhook data to see what it should return and if the + * workflow should be started or not + */ + async runWebhook( + workflow: Workflow, + webhookData: IWebhookData, + node: INode, + additionalData: IWorkflowExecuteAdditionalData, + mode: WorkflowExecuteMode, + runExecutionData: IRunExecutionData | null, + ): Promise { + const nodeType = this.nodeTypes.getByNameAndVersion(node.type, node.typeVersion); + if (nodeType.webhook === undefined) { + throw new ApplicationError('Node does not have any webhooks defined', { + extra: { nodeName: node.name }, + }); + } + + const context = new WebhookContext( + workflow, + node, + additionalData, + mode, + webhookData, + [], + runExecutionData ?? null, + ); + + return nodeType instanceof Node + ? await nodeType.webhook(context) + : ((await nodeType.webhook.call(context)) as IWebhookResponseData); + } } diff --git a/packages/cli/src/workflow-execute-additional-data.ts b/packages/cli/src/workflow-execute-additional-data.ts index 2588a442d9..a97bb3d3fa 100644 --- a/packages/cli/src/workflow-execute-additional-data.ts +++ b/packages/cli/src/workflow-execute-additional-data.ts @@ -1,19 +1,12 @@ /* eslint-disable @typescript-eslint/no-unsafe-argument */ /* eslint-disable @typescript-eslint/no-use-before-define */ - /* eslint-disable @typescript-eslint/no-unsafe-member-access */ /* eslint-disable @typescript-eslint/no-unsafe-assignment */ import type { PushType } from '@n8n/api-types'; import { GlobalConfig } from '@n8n/config'; import { stringify } from 'flatted'; -import { WorkflowExecute } from 'n8n-core'; -import { - ApplicationError, - ErrorReporterProxy as ErrorReporter, - NodeOperationError, - Workflow, - WorkflowHooks, -} from 'n8n-workflow'; +import { ErrorReporter, WorkflowExecute } from 'n8n-core'; +import { ApplicationError, NodeOperationError, Workflow, WorkflowHooks } from 'n8n-workflow'; import type { IDataObject, IExecuteData, @@ -215,7 +208,7 @@ export function executeErrorWorkflow( ); }) .catch((error: Error) => { - ErrorReporter.error(error); + Container.get(ErrorReporter).error(error); logger.error( `Could not execute ErrorWorkflow for execution ID ${this.executionId} because of error querying the workflow owner`, { @@ -423,7 +416,7 @@ function hookFunctionsSave(): IWorkflowExecuteHooks { newStaticData, ); } catch (e) { - ErrorReporter.error(e); + Container.get(ErrorReporter).error(e); logger.error( `There was a problem saving the workflow with id "${this.workflowData.id}" to save changed staticData: "${e.message}" (hookFunctionsSave)`, { executionId: this.executionId, workflowId: this.workflowData.id }, @@ -502,7 +495,7 @@ function hookFunctionsSave(): IWorkflowExecuteHooks { ); } } catch (error) { - ErrorReporter.error(error); + Container.get(ErrorReporter).error(error); logger.error(`Failed saving execution data to DB on execution ID ${this.executionId}`, { executionId: this.executionId, workflowId: this.workflowData.id, @@ -584,7 +577,7 @@ function hookFunctionsSaveWorker(): IWorkflowExecuteHooks { newStaticData, ); } catch (e) { - ErrorReporter.error(e); + Container.get(ErrorReporter).error(e); logger.error( `There was a problem saving the workflow with id "${this.workflowData.id}" to save changed staticData: "${e.message}" (workflowExecuteAfter)`, { pushRef: this.pushRef, workflowId: this.workflowData.id }, @@ -653,7 +646,7 @@ function hookFunctionsSaveWorker(): 
IWorkflowExecuteHooks { this.executionId, ]); } catch (error) { - ErrorReporter.error(error); + Container.get(ErrorReporter).error(error); Container.get(Logger).error( 'There was a problem running hook "workflow.postExecute"', error, @@ -709,6 +702,7 @@ export async function getRunData( waitingExecution: {}, waitingExecutionSource: {}, }, + parentExecution, }; return { @@ -944,6 +938,7 @@ async function startExecution( return { executionId, data: returnData!.data!.main, + waitTill: data.waitTill, }; } activeExecutions.finalizeExecution(executionId, data); @@ -1034,9 +1029,6 @@ export async function getBase( mode: WorkflowExecuteMode, envProviderState: EnvProviderState, executeData?: IExecuteData, - defaultReturnRunIndex?: number, - selfData?: IDataObject, - contextNodeName?: string, ) { return await Container.get(TaskManager).startTask( additionalData, @@ -1055,9 +1047,6 @@ export async function getBase( mode, envProviderState, executeData, - defaultReturnRunIndex, - selfData, - contextNodeName, ); }, logAiEvent: (eventName: keyof AiEventMap, payload: AiEventPayload) => diff --git a/packages/cli/src/workflow-helpers.ts b/packages/cli/src/workflow-helpers.ts index 7cbce6b8a2..addae4e290 100644 --- a/packages/cli/src/workflow-helpers.ts +++ b/packages/cli/src/workflow-helpers.ts @@ -7,9 +7,7 @@ import type { NodeApiError, WorkflowExecuteMode, WorkflowOperationError, - Workflow, NodeOperationError, - IWorkflowExecutionDataProcess, } from 'n8n-workflow'; import { Container } from 'typedi'; import { v4 as uuid } from 'uuid'; @@ -223,18 +221,6 @@ export async function replaceInvalidCredentials(workflow: WorkflowEntity): Promi return workflow; } -export function getExecutionStartNode(data: IWorkflowExecutionDataProcess, workflow: Workflow) { - let startNode; - if ( - data.startNodes?.length === 1 && - Object.keys(data.pinData ?? {}).includes(data.startNodes[0].name) - ) { - startNode = workflow.getNode(data.startNodes[0].name) ?? 
undefined; - } - - return startNode; -} - export async function getVariables(): Promise { const variables = await Container.get(VariablesService).getAllCached(); return Object.freeze( diff --git a/packages/cli/src/workflow-runner.ts b/packages/cli/src/workflow-runner.ts index 19ae201a8d..973d512e62 100644 --- a/packages/cli/src/workflow-runner.ts +++ b/packages/cli/src/workflow-runner.ts @@ -2,14 +2,7 @@ /* eslint-disable @typescript-eslint/no-unsafe-member-access */ /* eslint-disable @typescript-eslint/no-shadow */ /* eslint-disable @typescript-eslint/no-unsafe-assignment */ -import * as a from 'assert/strict'; -import { - DirectedGraph, - InstanceSettings, - WorkflowExecute, - filterDisabledNodes, - recreateNodeExecutionStack, -} from 'n8n-core'; +import { ErrorReporter, InstanceSettings, WorkflowExecute } from 'n8n-core'; import type { ExecutionError, IDeferredPromise, @@ -19,13 +12,8 @@ import type { WorkflowExecuteMode, WorkflowHooks, IWorkflowExecutionDataProcess, - IRunExecutionData, -} from 'n8n-workflow'; -import { - ErrorReporterProxy as ErrorReporter, - ExecutionCancelledError, - Workflow, } from 'n8n-workflow'; +import { ExecutionCancelledError, Workflow } from 'n8n-workflow'; import PCancelable from 'p-cancelable'; import { Container, Service } from 'typedi'; @@ -39,12 +27,12 @@ import type { ScalingService } from '@/scaling/scaling.service'; import type { Job, JobData } from '@/scaling/scaling.types'; import { PermissionChecker } from '@/user-management/permission-checker'; import * as WorkflowExecuteAdditionalData from '@/workflow-execute-additional-data'; -import * as WorkflowHelpers from '@/workflow-helpers'; import { generateFailedExecutionFromError } from '@/workflow-helpers'; import { WorkflowStaticDataService } from '@/workflows/workflow-static-data.service'; import { ExecutionNotFoundError } from './errors/execution-not-found-error'; import { EventService } from './events/event.service'; +import { ManualExecutionService } from './manual-execution.service'; @Service() export class WorkflowRunner { @@ -54,6 +42,7 @@ export class WorkflowRunner { constructor( private readonly logger: Logger, + private readonly errorReporter: ErrorReporter, private readonly activeExecutions: ActiveExecutions, private readonly executionRepository: ExecutionRepository, private readonly externalHooks: ExternalHooks, @@ -62,6 +51,7 @@ export class WorkflowRunner { private readonly permissionChecker: PermissionChecker, private readonly eventService: EventService, private readonly instanceSettings: InstanceSettings, + private readonly manualExecutionService: ManualExecutionService, ) {} /** The process did error */ @@ -81,7 +71,7 @@ export class WorkflowRunner { return; } - ErrorReporter.error(error, { executionId }); + this.errorReporter.error(error, { executionId }); const isQueueMode = config.getEnv('executions.mode') === 'queue'; @@ -192,14 +182,14 @@ export class WorkflowRunner { executionId, ]); } catch (error) { - ErrorReporter.error(error); + this.errorReporter.error(error); this.logger.error('There was a problem running hook "workflow.postExecute"', error); } } }) .catch((error) => { if (error instanceof ExecutionCancelledError) return; - ErrorReporter.error(error); + this.errorReporter.error(error); this.logger.error( 'There was a problem running internal hook "onWorkflowPostExecute"', error, @@ -295,88 +285,14 @@ export class WorkflowRunner { data.executionData, ); workflowExecution = workflowExecute.processRunExecutionData(workflow); - } else if (data.triggerToStartFrom?.data && 
data.startNodes && !data.destinationNode) { - this.logger.debug( - `Execution ID ${executionId} had triggerToStartFrom. Starting from that trigger.`, - { executionId }, - ); - const startNodes = data.startNodes.map((data) => { - const node = workflow.getNode(data.name); - a.ok(node, `Could not find a node named "${data.name}" in the workflow.`); - return node; - }); - const runData = { [data.triggerToStartFrom.name]: [data.triggerToStartFrom.data] }; - - const { nodeExecutionStack, waitingExecution, waitingExecutionSource } = - recreateNodeExecutionStack( - filterDisabledNodes(DirectedGraph.fromWorkflow(workflow)), - new Set(startNodes), - runData, - data.pinData ?? {}, - ); - const executionData: IRunExecutionData = { - resultData: { runData, pinData }, - executionData: { - contextData: {}, - metadata: {}, - nodeExecutionStack, - waitingExecution, - waitingExecutionSource, - }, - }; - - const workflowExecute = new WorkflowExecute(additionalData, 'manual', executionData); - workflowExecution = workflowExecute.processRunExecutionData(workflow); - } else if ( - data.runData === undefined || - data.startNodes === undefined || - data.startNodes.length === 0 - ) { - // Full Execution - // TODO: When the old partial execution logic is removed this block can - // be removed and the previous one can be merged into - // `workflowExecute.runPartialWorkflow2`. - // Partial executions then require either a destination node from which - // everything else can be derived, or a triggerToStartFrom with - // triggerData. - this.logger.debug(`Execution ID ${executionId} will run executing all nodes.`, { - executionId, - }); - // Execute all nodes - - const startNode = WorkflowHelpers.getExecutionStartNode(data, workflow); - - // Can execute without webhook so go on - const workflowExecute = new WorkflowExecute(additionalData, data.executionMode); - workflowExecution = workflowExecute.run( - workflow, - startNode, - data.destinationNode, - data.pinData, - ); } else { - // Partial Execution - this.logger.debug(`Execution ID ${executionId} is a partial execution.`, { executionId }); - // Execute only the nodes between start and destination nodes - const workflowExecute = new WorkflowExecute(additionalData, data.executionMode); - - if (data.partialExecutionVersion === '1') { - workflowExecution = workflowExecute.runPartialWorkflow2( - workflow, - data.runData, - data.pinData, - data.dirtyNodeNames, - data.destinationNode, - ); - } else { - workflowExecution = workflowExecute.runPartialWorkflow( - workflow, - data.runData, - data.startNodes, - data.destinationNode, - data.pinData, - ); - } + workflowExecution = this.manualExecutionService.runManually( + data, + workflow, + additionalData, + executionId, + pinData, + ); } this.activeExecutions.attachWorkflowExecution(executionId, workflowExecution); diff --git a/packages/cli/src/workflows/__tests__/workflow-execution.service.test.ts b/packages/cli/src/workflows/__tests__/workflow-execution.service.test.ts index 35228dcfd4..3d0bec39de 100644 --- a/packages/cli/src/workflows/__tests__/workflow-execution.service.test.ts +++ b/packages/cli/src/workflows/__tests__/workflow-execution.service.test.ts @@ -57,6 +57,7 @@ describe('WorkflowExecutionService', () => { mock(), mock(), mock(), + mock(), workflowRunner, mock(), mock(), diff --git a/packages/cli/src/workflows/workflow-execution.service.ts b/packages/cli/src/workflows/workflow-execution.service.ts index 1df4af2f76..27b673c245 100644 --- a/packages/cli/src/workflows/workflow-execution.service.ts +++ 
b/packages/cli/src/workflows/workflow-execution.service.ts @@ -1,4 +1,5 @@ import { GlobalConfig } from '@n8n/config'; +import { ErrorReporter } from 'n8n-core'; import type { IDeferredPromise, IExecuteData, @@ -11,11 +12,7 @@ import type { WorkflowExecuteMode, IWorkflowExecutionDataProcess, } from 'n8n-workflow'; -import { - SubworkflowOperationError, - Workflow, - ErrorReporterProxy as ErrorReporter, -} from 'n8n-workflow'; +import { SubworkflowOperationError, Workflow } from 'n8n-workflow'; import { Service } from 'typedi'; import type { Project } from '@/databases/entities/project'; @@ -36,6 +33,7 @@ import type { WorkflowRequest } from '@/workflows/workflow.request'; export class WorkflowExecutionService { constructor( private readonly logger: Logger, + private readonly errorReporter: ErrorReporter, private readonly executionRepository: ExecutionRepository, private readonly workflowRepository: WorkflowRepository, private readonly nodeTypes: NodeTypes, @@ -293,7 +291,7 @@ export class WorkflowExecutionService { await this.workflowRunner.run(runData); } catch (error) { - ErrorReporter.error(error); + this.errorReporter.error(error); this.logger.error( // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access `Calling Error Workflow for "${workflowErrorData.workflow.id}": "${error.message}"`, diff --git a/packages/cli/src/workflows/workflow-static-data.service.ts b/packages/cli/src/workflows/workflow-static-data.service.ts index 10655b77c7..3e5159dc9a 100644 --- a/packages/cli/src/workflows/workflow-static-data.service.ts +++ b/packages/cli/src/workflows/workflow-static-data.service.ts @@ -1,5 +1,6 @@ import { GlobalConfig } from '@n8n/config'; -import { type IDataObject, type Workflow, ErrorReporterProxy as ErrorReporter } from 'n8n-workflow'; +import { ErrorReporter } from 'n8n-core'; +import type { IDataObject, Workflow } from 'n8n-workflow'; import { Service } from 'typedi'; import { WorkflowRepository } from '@/databases/repositories/workflow.repository'; @@ -11,6 +12,7 @@ export class WorkflowStaticDataService { constructor( private readonly globalConfig: GlobalConfig, private readonly logger: Logger, + private readonly errorReporter: ErrorReporter, private readonly workflowRepository: WorkflowRepository, ) {} @@ -33,7 +35,7 @@ export class WorkflowStaticDataService { await this.saveStaticDataById(workflow.id, workflow.staticData); workflow.staticData.__dataChanged = false; } catch (error) { - ErrorReporter.error(error); + this.errorReporter.error(error); this.logger.error( // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access `There was a problem saving the workflow with id "${workflow.id}" to save changed Data: "${error.message}"`, diff --git a/packages/cli/src/workflows/workflow.service.ts b/packages/cli/src/workflows/workflow.service.ts index bce8770303..7220e1a640 100644 --- a/packages/cli/src/workflows/workflow.service.ts +++ b/packages/cli/src/workflows/workflow.service.ts @@ -66,6 +66,20 @@ export class WorkflowService { let { workflows, count } = await this.workflowRepository.getMany(sharedWorkflowIds, options); if (hasSharing(workflows)) { + // Since we're filtering using project ID as part of the relation, + // we end up filtering out all the other relations, meaning that if + // it's shared to a project, it won't be able to find the home project. + // To solve this, we have to get all the relation now, even though + // we're deleting them later. 
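The comment above describes the problem the next hunk solves: once the query filters the sharing relation by `projectId`, the relations left on each workflow no longer include the one pointing at its home project, so `homeProject` comes back empty. The fix is to re-fetch every relation for the returned workflows and attach them before ownership is resolved. A minimal, self-contained sketch of that re-attachment step, with the entity shapes simplified and only a `workflowId` field assumed on the relation:

```typescript
// Illustrative sketch only: regroup the freshly fetched relations by workflow id
// and attach them, so ownership resolution can see the home-project relation again.
interface SharedWorkflowRelation {
	workflowId: string;
	projectId: string;
}

interface ListedWorkflow {
	id: string;
	shared?: SharedWorkflowRelation[];
}

function reattachRelations(
	workflows: ListedWorkflow[],
	relations: SharedWorkflowRelation[],
): void {
	// Index the relations once instead of filtering the full list per workflow.
	const byWorkflowId = new Map<string, SharedWorkflowRelation[]>();
	for (const relation of relations) {
		const bucket = byWorkflowId.get(relation.workflowId) ?? [];
		bucket.push(relation);
		byWorkflowId.set(relation.workflowId, bucket);
	}
	for (const workflow of workflows) {
		workflow.shared = byWorkflowId.get(workflow.id) ?? [];
	}
}
```

The hunk below achieves the same thing with an `Array.prototype.filter` per workflow, which is perfectly adequate for page-sized result sets; the `Map` grouping here is just one way to keep the step linear if the lists ever grow.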
+ if (typeof options?.filter?.projectId === 'string' && options.filter.projectId !== '') { + const relations = await this.sharedWorkflowRepository.getAllRelationsForWorkflows( + workflows.map((c) => c.id), + ); + workflows.forEach((c) => { + c.shared = relations.filter((r) => r.workflowId === c.id); + }); + } + workflows = workflows.map((w) => this.ownershipService.addOwnedByAndSharedWith(w)); } @@ -75,8 +89,8 @@ export class WorkflowService { } workflows.forEach((w) => { - // @ts-expect-error: This is to emulate the old behaviour of removing the shared - // field as part of `addOwnedByAndSharedWith`. We need this field in `addScopes` + // This is to emulate the old behaviour of removing the shared field as + // part of `addOwnedByAndSharedWith`. We need this field in `addScopes` // though. So to avoid leaking the information we just delete it. delete w.shared; }); diff --git a/packages/cli/templates/form-trigger-completion.handlebars b/packages/cli/templates/form-trigger-completion.handlebars index 49520fd14c..a15855d371 100644 --- a/packages/cli/templates/form-trigger-completion.handlebars +++ b/packages/cli/templates/form-trigger-completion.handlebars @@ -69,6 +69,9 @@ {{/if}} + diff --git a/packages/cli/test/integration/active-workflow-manager.test.ts b/packages/cli/test/integration/active-workflow-manager.test.ts index 8ea790ade7..a3e4f657f2 100644 --- a/packages/cli/test/integration/active-workflow-manager.test.ts +++ b/packages/cli/test/integration/active-workflow-manager.test.ts @@ -1,6 +1,5 @@ import { mock } from 'jest-mock-extended'; -import type { InstanceSettings } from 'n8n-core'; -import { NodeApiError, NodeOperationError, Workflow } from 'n8n-workflow'; +import { NodeApiError, Workflow } from 'n8n-workflow'; import type { IWebhookData, WorkflowActivateMode } from 'n8n-workflow'; import { Container } from 'typedi'; @@ -10,7 +9,7 @@ import type { WebhookEntity } from '@/databases/entities/webhook-entity'; import type { WorkflowEntity } from '@/databases/entities/workflow-entity'; import { ExecutionService } from '@/executions/execution.service'; import { ExternalHooks } from '@/external-hooks'; -import { LoadNodesAndCredentials } from '@/load-nodes-and-credentials'; +import { Logger } from '@/logging/logger.service'; import { NodeTypes } from '@/node-types'; import { Push } from '@/push'; import { SecretsHelper } from '@/secrets-helpers'; @@ -22,29 +21,16 @@ import { WorkflowService } from '@/workflows/workflow.service'; import { createOwner } from './shared/db/users'; import { createWorkflow } from './shared/db/workflows'; import * as testDb from './shared/test-db'; +import * as utils from './shared/utils/'; import { mockInstance } from '../shared/mocking'; mockInstance(ActiveExecutions); +mockInstance(Logger); mockInstance(Push); mockInstance(SecretsHelper); mockInstance(ExecutionService); mockInstance(WorkflowService); -const loader = mockInstance(LoadNodesAndCredentials); - -Object.assign(loader.loadedNodes, { - 'n8n-nodes-base.scheduleTrigger': { - type: { - description: { - displayName: 'Schedule Trigger', - name: 'scheduleTrigger', - properties: [], - }, - trigger: async () => {}, - }, - }, -}); - const webhookService = mockInstance(WebhookService); const externalHooks = mockInstance(ExternalHooks); @@ -58,15 +44,17 @@ beforeAll(async () => { activeWorkflowManager = Container.get(ActiveWorkflowManager); + await utils.initNodeTypes(); + const owner = await createOwner(); createActiveWorkflow = async () => await createWorkflow({ active: true }, owner); 
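Still on the workflow-listing hunk above: the raw `shared` relations are only needed while `homeProject`, `sharedWithProjects`, and scopes are being derived, and they are deleted again before the response goes out so sharing details are not leaked. A hedged sketch of that enrich-then-strip flow; the role and scope strings below are placeholders, not the exact values the service uses:

```typescript
// Illustrative only: derive display fields from the sharing relations,
// then drop the raw relations before the object is serialized to the client.
interface ProjectRef {
	id: string;
	name: string;
}

interface ListedWorkflowResponse {
	id: string;
	shared?: Array<{ role: string; project: ProjectRef }>;
	homeProject?: ProjectRef;
	scopes?: string[];
}

function enrichAndStrip(workflow: ListedWorkflowResponse): ListedWorkflowResponse {
	// Ownership and scopes are computed while `shared` is still attached ...
	const owner = workflow.shared?.find((relation) => relation.role === 'workflow:owner');
	workflow.homeProject = owner?.project;
	workflow.scopes = workflow.shared?.length ? ['workflow:read'] : [];

	// ... and the relations themselves are removed so they never reach the client.
	delete workflow.shared;
	return workflow;
}
```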
createInactiveWorkflow = async () => await createWorkflow({ active: false }, owner); }); afterEach(async () => { - await testDb.truncate(['Workflow', 'Webhook']); await activeWorkflowManager.removeAll(); - jest.restoreAllMocks(); + await testDb.truncate(['Workflow', 'Webhook']); + jest.clearAllMocks(); }); afterAll(async () => { @@ -98,7 +86,7 @@ describe('init()', () => { await Promise.all([createActiveWorkflow(), createActiveWorkflow()]); const checkSpy = jest - .spyOn(Workflow.prototype, 'checkIfWorkflowCanBeActivated') + .spyOn(activeWorkflowManager, 'checkIfWorkflowCanBeActivated') .mockReturnValue(true); await activeWorkflowManager.init(); @@ -179,7 +167,6 @@ describe('remove()', () => { it('should remove all webhooks of a workflow from external service', async () => { const dbWorkflow = await createActiveWorkflow(); - const deleteWebhookSpy = jest.spyOn(Workflow.prototype, 'deleteWebhook'); jest .spyOn(WebhookHelpers, 'getWorkflowWebhooks') .mockReturnValue([mock({ path: 'some-path' })]); @@ -187,7 +174,7 @@ describe('remove()', () => { await activeWorkflowManager.init(); await activeWorkflowManager.remove(dbWorkflow.id); - expect(deleteWebhookSpy).toHaveBeenCalledTimes(1); + expect(webhookService.deleteWebhook).toHaveBeenCalledTimes(1); }); it('should stop running triggers and pollers', async () => { @@ -206,22 +193,22 @@ describe('remove()', () => { }); describe('executeErrorWorkflow()', () => { - it('should delegate to `WorkflowExecuteAdditionalData`', async () => { - const dbWorkflow = await createActiveWorkflow(); - const [node] = dbWorkflow.nodes; + // it('should delegate to `WorkflowExecuteAdditionalData`', async () => { + // const dbWorkflow = await createActiveWorkflow(); + // const [node] = dbWorkflow.nodes; - const executeSpy = jest.spyOn(AdditionalData, 'executeErrorWorkflow'); + // const executeSpy = jest.spyOn(AdditionalData, 'executeErrorWorkflow'); - await activeWorkflowManager.init(); + // await activeWorkflowManager.init(); - activeWorkflowManager.executeErrorWorkflow( - new NodeOperationError(node, 'Something went wrong'), - dbWorkflow, - 'trigger', - ); + // activeWorkflowManager.executeErrorWorkflow( + // new NodeOperationError(node, 'Something went wrong'), + // dbWorkflow, + // 'trigger', + // ); - expect(executeSpy).toHaveBeenCalledTimes(1); - }); + // expect(executeSpy).toHaveBeenCalledTimes(1); + // }); it('should be called on failure to activate due to 401', async () => { const dbWorkflow = await createActiveWorkflow(); @@ -271,80 +258,11 @@ describe('addWebhooks()', () => { const [node] = dbWorkflow.nodes; jest.spyOn(Workflow.prototype, 'getNode').mockReturnValue(node); - jest.spyOn(Workflow.prototype, 'checkIfWorkflowCanBeActivated').mockReturnValue(true); - jest.spyOn(Workflow.prototype, 'createWebhookIfNotExists').mockResolvedValue(undefined); + jest.spyOn(activeWorkflowManager, 'checkIfWorkflowCanBeActivated').mockReturnValue(true); + webhookService.createWebhookIfNotExists.mockResolvedValue(undefined); await activeWorkflowManager.addWebhooks(workflow, additionalData, 'trigger', 'init'); expect(webhookService.storeWebhook).toHaveBeenCalledTimes(1); }); }); - -describe('shouldAddWebhooks', () => { - describe('if leader', () => { - const activeWorkflowManager = new ActiveWorkflowManager( - mock(), - mock(), - mock(), - mock(), - mock(), - mock(), - mock(), - mock(), - mock(), - mock(), - mock(), - mock(), - mock(), - mock({ isLeader: true, isFollower: false }), - mock(), - ); - - test('should return `true` for `init`', () => { - // ensure webhooks are 
populated on init: https://github.com/n8n-io/n8n/pull/8830 - const result = activeWorkflowManager.shouldAddWebhooks('init'); - expect(result).toBe(true); - }); - - test('should return `false` for `leadershipChange`', () => { - const result = activeWorkflowManager.shouldAddWebhooks('leadershipChange'); - expect(result).toBe(false); - }); - - test('should return `true` for `update` or `activate`', () => { - const modes = ['update', 'activate'] as WorkflowActivateMode[]; - for (const mode of modes) { - const result = activeWorkflowManager.shouldAddWebhooks(mode); - expect(result).toBe(true); - } - }); - }); - - describe('if follower', () => { - const activeWorkflowManager = new ActiveWorkflowManager( - mock(), - mock(), - mock(), - mock(), - mock(), - mock(), - mock(), - mock(), - mock(), - mock(), - mock(), - mock(), - mock(), - mock({ isLeader: false, isFollower: true }), - mock(), - ); - - test('should return `false` for `update` or `activate`', () => { - const modes = ['update', 'activate'] as WorkflowActivateMode[]; - for (const mode of modes) { - const result = activeWorkflowManager.shouldAddWebhooks(mode); - expect(result).toBe(false); - } - }); - }); -}); diff --git a/packages/cli/test/integration/environments/source-control-import.service.test.ts b/packages/cli/test/integration/environments/source-control-import.service.test.ts index 78732a99bc..4d2a3d668a 100644 --- a/packages/cli/test/integration/environments/source-control-import.service.test.ts +++ b/packages/cli/test/integration/environments/source-control-import.service.test.ts @@ -30,6 +30,7 @@ describe('SourceControlImportService', () => { mock(), mock(), mock(), + mock(), mock({ n8nFolder: '/some-path' }), ); diff --git a/packages/cli/test/integration/evaluation/test-definitions.api.test.ts b/packages/cli/test/integration/evaluation/test-definitions.api.test.ts index b644cbe4ea..fe977fbfd3 100644 --- a/packages/cli/test/integration/evaluation/test-definitions.api.test.ts +++ b/packages/cli/test/integration/evaluation/test-definitions.api.test.ts @@ -394,6 +394,57 @@ describe('PATCH /evaluation/test-definitions/:id', () => { expect(resp.statusCode).toBe(400); expect(resp.body.message).toBe('Annotation tag not found'); }); + + test('should update pinned nodes', async () => { + const newTest = Container.get(TestDefinitionRepository).create({ + name: 'test', + workflow: { id: workflowUnderTest.id }, + }); + await Container.get(TestDefinitionRepository).save(newTest); + + const resp = await authOwnerAgent.patch(`/evaluation/test-definitions/${newTest.id}`).send({ + mockedNodes: [ + { + name: 'Schedule Trigger', + }, + ], + }); + + expect(resp.statusCode).toBe(200); + expect(resp.body.data.mockedNodes).toEqual([{ name: 'Schedule Trigger' }]); + }); + + test('should return error if pinned nodes are invalid', async () => { + const newTest = Container.get(TestDefinitionRepository).create({ + name: 'test', + workflow: { id: workflowUnderTest.id }, + }); + await Container.get(TestDefinitionRepository).save(newTest); + + const resp = await authOwnerAgent.patch(`/evaluation/test-definitions/${newTest.id}`).send({ + mockedNodes: ['Simple string'], + }); + + expect(resp.statusCode).toBe(400); + }); + + test('should return error if pinned nodes are not in the workflow', async () => { + const newTest = Container.get(TestDefinitionRepository).create({ + name: 'test', + workflow: { id: workflowUnderTest.id }, + }); + await Container.get(TestDefinitionRepository).save(newTest); + + const resp = await 
authOwnerAgent.patch(`/evaluation/test-definitions/${newTest.id}`).send({ + mockedNodes: [ + { + name: 'Invalid Node', + }, + ], + }); + + expect(resp.statusCode).toBe(400); + }); }); describe('DELETE /evaluation/test-definitions/:id', () => { diff --git a/packages/cli/test/integration/public-api/workflows.test.ts b/packages/cli/test/integration/public-api/workflows.test.ts index 687b29da6a..5425455aca 100644 --- a/packages/cli/test/integration/public-api/workflows.test.ts +++ b/packages/cli/test/integration/public-api/workflows.test.ts @@ -528,8 +528,28 @@ describe('POST /workflows/:id/activate', () => { expect(response.statusCode).toBe(404); }); + test('should fail due to trying to activate a workflow without any nodes', async () => { + const workflow = await createWorkflow({ nodes: [] }, owner); + const response = await authOwnerAgent.post(`/workflows/${workflow.id}/activate`); + expect(response.statusCode).toBe(400); + }); + test('should fail due to trying to activate a workflow without a trigger', async () => { - const workflow = await createWorkflow({}, owner); + const workflow = await createWorkflow( + { + nodes: [ + { + id: 'uuid-1234', + name: 'Start', + parameters: {}, + position: [-20, 260], + type: 'n8n-nodes-base.start', + typeVersion: 1, + }, + ], + }, + owner, + ); const response = await authOwnerAgent.post(`/workflows/${workflow.id}/activate`); expect(response.statusCode).toBe(400); }); diff --git a/packages/cli/test/integration/runners/task-runner-module.external.test.ts b/packages/cli/test/integration/runners/task-runner-module.external.test.ts index bdabdf56ae..bb61dae6d4 100644 --- a/packages/cli/test/integration/runners/task-runner-module.external.test.ts +++ b/packages/cli/test/integration/runners/task-runner-module.external.test.ts @@ -1,4 +1,5 @@ import { TaskRunnersConfig } from '@n8n/config'; +import { mock } from 'jest-mock-extended'; import Container from 'typedi'; import { MissingAuthTokenError } from '@/runners/errors/missing-auth-token.error'; @@ -32,7 +33,7 @@ describe('TaskRunnerModule in external mode', () => { runnerConfig.enabled = true; runnerConfig.authToken = ''; - const module = new TaskRunnerModule(runnerConfig); + const module = new TaskRunnerModule(mock(), mock(), runnerConfig); await expect(module.start()).rejects.toThrowError(MissingAuthTokenError); }); diff --git a/packages/cli/test/integration/runners/task-runner-process.test.ts b/packages/cli/test/integration/runners/task-runner-process.test.ts index 3b6c8aec8c..b21ef68640 100644 --- a/packages/cli/test/integration/runners/task-runner-process.test.ts +++ b/packages/cli/test/integration/runners/task-runner-process.test.ts @@ -3,6 +3,7 @@ import Container from 'typedi'; import { TaskRunnerWsServer } from '@/runners/runner-ws-server'; import { TaskBroker } from '@/runners/task-broker.service'; import { TaskRunnerProcess } from '@/runners/task-runner-process'; +import { TaskRunnerProcessRestartLoopDetector } from '@/runners/task-runner-process-restart-loop-detector'; import { retryUntil } from '@test-integration/retry-until'; import { setupBrokerTestServer } from '@test-integration/utils/task-broker-test-server'; @@ -84,4 +85,33 @@ describe('TaskRunnerProcess', () => { expect(getNumRegisteredRunners()).toBe(1); expect(runnerProcess.pid).not.toBe(processId); }); + + it('should work together with restart loop detector', async () => { + // Arrange + const restartLoopDetector = new TaskRunnerProcessRestartLoopDetector(runnerProcess); + let restartLoopDetectedEventEmitted = false; + 
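`TaskRunnerProcessRestartLoopDetector` itself is not part of this diff; the test being arranged here only asserts that five rapid kills of the runner process produce a `restart-loop-detected` event. A hedged sketch of the sliding-window pattern such a detector typically implements; the class name, threshold, and window below are assumptions for illustration, not the real implementation:

```typescript
import { EventEmitter } from 'node:events';

// Hypothetical sketch: count process exits inside a sliding time window and emit
// 'restart-loop-detected' once a threshold is crossed. The monitored object is only
// assumed to emit 'exit' each time the underlying child process terminates.
class RestartLoopDetectorSketch extends EventEmitter {
	private exitTimestamps: number[] = [];

	constructor(
		monitored: EventEmitter,
		private readonly maxRestarts = 5,
		private readonly windowMs = 60_000,
	) {
		super();
		monitored.on('exit', () => this.onExit());
	}

	private onExit(): void {
		const now = Date.now();
		// Keep only the exits that happened inside the window, then record this one.
		this.exitTimestamps = this.exitTimestamps
			.filter((timestamp) => now - timestamp <= this.windowMs)
			.concat(now);
		if (this.exitTimestamps.length >= this.maxRestarts) {
			this.emit('restart-loop-detected', { restarts: this.exitTimestamps.length });
		}
	}
}
```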
restartLoopDetector.once('restart-loop-detected', () => { + restartLoopDetectedEventEmitted = true; + }); + + // Act + await runnerProcess.start(); + + // Simulate a restart loop + for (let i = 0; i < 5; i++) { + await retryUntil(() => { + expect(runnerProcess.pid).toBeDefined(); + }); + + // @ts-expect-error private property + runnerProcess.process?.kill(); + + await new Promise((resolve) => { + runnerProcess.once('exit', resolve); + }); + } + + // Assert + expect(restartLoopDetectedEventEmitted).toBe(true); + }); }); diff --git a/packages/cli/test/integration/shared/retry-until.ts b/packages/cli/test/integration/shared/retry-until.ts index f469149b31..b6f375ecb4 100644 --- a/packages/cli/test/integration/shared/retry-until.ts +++ b/packages/cli/test/integration/shared/retry-until.ts @@ -8,7 +8,7 @@ */ export const retryUntil = async ( assertion: () => Promise | void, - { interval = 20, timeout = 1000 } = {}, + { intervalMs = 200, timeoutMs = 5000 } = {}, ) => { return await new Promise((resolve, reject) => { const startTime = Date.now(); @@ -18,13 +18,13 @@ export const retryUntil = async ( try { resolve(await assertion()); } catch (error) { - if (Date.now() - startTime > timeout) { + if (Date.now() - startTime > timeoutMs) { reject(error); } else { tryAgain(); } } - }, interval); + }, intervalMs); }; tryAgain(); diff --git a/packages/cli/test/integration/shared/utils/community-nodes.ts b/packages/cli/test/integration/shared/utils/community-nodes.ts index 7734f70762..d29a9361be 100644 --- a/packages/cli/test/integration/shared/utils/community-nodes.ts +++ b/packages/cli/test/integration/shared/utils/community-nodes.ts @@ -22,7 +22,7 @@ export const mockNode = (packageName: string) => { return Container.get(InstalledNodesRepository).create({ name: nodeName, - type: nodeName, + type: `${packageName}.${nodeName}`, latestVersion: COMMUNITY_NODE_VERSION.CURRENT, package: { packageName }, }); diff --git a/packages/cli/test/integration/shared/utils/index.ts b/packages/cli/test/integration/shared/utils/index.ts index 78de2c1b25..ba99e1ca07 100644 --- a/packages/cli/test/integration/shared/utils/index.ts +++ b/packages/cli/test/integration/shared/utils/index.ts @@ -1,10 +1,17 @@ -import { BinaryDataService } from 'n8n-core'; +import { mock } from 'jest-mock-extended'; +import { + BinaryDataService, + InstanceSettings, + UnrecognizedNodeTypeError, + type DirectoryLoader, +} from 'n8n-core'; import { Ftp } from 'n8n-nodes-base/credentials/Ftp.credentials'; import { GithubApi } from 'n8n-nodes-base/credentials/GithubApi.credentials'; import { Cron } from 'n8n-nodes-base/nodes/Cron/Cron.node'; +import { ScheduleTrigger } from 'n8n-nodes-base/nodes/Schedule/ScheduleTrigger.node'; import { Set } from 'n8n-nodes-base/nodes/Set/Set.node'; import { Start } from 'n8n-nodes-base/nodes/Start/Start.node'; -import { type INode } from 'n8n-workflow'; +import type { INodeTypeData, INode } from 'n8n-workflow'; import type request from 'supertest'; import { Container } from 'typedi'; import { v4 as uuid } from 'uuid'; @@ -16,7 +23,6 @@ import { SettingsRepository } from '@/databases/repositories/settings.repository import { ExecutionService } from '@/executions/execution.service'; import { LoadNodesAndCredentials } from '@/load-nodes-and-credentials'; import { Push } from '@/push'; -import { OrchestrationService } from '@/services/orchestration.service'; import { mockInstance } from '../../../shared/mocking'; @@ -30,8 +36,8 @@ export { setupTestServer } from './test-server'; * Initialize node types. 
*/ export async function initActiveWorkflowManager() { - mockInstance(OrchestrationService, { - isMultiMainSetupEnabled: false, + mockInstance(InstanceSettings, { + isMultiMain: false, }); mockInstance(Push); @@ -62,7 +68,8 @@ export async function initCredentialsTypes(): Promise { * Initialize node types. */ export async function initNodeTypes() { - Container.get(LoadNodesAndCredentials).loaded.nodes = { + ScheduleTrigger.prototype.trigger = async () => ({}); + const nodes: INodeTypeData = { 'n8n-nodes-base.start': { type: new Start(), sourcePath: '', @@ -75,7 +82,21 @@ export async function initNodeTypes() { type: new Set(), sourcePath: '', }, + 'n8n-nodes-base.scheduleTrigger': { + type: new ScheduleTrigger(), + sourcePath: '', + }, }; + const loader = mock(); + loader.getNode.mockImplementation((nodeType) => { + const node = nodes[`n8n-nodes-base.${nodeType}`]; + if (!node) throw new UnrecognizedNodeTypeError('n8n-nodes-base', nodeType); + return node; + }); + + const loadNodesAndCredentials = Container.get(LoadNodesAndCredentials); + loadNodesAndCredentials.loaders = { 'n8n-nodes-base': loader }; + loadNodesAndCredentials.loaded.nodes = nodes; } /** diff --git a/packages/cli/test/integration/variables.test.ts b/packages/cli/test/integration/variables.test.ts index f8cef37539..7dd8d00aae 100644 --- a/packages/cli/test/integration/variables.test.ts +++ b/packages/cli/test/integration/variables.test.ts @@ -4,6 +4,7 @@ import type { Variables } from '@/databases/entities/variables'; import { VariablesRepository } from '@/databases/repositories/variables.repository'; import { generateNanoId } from '@/databases/utils/generators'; import { VariablesService } from '@/environments/variables/variables.service.ee'; +import { CacheService } from '@/services/cache/cache.service'; import { createOwner, createUser } from './shared/db/users'; import * as testDb from './shared/test-db'; @@ -65,19 +66,41 @@ beforeEach(async () => { // ---------------------------------------- describe('GET /variables', () => { beforeEach(async () => { - await Promise.all([createVariable('test1', 'value1'), createVariable('test2', 'value2')]); + await Promise.all([ + createVariable('test1', 'value1'), + createVariable('test2', 'value2'), + createVariable('empty', ''), + ]); + }); + + test('should return an empty array if there is nothing in the cache', async () => { + const cacheService = Container.get(CacheService); + const spy = jest.spyOn(cacheService, 'get').mockResolvedValueOnce(undefined); + const response = await authOwnerAgent.get('/variables'); + expect(spy).toHaveBeenCalledTimes(1); + expect(response.statusCode).toBe(200); + expect(response.body.data.length).toBe(0); }); test('should return all variables for an owner', async () => { const response = await authOwnerAgent.get('/variables'); expect(response.statusCode).toBe(200); - expect(response.body.data.length).toBe(2); + expect(response.body.data.length).toBe(3); }); test('should return all variables for a member', async () => { const response = await authMemberAgent.get('/variables'); expect(response.statusCode).toBe(200); - expect(response.body.data.length).toBe(2); + expect(response.body.data.length).toBe(3); + }); + + describe('state:empty', () => { + test('only return empty variables', async () => { + const response = await authOwnerAgent.get('/variables').query({ state: 'empty' }); + expect(response.statusCode).toBe(200); + expect(response.body.data.length).toBe(1); + expect(response.body.data[0]).toMatchObject({ key: 'empty', value: '', type: 'string' 
}); + }); }); }); diff --git a/packages/cli/test/integration/workflows/workflows.controller.test.ts b/packages/cli/test/integration/workflows/workflows.controller.test.ts index 84c1505887..fb28918509 100644 --- a/packages/cli/test/integration/workflows/workflows.controller.test.ts +++ b/packages/cli/test/integration/workflows/workflows.controller.test.ts @@ -17,10 +17,14 @@ import { EnterpriseWorkflowService } from '@/workflows/workflow.service.ee'; import { mockInstance } from '../../shared/mocking'; import { saveCredential } from '../shared/db/credentials'; -import { createTeamProject, linkUserToProject } from '../shared/db/projects'; +import { createTeamProject, getPersonalProject, linkUserToProject } from '../shared/db/projects'; import { createTag } from '../shared/db/tags'; import { createManyUsers, createMember, createOwner } from '../shared/db/users'; -import { createWorkflow, shareWorkflowWithProjects } from '../shared/db/workflows'; +import { + createWorkflow, + shareWorkflowWithProjects, + shareWorkflowWithUsers, +} from '../shared/db/workflows'; import { randomCredentialPayload } from '../shared/random'; import * as testDb from '../shared/test-db'; import type { SuperAgentTest } from '../shared/types'; @@ -676,6 +680,21 @@ describe('GET /workflows', () => { expect(response2.body.data).toHaveLength(0); }); + + test('should return homeProject when filtering workflows by projectId', async () => { + const workflow = await createWorkflow({ name: 'First' }, owner); + await shareWorkflowWithUsers(workflow, [member]); + const pp = await getPersonalProject(member); + + const response = await authMemberAgent + .get('/workflows') + .query(`filter={ "projectId": "${pp.id}" }`) + .expect(200); + + expect(response.body.data).toHaveLength(1); + expect(response.body.data[0].id).toBe(workflow.id); + expect(response.body.data[0].homeProject).not.toBeNull(); + }); }); describe('select', () => { diff --git a/packages/core/bin/generate-known b/packages/core/bin/generate-known deleted file mode 100755 index 3784f15fb1..0000000000 --- a/packages/core/bin/generate-known +++ /dev/null @@ -1,99 +0,0 @@ -#!/usr/bin/env node - -const path = require('path'); -const glob = require('fast-glob'); -const uniq = require('lodash/uniq'); -const { LoggerProxy, getCredentialsForNode } = require('n8n-workflow'); -const { packageDir, writeJSON } = require('./common'); -const { loadClassInIsolation } = require('../dist/ClassLoader'); - -LoggerProxy.init(console); - -const loadClass = (sourcePath) => { - try { - const [className] = path.parse(sourcePath).name.split('.'); - const filePath = path.resolve(packageDir, sourcePath); - const instance = loadClassInIsolation(filePath, className); - return { instance, sourcePath, className }; - } catch (e) { - LoggerProxy.warn(`Failed to load ${sourcePath}: ${e.message}`); - } -}; - -const generateKnownNodes = async () => { - const nodeClasses = glob - .sync('dist/nodes/**/*.node.js', { cwd: packageDir }) - .map(loadClass) - // Ignore node versions - .filter((nodeClass) => nodeClass && !/[vV]\d.node\.js$/.test(nodeClass.sourcePath)); - - const nodes = {}; - const nodesByCredential = {}; - - for (const { className, sourcePath, instance } of nodeClasses) { - const nodeName = instance.description.name; - nodes[nodeName] = { className, sourcePath }; - - for (const credential of getCredentialsForNode(instance)) { - if (!nodesByCredential[credential.name]) { - nodesByCredential[credential.name] = []; - } - - nodesByCredential[credential.name].push(nodeName); - } - } - - 
LoggerProxy.info(`Detected ${Object.keys(nodes).length} nodes`); - await writeJSON('known/nodes.json', nodes); - return { nodes, nodesByCredential }; -}; - -const generateKnownCredentials = async (nodesByCredential) => { - const credentialClasses = glob - .sync(`dist/credentials/**/*.credentials.js`, { cwd: packageDir }) - .map(loadClass) - .filter((data) => !!data); - - for (const { instance } of credentialClasses) { - if (Array.isArray(instance.extends)) { - for (const extendedCredential of instance.extends) { - nodesByCredential[extendedCredential] = [ - ...(nodesByCredential[extendedCredential] ?? []), - ...(nodesByCredential[instance.name] ?? []), - ]; - } - } - } - - const credentials = credentialClasses.reduce( - (credentials, { className, sourcePath, instance }) => { - const credentialName = instance.name; - const credential = { - className, - sourcePath, - }; - - if (Array.isArray(instance.extends)) { - credential.extends = instance.extends; - } - - if (nodesByCredential[credentialName]) { - credential.supportedNodes = Array.from(new Set(nodesByCredential[credentialName])); - } - - credentials[credentialName] = credential; - - return credentials; - }, - {}, - ); - - LoggerProxy.info(`Detected ${Object.keys(credentials).length} credentials`); - await writeJSON('known/credentials.json', credentials); - return credentials; -}; - -(async () => { - const { nodesByCredential } = await generateKnownNodes(); - await generateKnownCredentials(nodesByCredential); -})(); diff --git a/packages/core/bin/generate-ui-types b/packages/core/bin/generate-metadata similarity index 59% rename from packages/core/bin/generate-ui-types rename to packages/core/bin/generate-metadata index f73ca87a15..18dbca687a 100755 --- a/packages/core/bin/generate-ui-types +++ b/packages/core/bin/generate-metadata @@ -1,6 +1,6 @@ #!/usr/bin/env node -const { LoggerProxy, NodeHelpers } = require('n8n-workflow'); +const { LoggerProxy } = require('n8n-workflow'); const { PackageDirectoryLoader } = require('../dist/DirectoryLoader'); const { packageDir, writeJSON } = require('./common'); @@ -33,7 +33,7 @@ function findReferencedMethods(obj, refs = {}, latestName = '') { const loaderNodeTypes = Object.values(loader.nodeTypes); const definedMethods = loaderNodeTypes.reduce((acc, cur) => { - NodeHelpers.getVersionedNodeTypeAll(cur.type).forEach((type) => { + loader.getVersionedNodeTypeAll(cur.type).forEach((type) => { const methods = type.description?.__loadOptionsMethods; if (!methods) return; @@ -52,51 +52,21 @@ function findReferencedMethods(obj, refs = {}, latestName = '') { }, {}); const nodeTypes = loaderNodeTypes - .map((data) => { - const nodeType = NodeHelpers.getVersionedNodeType(data.type); - NodeHelpers.applySpecialNodeParameters(nodeType); - return data.type; - }) + .map(({ type }) => type) .flatMap((nodeType) => - NodeHelpers.getVersionedNodeTypeAll(nodeType).map((item) => { + loader.getVersionedNodeTypeAll(nodeType).map((item) => { const { __loadOptionsMethods, ...rest } = item.description; return rest; }), ); const knownCredentials = loader.known.credentials; - const credentialTypes = Object.values(loader.credentialTypes).map((data) => { - const credentialType = data.type; - const supportedNodes = knownCredentials[credentialType.name].supportedNodes ?? 
[]; - if (supportedNodes.length > 0 && credentialType.httpRequestNode) { - credentialType.httpRequestNode.hidden = true; - } - - credentialType.supportedNodes = supportedNodes; - - if (!credentialType.iconUrl && !credentialType.icon) { - for (const supportedNode of supportedNodes) { - const nodeType = loader.nodeTypes[supportedNode]?.type.description; - - if (!nodeType) continue; - if (nodeType.icon) { - credentialType.icon = nodeType.icon; - credentialType.iconColor = nodeType.iconColor; - break; - } - if (nodeType.iconUrl) { - credentialType.iconUrl = nodeType.iconUrl; - break; - } - } - } - - return credentialType; - }); - + const credentialTypes = Object.values(loader.credentialTypes).map(({ type }) => type); const referencedMethods = findReferencedMethods(nodeTypes); await Promise.all([ + writeJSON('known/nodes.json', loader.known.nodes), + writeJSON('known/credentials.json', loader.known.credentials), writeJSON('types/credentials.json', credentialTypes), writeJSON('types/nodes.json', nodeTypes), writeJSON('methods/defined.json', definedMethods), diff --git a/packages/core/package.json b/packages/core/package.json index d334e118f1..7fdd5f99af 100644 --- a/packages/core/package.json +++ b/packages/core/package.json @@ -1,14 +1,13 @@ { "name": "n8n-core", - "version": "1.71.0", + "version": "1.72.0", "description": "Core functionality of n8n", "main": "dist/index", "types": "dist/index.d.ts", "bin": { "n8n-copy-icons": "./bin/copy-icons", - "n8n-generate-known": "./bin/generate-known", "n8n-generate-translations": "./bin/generate-translations", - "n8n-generate-ui-types": "./bin/generate-ui-types" + "n8n-generate-metadata": "./bin/generate-metadata" }, "scripts": { "clean": "rimraf dist .turbo", @@ -40,6 +39,7 @@ "@langchain/core": "catalog:", "@n8n/client-oauth2": "workspace:*", "@n8n/config": "workspace:*", + "@sentry/node": "catalog:", "aws4": "1.11.0", "axios": "catalog:", "concat-stream": "2.0.0", diff --git a/packages/core/src/ActiveWorkflows.ts b/packages/core/src/ActiveWorkflows.ts index 93e67488d5..b7604f9778 100644 --- a/packages/core/src/ActiveWorkflows.ts +++ b/packages/core/src/ActiveWorkflows.ts @@ -11,7 +11,6 @@ import type { } from 'n8n-workflow'; import { ApplicationError, - ErrorReporterProxy as ErrorReporter, LoggerProxy as Logger, toCronExpression, TriggerCloseError, @@ -20,12 +19,18 @@ import { } from 'n8n-workflow'; import { Service } from 'typedi'; +import { ErrorReporter } from './error-reporter'; import type { IWorkflowData } from './Interfaces'; import { ScheduledTaskManager } from './ScheduledTaskManager'; +import { TriggersAndPollers } from './TriggersAndPollers'; @Service() export class ActiveWorkflows { - constructor(private readonly scheduledTaskManager: ScheduledTaskManager) {} + constructor( + private readonly scheduledTaskManager: ScheduledTaskManager, + private readonly triggersAndPollers: TriggersAndPollers, + private readonly errorReporter: ErrorReporter, + ) {} private activeWorkflows: { [workflowId: string]: IWorkflowData } = {}; @@ -75,7 +80,8 @@ export class ActiveWorkflows { for (const triggerNode of triggerNodes) { try { - triggerResponse = await workflow.runTrigger( + triggerResponse = await this.triggersAndPollers.runTrigger( + workflow, triggerNode, getTriggerFunctions, additionalData, @@ -150,7 +156,7 @@ export class ActiveWorkflows { }); try { - const pollResponse = await workflow.runPoll(node, pollFunctions); + const pollResponse = await this.triggersAndPollers.runPoll(workflow, node, pollFunctions); if (pollResponse !== null) { 
pollFunctions.__emit(pollResponse); @@ -218,7 +224,7 @@ export class ActiveWorkflows { Logger.error( `There was a problem calling "closeFunction" on "${e.node.name}" in workflow "${workflowId}"`, ); - ErrorReporter.error(e, { extra: { workflowId } }); + this.errorReporter.error(e, { extra: { workflowId } }); return; } diff --git a/packages/core/src/Agent/index.ts b/packages/core/src/Agent/index.ts deleted file mode 100644 index ed842d99ee..0000000000 --- a/packages/core/src/Agent/index.ts +++ /dev/null @@ -1,61 +0,0 @@ -import type { - IExecuteFunctions, - Workflow, - IRunExecutionData, - INodeExecutionData, - ITaskDataConnections, - INode, - IWorkflowExecuteAdditionalData, - WorkflowExecuteMode, - INodeParameters, - IExecuteData, - IDataObject, - Result, -} from 'n8n-workflow'; -import { createEnvProviderState } from 'n8n-workflow'; - -export const createAgentStartJob = ( - additionalData: IWorkflowExecuteAdditionalData, - inputData: ITaskDataConnections, - node: INode, - workflow: Workflow, - runExecutionData: IRunExecutionData, - runIndex: number, - activeNodeName: string, - connectionInputData: INodeExecutionData[], - siblingParameters: INodeParameters, - mode: WorkflowExecuteMode, - executeData?: IExecuteData, - defaultReturnRunIndex?: number, - selfData?: IDataObject, - contextNodeName?: string, -): IExecuteFunctions['startJob'] => { - return async function startJob( - this: IExecuteFunctions, - jobType: string, - settings: unknown, - itemIndex: number, - ): Promise> { - return await additionalData.startAgentJob( - additionalData, - jobType, - settings, - this, - inputData, - node, - workflow, - runExecutionData, - runIndex, - itemIndex, - activeNodeName, - connectionInputData, - siblingParameters, - mode, - createEnvProviderState(), - executeData, - defaultReturnRunIndex, - selfData, - contextNodeName, - ); - }; -}; diff --git a/packages/core/src/Constants.ts b/packages/core/src/Constants.ts index d5fc12bffd..ceaf77566a 100644 --- a/packages/core/src/Constants.ts +++ b/packages/core/src/Constants.ts @@ -1,3 +1,6 @@ +import type { INodeProperties } from 'n8n-workflow'; +import { cronNodeOptions } from 'n8n-workflow'; + export const CUSTOM_EXTENSION_ENV = 'N8N_CUSTOM_EXTENSIONS'; export const PLACEHOLDER_EMPTY_EXECUTION_ID = '__UNKNOWN__'; export const PLACEHOLDER_EMPTY_WORKFLOW_ID = '__EMPTY__'; @@ -12,3 +15,30 @@ export const CONFIG_FILES = 'N8N_CONFIG_FILES'; export const BINARY_DATA_STORAGE_PATH = 'N8N_BINARY_DATA_STORAGE_PATH'; export const UM_EMAIL_TEMPLATES_INVITE = 'N8N_UM_EMAIL_TEMPLATES_INVITE'; export const UM_EMAIL_TEMPLATES_PWRESET = 'N8N_UM_EMAIL_TEMPLATES_PWRESET'; + +export const commonPollingParameters: INodeProperties[] = [ + { + displayName: 'Poll Times', + name: 'pollTimes', + type: 'fixedCollection', + typeOptions: { + multipleValues: true, + multipleValueButtonText: 'Add Poll Time', + }, + default: { item: [{ mode: 'everyMinute' }] }, + description: 'Time at which polling should occur', + placeholder: 'Add Poll Time', + options: cronNodeOptions, + }, +]; + +export const commonCORSParameters: INodeProperties[] = [ + { + displayName: 'Allowed Origins (CORS)', + name: 'allowedOrigins', + type: 'string', + default: '*', + description: + 'Comma-separated list of URLs allowed for cross-origin non-preflight requests. 
Use * (default) to allow all origins.', + }, +]; diff --git a/packages/core/src/CreateNodeAsTool.ts b/packages/core/src/CreateNodeAsTool.ts index c569f943ce..7c67b6ac58 100644 --- a/packages/core/src/CreateNodeAsTool.ts +++ b/packages/core/src/CreateNodeAsTool.ts @@ -1,11 +1,6 @@ import { DynamicStructuredTool } from '@langchain/core/tools'; -import type { - IExecuteFunctions, - INodeParameters, - INodeType, - ISupplyDataFunctions, -} from 'n8n-workflow'; -import { jsonParse, NodeConnectionType, NodeOperationError } from 'n8n-workflow'; +import type { IDataObject, INode, INodeType } from 'n8n-workflow'; +import { jsonParse, NodeOperationError } from 'n8n-workflow'; import { z } from 'zod'; type AllowedTypes = 'string' | 'number' | 'boolean' | 'json'; @@ -16,6 +11,12 @@ interface FromAIArgument { defaultValue?: string | number | boolean | Record; } +type ParserOptions = { + node: INode; + nodeType: INodeType; + handleToolInvocation: (toolArgs: IDataObject) => Promise; +}; + /** * AIParametersParser * @@ -23,15 +24,10 @@ interface FromAIArgument { * generating Zod schemas, and creating LangChain tools. */ class AIParametersParser { - private ctx: ISupplyDataFunctions; - /** * Constructs an instance of AIParametersParser. - * @param ctx The execution context. */ - constructor(ctx: ISupplyDataFunctions) { - this.ctx = ctx; - } + constructor(private readonly options: ParserOptions) {} /** * Generates a Zod schema based on the provided FromAIArgument placeholder. @@ -162,14 +158,14 @@ class AIParametersParser { } catch (error) { // If parsing fails, throw an ApplicationError with details throw new NodeOperationError( - this.ctx.getNode(), + this.options.node, `Failed to parse $fromAI arguments: ${argsString}: ${error}`, ); } } else { // Log an error if parentheses are unbalanced throw new NodeOperationError( - this.ctx.getNode(), + this.options.node, `Unbalanced parentheses while parsing $fromAI call: ${str.slice(startIndex)}`, ); } @@ -254,7 +250,7 @@ class AIParametersParser { const type = cleanArgs?.[2] || 'string'; if (!['string', 'number', 'boolean', 'json'].includes(type.toLowerCase())) { - throw new NodeOperationError(this.ctx.getNode(), `Invalid type: ${type}`); + throw new NodeOperationError(this.options.node, `Invalid type: ${type}`); } return { @@ -286,42 +282,21 @@ class AIParametersParser { } /** - * Generates a description for a node based on the provided parameters. - * @param node The node type. - * @param nodeParameters The parameters of the node. - * @returns A string description for the node. + * Retrieves and validates the Zod schema for the tool. + * + * This method: + * 1. Collects all $fromAI arguments from node parameters + * 2. Validates parameter keys against naming rules + * 3. Checks for duplicate keys and ensures consistency + * 4. 
Generates a Zod schema from the validated arguments + * + * @throws {NodeOperationError} When parameter keys are invalid or when duplicate keys have inconsistent definitions + * @returns {z.ZodObject} A Zod schema object representing the structure and validation rules for the node parameters */ - private getDescription(node: INodeType, nodeParameters: INodeParameters): string { - const manualDescription = nodeParameters.toolDescription as string; - - if (nodeParameters.descriptionType === 'auto') { - const resource = nodeParameters.resource as string; - const operation = nodeParameters.operation as string; - let description = node.description.description; - if (resource) { - description += `\n Resource: ${resource}`; - } - if (operation) { - description += `\n Operation: ${operation}`; - } - return description.trim(); - } - if (nodeParameters.descriptionType === 'manual') { - return manualDescription ?? node.description.description; - } - - return node.description.description; - } - - /** - * Creates a DynamicStructuredTool from a node. - * @param node The node type. - * @param nodeParameters The parameters of the node. - * @returns A DynamicStructuredTool instance. - */ - public createTool(node: INodeType, nodeParameters: INodeParameters): DynamicStructuredTool { + private getSchema() { + const { node } = this.options; const collectedArguments: FromAIArgument[] = []; - this.traverseNodeParameters(nodeParameters, collectedArguments); + this.traverseNodeParameters(node.parameters, collectedArguments); // Validate each collected argument const nameValidationRegex = /^[a-zA-Z0-9_-]{1,64}$/; @@ -331,7 +306,7 @@ class AIParametersParser { const isEmptyError = 'You must specify a key when using $fromAI()'; const isInvalidError = `Parameter key \`${argument.key}\` is invalid`; const error = new Error(argument.key.length === 0 ? isEmptyError : isInvalidError); - throw new NodeOperationError(this.ctx.getNode(), error, { + throw new NodeOperationError(node, error, { description: 'Invalid parameter key, must be between 1 and 64 characters long and only contain letters, numbers, underscores, and hyphens', }); @@ -348,7 +323,7 @@ class AIParametersParser { ) { // If not, throw an error for inconsistent duplicate keys throw new NodeOperationError( - this.ctx.getNode(), + node, `Duplicate key '${argument.key}' found with different description or type`, { description: @@ -377,43 +352,56 @@ class AIParametersParser { return acc; }, {}); - const schema = z.object(schemaObj).required(); - const description = this.getDescription(node, nodeParameters); - const nodeName = this.ctx.getNode().name.replace(/ /g, '_'); - const name = nodeName || node.description.name; + return z.object(schemaObj).required(); + } - const tool = new DynamicStructuredTool({ + /** + * Generates a description for a node based on the provided parameters. + * @param node The node type. + * @param nodeParameters The parameters of the node. + * @returns A string description for the node. 
+ */ + private getDescription(): string { + const { node, nodeType } = this.options; + const manualDescription = node.parameters.toolDescription as string; + + if (node.parameters.descriptionType === 'auto') { + const resource = node.parameters.resource as string; + const operation = node.parameters.operation as string; + let description = nodeType.description.description; + if (resource) { + description += `\n Resource: ${resource}`; + } + if (operation) { + description += `\n Operation: ${operation}`; + } + return description.trim(); + } + if (node.parameters.descriptionType === 'manual') { + return manualDescription ?? nodeType.description.description; + } + + return nodeType.description.description; + } + + /** + * Creates a DynamicStructuredTool from a node. + * @returns A DynamicStructuredTool instance. + */ + public createTool(): DynamicStructuredTool { + const { node, nodeType } = this.options; + const schema = this.getSchema(); + const description = this.getDescription(); + const nodeName = node.name.replace(/ /g, '_'); + const name = nodeName || nodeType.description.name; + + return new DynamicStructuredTool({ name, description, schema, - func: async (functionArgs: z.infer) => { - const { index } = this.ctx.addInputData(NodeConnectionType.AiTool, [ - [{ json: functionArgs }], - ]); - - try { - // Execute the node with the proxied context - const result = await node.execute?.bind(this.ctx as IExecuteFunctions)(); - - // Process and map the results - const mappedResults = result?.[0]?.flatMap((item) => item.json); - - // Add output data to the context - this.ctx.addOutputData(NodeConnectionType.AiTool, index, [ - [{ json: { response: mappedResults } }], - ]); - - // Return the stringified results - return JSON.stringify(mappedResults); - } catch (error) { - const nodeError = new NodeOperationError(this.ctx.getNode(), error as Error); - this.ctx.addOutputData(NodeConnectionType.AiTool, index, nodeError); - return 'Error during node execution: ' + nodeError.description; - } - }, + func: async (toolArgs: z.infer) => + await this.options.handleToolInvocation(toolArgs), }); - - return tool; } } @@ -421,20 +409,8 @@ class AIParametersParser { * Converts node into LangChain tool by analyzing node parameters, * identifying placeholders using the $fromAI function, and generating a Zod schema. It then creates * a DynamicStructuredTool that can be used in LangChain workflows. - * - * @param ctx The execution context. - * @param node The node type. - * @param nodeParameters The parameters of the node. - * @returns An object containing the DynamicStructuredTool instance. 
*/ -export function createNodeAsTool( - ctx: ISupplyDataFunctions, - node: INodeType, - nodeParameters: INodeParameters, -) { - const parser = new AIParametersParser(ctx); - - return { - response: parser.createTool(node, nodeParameters), - }; +export function createNodeAsTool(options: ParserOptions) { + const parser = new AIParametersParser(options); + return { response: parser.createTool() }; } diff --git a/packages/core/src/DirectoryLoader.ts b/packages/core/src/DirectoryLoader.ts index b0e77125a7..cd223da2dd 100644 --- a/packages/core/src/DirectoryLoader.ts +++ b/packages/core/src/DirectoryLoader.ts @@ -1,9 +1,12 @@ import glob from 'fast-glob'; +import uniqBy from 'lodash/uniqBy'; import type { CodexData, DocumentationLink, ICredentialType, ICredentialTypeData, + INodeCredentialDescription, + INodePropertyOptions, INodeType, INodeTypeBaseDescription, INodeTypeData, @@ -15,8 +18,7 @@ import type { import { ApplicationError, LoggerProxy as Logger, - getCredentialsForNode, - getVersionedNodeTypeAll, + applyDeclarativeNodeOptionParameters, jsonParse, } from 'n8n-workflow'; import { readFileSync } from 'node:fs'; @@ -24,7 +26,9 @@ import { readFile } from 'node:fs/promises'; import * as path from 'path'; import { loadClassInIsolation } from './ClassLoader'; -import { CUSTOM_NODES_CATEGORY } from './Constants'; +import { commonCORSParameters, commonPollingParameters, CUSTOM_NODES_CATEGORY } from './Constants'; +import { UnrecognizedCredentialTypeError } from './errors/unrecognized-credential-type.error'; +import { UnrecognizedNodeTypeError } from './errors/unrecognized-node-type.error'; import type { n8n } from './Interfaces'; function toJSON(this: ICredentialType) { @@ -34,11 +38,25 @@ function toJSON(this: ICredentialType) { }; } +type Codex = { + categories: string[]; + subcategories: { [subcategory: string]: string[] }; + resources: { + primaryDocumentation: DocumentationLink[]; + credentialDocumentation: DocumentationLink[]; + }; + alias: string[]; +}; + export type Types = { nodes: INodeTypeBaseDescription[]; credentials: ICredentialType[]; }; +/** + * Base class for loading n8n nodes and credentials from a directory. + * Handles the common functionality for resolving paths, loading classes, and managing node and credential types. + */ export abstract class DirectoryLoader { isLazyLoaded = false; @@ -58,7 +76,7 @@ export abstract class DirectoryLoader { // Stores the different versions with their individual descriptions types: Types = { nodes: [], credentials: [] }; - protected nodesByCredential: Record = {}; + readonly nodesByCredential: Record = {}; constructor( readonly directory: string, @@ -82,35 +100,40 @@ export abstract class DirectoryLoader { return path.resolve(this.directory, file); } - protected loadNodeFromFile(nodeName: string, filePath: string) { - let tempNode: INodeType | IVersionedNodeType; - let nodeVersion = 1; - const isCustom = this.packageName === 'CUSTOM'; - + private loadClass(sourcePath: string) { + const filePath = this.resolvePath(sourcePath); + const [className] = path.parse(sourcePath).name.split('.'); try { - tempNode = loadClassInIsolation(filePath, nodeName); - this.addCodex({ node: tempNode, filePath, isCustom }); + return loadClassInIsolation(filePath, className); } catch (error) { - Logger.error( - `Error loading node "${nodeName}" from: "${filePath}" - ${(error as Error).message}`, - ); - throw error; + throw error instanceof TypeError + ? new ApplicationError( + 'Class could not be found. 
Please check if the class is named correctly.', + { extra: { className } }, + ) + : error; } + } - const fullNodeName = `${this.packageName}.${tempNode.description.name}`; + /** Loads a nodes class from a file, fixes icons, and augments the codex */ + loadNodeFromFile(filePath: string) { + const tempNode = this.loadClass(filePath); + this.addCodex(tempNode, filePath); - if (this.includeNodes.length && !this.includeNodes.includes(fullNodeName)) { + const nodeType = tempNode.description.name; + const fullNodeType = `${this.packageName}.${nodeType}`; + + if (this.includeNodes.length && !this.includeNodes.includes(fullNodeType)) { return; } - if (this.excludeNodes.includes(fullNodeName)) { + if (this.excludeNodes.includes(fullNodeType)) { return; } - tempNode.description.name = fullNodeName; - this.fixIconPaths(tempNode.description, filePath); + let nodeVersion = 1; if ('nodeVersions' in tempNode) { for (const versionNode of Object.values(tempNode.nodeVersions)) { this.fixIconPaths(versionNode.description, filePath); @@ -118,85 +141,93 @@ export abstract class DirectoryLoader { for (const version of Object.values(tempNode.nodeVersions)) { this.addLoadOptionsMethods(version); + this.applySpecialNodeParameters(version); } const currentVersionNode = tempNode.nodeVersions[tempNode.currentVersion]; - this.addCodex({ node: currentVersionNode, filePath, isCustom }); + this.addCodex(currentVersionNode, filePath); nodeVersion = tempNode.currentVersion; if (currentVersionNode.hasOwnProperty('executeSingle')) { throw new ApplicationError( '"executeSingle" has been removed. Please update the code of this node to use "execute" instead.', - { extra: { nodeName: `${this.packageName}.${nodeName}` } }, + { extra: { nodeType: fullNodeType } }, ); } } else { this.addLoadOptionsMethods(tempNode); - // Short renaming to avoid type issues + this.applySpecialNodeParameters(tempNode); + // Short renaming to avoid type issues nodeVersion = Array.isArray(tempNode.description.version) ? tempNode.description.version.slice(-1)[0] : tempNode.description.version; } - this.known.nodes[fullNodeName] = { - className: nodeName, + this.known.nodes[nodeType] = { + className: tempNode.constructor.name, sourcePath: filePath, }; - this.nodeTypes[fullNodeName] = { + this.nodeTypes[nodeType] = { type: tempNode, sourcePath: filePath, }; this.loadedNodes.push({ - name: fullNodeName, + name: nodeType, version: nodeVersion, }); - getVersionedNodeTypeAll(tempNode).forEach(({ description }) => { + this.getVersionedNodeTypeAll(tempNode).forEach(({ description }) => { this.types.nodes.push(description); }); - for (const credential of getCredentialsForNode(tempNode)) { + for (const credential of this.getCredentialsForNode(tempNode)) { if (!this.nodesByCredential[credential.name]) { this.nodesByCredential[credential.name] = []; } - this.nodesByCredential[credential.name].push(fullNodeName); + this.nodesByCredential[credential.name].push(nodeType); } } - protected loadCredentialFromFile(credentialClassName: string, filePath: string): void { - let tempCredential: ICredentialType; - try { - tempCredential = loadClassInIsolation(filePath, credentialClassName); - - // Add serializer method "toJSON" to the class so that authenticate method (if defined) - // gets mapped to the authenticate attribute before it is sent to the client. 
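The reworked `loadNodeFromFile()` above now builds the fully qualified type as `<packageName>.<nodeType>` before applying the include/exclude lists. A small sketch of just that filtering rule, with plain strings standing in for the loader's state:

```ts
// Decide whether a node should be registered, given the loader's include/exclude
// lists. Assumes full node types are written as `<packageName>.<nodeType>`,
// as in the diff above.
function shouldLoadNode(
  packageName: string,
  nodeType: string,
  includeNodes: string[],
  excludeNodes: string[],
): boolean {
  const fullNodeType = `${packageName}.${nodeType}`;
  if (includeNodes.length && !includeNodes.includes(fullNodeType)) return false;
  if (excludeNodes.includes(fullNodeType)) return false;
  return true;
}

// e.g. only load the HTTP Request node from a package:
shouldLoadNode('n8n-nodes-base', 'httpRequest', ['n8n-nodes-base.httpRequest'], []); // true
shouldLoadNode('n8n-nodes-base', 'set', ['n8n-nodes-base.httpRequest'], []); // false
```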
- // The authenticate property is used by the client to decide whether or not to - // include the credential type in the predefined credentials (HTTP node) - Object.assign(tempCredential, { toJSON }); - - this.fixIconPaths(tempCredential, filePath); - } catch (e) { - if (e instanceof TypeError) { - throw new ApplicationError( - 'Class could not be found. Please check if the class is named correctly.', - { extra: { credentialClassName } }, - ); - } else { - throw e; - } + getNode(nodeType: string) { + const { + nodeTypes, + known: { nodes: knownNodes }, + } = this; + if (!(nodeType in nodeTypes) && nodeType in knownNodes) { + const { sourcePath } = knownNodes[nodeType]; + this.loadNodeFromFile(sourcePath); } - this.known.credentials[tempCredential.name] = { - className: credentialClassName, + if (nodeType in nodeTypes) { + return nodeTypes[nodeType]; + } + + throw new UnrecognizedNodeTypeError(this.packageName, nodeType); + } + + /** Loads a credential class from a file, and fixes icons */ + loadCredentialFromFile(filePath: string): void { + const tempCredential = this.loadClass(filePath); + // Add serializer method "toJSON" to the class so that authenticate method (if defined) + // gets mapped to the authenticate attribute before it is sent to the client. + // The authenticate property is used by the client to decide whether or not to + // include the credential type in the predefined credentials (HTTP node) + Object.assign(tempCredential, { toJSON }); + + this.fixIconPaths(tempCredential, filePath); + + const credentialType = tempCredential.name; + this.known.credentials[credentialType] = { + className: tempCredential.constructor.name, sourcePath: filePath, extends: tempCredential.extends, - supportedNodes: this.nodesByCredential[tempCredential.name], + supportedNodes: this.nodesByCredential[credentialType], }; - this.credentialTypes[tempCredential.name] = { + this.credentialTypes[credentialType] = { type: tempCredential, sourcePath: filePath, }; @@ -204,40 +235,79 @@ export abstract class DirectoryLoader { this.types.credentials.push(tempCredential); } + getCredential(credentialType: string) { + const { + credentialTypes, + known: { credentials: knownCredentials }, + } = this; + if (!(credentialType in credentialTypes) && credentialType in knownCredentials) { + const { sourcePath } = knownCredentials[credentialType]; + this.loadCredentialFromFile(sourcePath); + } + + if (credentialType in credentialTypes) { + return credentialTypes[credentialType]; + } + + throw new UnrecognizedCredentialTypeError(credentialType); + } + + /** + * Returns an array of credential descriptions that are supported by a node. + * For versioned nodes, combines and deduplicates credentials from all versions. + */ + getCredentialsForNode(object: IVersionedNodeType | INodeType): INodeCredentialDescription[] { + if ('nodeVersions' in object) { + const credentials = Object.values(object.nodeVersions).flatMap( + ({ description }) => description.credentials ?? [], + ); + return uniqBy(credentials, 'name'); + } + return object.description.credentials ?? []; + } + + /** + * Returns an array of all versions of a node type. + * For non-versioned nodes, returns an array with just that node. + * For versioned nodes, returns all available versions. 
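The new `getCredentialsForNode()` above flattens the credentials declared by every version of a versioned node and deduplicates them by name via `uniqBy`. A self-contained sketch of that deduplication step, with minimal local types standing in for the real `INodeCredentialDescription` and version description interfaces:

```ts
import uniqBy from 'lodash/uniqBy';

// Local stand-ins for the relevant shapes from n8n-workflow.
interface CredentialDescription {
  name: string;
  required?: boolean;
}
interface VersionDescription {
  credentials?: CredentialDescription[];
}

// Flatten the credentials of every node version and keep one entry per name.
function dedupeCredentials(versions: VersionDescription[]): CredentialDescription[] {
  const all = versions.flatMap(({ credentials }) => credentials ?? []);
  return uniqBy(all, 'name');
}

// Two versions that both declare `slackApi` yield a single `slackApi` entry.
dedupeCredentials([
  { credentials: [{ name: 'slackApi', required: true }] },
  { credentials: [{ name: 'slackApi', required: true }, { name: 'slackOAuth2Api' }] },
]);
// => [{ name: 'slackApi', required: true }, { name: 'slackOAuth2Api' }]
```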
+ */ + getVersionedNodeTypeAll(object: IVersionedNodeType | INodeType): INodeType[] { + if ('nodeVersions' in object) { + const nodeVersions = Object.values(object.nodeVersions).map((element) => { + element.description.name = object.description.name; + element.description.codex = object.description.codex; + return element; + }); + return uniqBy(nodeVersions.reverse(), (node) => { + const { version } = node.description; + return Array.isArray(version) ? version.join(',') : version.toString(); + }); + } + return [object]; + } + /** * Retrieves `categories`, `subcategories` and alias (if defined) * from the codex data for the node at the given file path. */ private getCodex(filePath: string): CodexData { - type Codex = { - categories: string[]; - subcategories: { [subcategory: string]: string[] }; - resources: { - primaryDocumentation: DocumentationLink[]; - credentialDocumentation: DocumentationLink[]; - }; - alias: string[]; - }; - - const codexFilePath = `${filePath}on`; // .js to .json + const codexFilePath = this.resolvePath(`${filePath}on`); // .js to .json const { categories, subcategories, - resources: allResources, + resources: { primaryDocumentation, credentialDocumentation }, alias, } = module.require(codexFilePath) as Codex; - const resources = { - primaryDocumentation: allResources.primaryDocumentation, - credentialDocumentation: allResources.credentialDocumentation, - }; - return { ...(categories && { categories }), ...(subcategories && { subcategories }), - ...(resources && { resources }), ...(alias && { alias }), + resources: { + primaryDocumentation, + credentialDocumentation, + }, }; } @@ -245,15 +315,8 @@ export abstract class DirectoryLoader { * Adds a node codex `categories` and `subcategories` (if defined) * to a node description `codex` property. */ - private addCodex({ - node, - filePath, - isCustom, - }: { - node: INodeType | IVersionedNodeType; - filePath: string; - isCustom: boolean; - }) { + private addCodex(node: INodeType | IVersionedNodeType, filePath: string) { + const isCustom = this.packageName === 'CUSTOM'; try { let codex; @@ -273,7 +336,7 @@ export abstract class DirectoryLoader { node.description.codex = codex; } catch { - Logger.debug(`No codex available for: ${filePath.split('/').pop() ?? 
''}`); + Logger.debug(`No codex available for: ${node.description.name}`); if (isCustom) { node.description.codex = { @@ -289,10 +352,27 @@ export abstract class DirectoryLoader { } } + private applySpecialNodeParameters(nodeType: INodeType): void { + const { properties, polling, supportsCORS } = nodeType.description; + if (polling) { + properties.unshift(...commonPollingParameters); + } + if (nodeType.webhook && supportsCORS) { + const optionsProperty = properties.find(({ name }) => name === 'options'); + if (optionsProperty) + optionsProperty.options = [ + ...commonCORSParameters, + ...(optionsProperty.options as INodePropertyOptions[]), + ]; + else properties.push(...commonCORSParameters); + } + + applyDeclarativeNodeOptionParameters(nodeType); + } + private getIconPath(icon: string, filePath: string) { const iconPath = path.join(path.dirname(filePath), icon.replace('file:', '')); - const relativePath = path.relative(this.directory, iconPath); - return `icons/${this.packageName}/${relativePath}`; + return `icons/${this.packageName}/${iconPath}`; } private fixIconPaths( @@ -305,14 +385,14 @@ export abstract class DirectoryLoader { if (typeof icon === 'string') { if (icon.startsWith('file:')) { obj.iconUrl = this.getIconPath(icon, filePath); - delete obj.icon; + obj.icon = undefined; } } else if (icon.light.startsWith('file:') && icon.dark.startsWith('file:')) { obj.iconUrl = { light: this.getIconPath(icon.light, filePath), dark: this.getIconPath(icon.dark, filePath), }; - delete obj.icon; + obj.icon = undefined; } } } @@ -331,8 +411,7 @@ export class CustomDirectoryLoader extends DirectoryLoader { }); for (const nodePath of nodes) { - const [fileName] = path.parse(nodePath).name.split('.'); - this.loadNodeFromFile(fileName, nodePath); + this.loadNodeFromFile(nodePath); } const credentials = await glob('**/*.credentials.js', { @@ -341,8 +420,7 @@ export class CustomDirectoryLoader extends DirectoryLoader { }); for (const credentialPath of credentials) { - const [fileName] = path.parse(credentialPath).name.split('.'); - this.loadCredentialFromFile(fileName, credentialPath); + this.loadCredentialFromFile(credentialPath); } } } @@ -363,33 +441,55 @@ export class PackageDirectoryLoader extends DirectoryLoader { const { nodes, credentials } = n8n; if (Array.isArray(nodes)) { - for (const node of nodes) { - const filePath = this.resolvePath(node); - const [nodeName] = path.parse(node).name.split('.'); - - this.loadNodeFromFile(nodeName, filePath); + for (const nodePath of nodes) { + this.loadNodeFromFile(nodePath); } } if (Array.isArray(credentials)) { - for (const credential of credentials) { - const filePath = this.resolvePath(credential); - const [credentialName] = path.parse(credential).name.split('.'); - - this.loadCredentialFromFile(credentialName, filePath); + for (const credentialPath of credentials) { + this.loadCredentialFromFile(credentialPath); } } + this.inferSupportedNodes(); + Logger.debug(`Loaded all credentials and nodes from ${this.packageName}`, { credentials: credentials?.length ?? 0, nodes: nodes?.length ?? 0, }); } - protected readJSONSync(file: string): T { - const filePath = this.resolvePath(file); - const fileString = readFileSync(filePath, 'utf8'); + private inferSupportedNodes() { + const knownCredentials = this.known.credentials; + for (const { type: credentialType } of Object.values(this.credentialTypes)) { + const supportedNodes = knownCredentials[credentialType.name].supportedNodes ?? 
[]; + if (supportedNodes.length > 0 && credentialType.httpRequestNode) { + credentialType.httpRequestNode.hidden = true; + } + credentialType.supportedNodes = supportedNodes; + + if (!credentialType.iconUrl && !credentialType.icon) { + for (const supportedNode of supportedNodes) { + const nodeDescription = this.nodeTypes[supportedNode]?.type.description; + + if (!nodeDescription) continue; + if (nodeDescription.icon) { + credentialType.icon = nodeDescription.icon; + credentialType.iconColor = nodeDescription.iconColor; + break; + } + if (nodeDescription.iconUrl) { + credentialType.iconUrl = nodeDescription.iconUrl; + break; + } + } + } + } + } + + private parseJSON(fileString: string, filePath: string): T { try { return jsonParse(fileString); } catch (error) { @@ -397,15 +497,16 @@ export class PackageDirectoryLoader extends DirectoryLoader { } } + protected readJSONSync(file: string): T { + const filePath = this.resolvePath(file); + const fileString = readFileSync(filePath, 'utf8'); + return this.parseJSON(fileString, filePath); + } + protected async readJSON(file: string): Promise { const filePath = this.resolvePath(file); const fileString = await readFile(filePath, 'utf8'); - - try { - return jsonParse(fileString); - } catch (error) { - throw new ApplicationError('Failed to parse JSON', { extra: { filePath } }); - } + return this.parseJSON(fileString, filePath); } } @@ -415,10 +516,7 @@ export class PackageDirectoryLoader extends DirectoryLoader { export class LazyPackageDirectoryLoader extends PackageDirectoryLoader { override async loadAll() { try { - const knownNodes: typeof this.known.nodes = await this.readJSON('dist/known/nodes.json'); - for (const nodeName in knownNodes) { - this.known.nodes[`${this.packageName}.${nodeName}`] = knownNodes[nodeName]; - } + this.known.nodes = await this.readJSON('dist/known/nodes.json'); this.known.credentials = await this.readJSON('dist/known/credentials.json'); this.types.nodes = await this.readJSON('dist/types/nodes.json'); @@ -426,9 +524,10 @@ export class LazyPackageDirectoryLoader extends PackageDirectoryLoader { if (this.includeNodes.length) { const allowedNodes: typeof this.known.nodes = {}; - for (const nodeName of this.includeNodes) { - if (nodeName in this.known.nodes) { - allowedNodes[nodeName] = this.known.nodes[nodeName]; + for (const fullNodeType of this.includeNodes) { + const [packageName, nodeType] = fullNodeType.split('.'); + if (packageName === this.packageName && nodeType in this.known.nodes) { + allowedNodes[nodeType] = this.known.nodes[nodeType]; } } this.known.nodes = allowedNodes; @@ -439,8 +538,11 @@ export class LazyPackageDirectoryLoader extends PackageDirectoryLoader { } if (this.excludeNodes.length) { - for (const nodeName of this.excludeNodes) { - delete this.known.nodes[nodeName]; + for (const fullNodeType of this.excludeNodes) { + const [packageName, nodeType] = fullNodeType.split('.'); + if (packageName === this.packageName) { + delete this.known.nodes[nodeType]; + } } this.types.nodes = this.types.nodes.filter( diff --git a/packages/core/src/InstanceSettings.ts b/packages/core/src/InstanceSettings.ts index 7d38f21184..f611e034b3 100644 --- a/packages/core/src/InstanceSettings.ts +++ b/packages/core/src/InstanceSettings.ts @@ -1,10 +1,11 @@ import { createHash, randomBytes } from 'crypto'; -import { chmodSync, existsSync, mkdirSync, readFileSync, statSync, writeFileSync } from 'fs'; import { ApplicationError, jsonParse, ALPHABET, toResult } from 'n8n-workflow'; import { customAlphabet } from 'nanoid'; +import { 
chmodSync, existsSync, mkdirSync, readFileSync, statSync, writeFileSync } from 'node:fs'; import path from 'path'; import { Service } from 'typedi'; +import { Memoized } from './decorators'; import { InstanceSettingsConfig } from './InstanceSettingsConfig'; const nanoid = customAlphabet(ALPHABET, 16); @@ -86,6 +87,29 @@ export class InstanceSettings { */ readonly hostId: string; + private isMultiMainEnabled = false; + + private isMultiMainLicensed = false; + + /** Set whether multi-main mode is enabled. Does not imply licensed status. */ + setMultiMainEnabled(newState: boolean) { + this.isMultiMainEnabled = newState; + } + + setMultiMainLicensed(newState: boolean) { + this.isMultiMainLicensed = newState; + } + + /** Whether this `main` instance is running in multi-main mode. */ + get isMultiMain() { + return this.instanceType === 'main' && this.isMultiMainEnabled && this.isMultiMainLicensed; + } + + /** Whether this `main` instance is running in single-main mode. */ + get isSingleMain() { + return !this.isMultiMain; + } + get isLeader() { return this.instanceRole === 'leader'; } @@ -110,6 +134,22 @@ export class InstanceSettings { return this.settings.tunnelSubdomain; } + /** + * Whether this instance is running inside a Docker container. + * + * Based on: https://github.com/sindresorhus/is-docker + */ + @Memoized + get isDocker() { + try { + return ( + existsSync('/.dockerenv') || readFileSync('/proc/self/cgroup', 'utf8').includes('docker') + ); + } catch { + return false; + } + } + update(newSettings: WritableSettings) { this.save({ ...this.settings, ...newSettings }); } diff --git a/packages/core/src/NodeExecuteFunctions.ts b/packages/core/src/NodeExecuteFunctions.ts index 7ff1645335..578752b3ef 100644 --- a/packages/core/src/NodeExecuteFunctions.ts +++ b/packages/core/src/NodeExecuteFunctions.ts @@ -13,13 +13,7 @@ import type { OAuth2CredentialData, } from '@n8n/client-oauth2'; import { ClientOAuth2 } from '@n8n/client-oauth2'; -import type { - AxiosError, - AxiosHeaders, - AxiosPromise, - AxiosRequestConfig, - AxiosResponse, -} from 'axios'; +import type { AxiosError, AxiosHeaders, AxiosRequestConfig, AxiosResponse } from 'axios'; import axios from 'axios'; import crypto, { createHmac } from 'crypto'; import FileType from 'file-type'; @@ -47,8 +41,6 @@ import type { IDataObject, IExecuteData, IExecuteFunctions, - IExecuteSingleFunctions, - IHookFunctions, IHttpRequestOptions, IN8nHttpFullResponse, IN8nHttpResponse, @@ -60,13 +52,9 @@ import type { IPollFunctions, IRequestOptions, IRunExecutionData, - ITaskData, ITaskDataConnections, - ITaskMetadata, ITriggerFunctions, - IWebhookData, IWebhookDescription, - IWebhookFunctions, IWorkflowDataProxyAdditionalKeys, IWorkflowExecuteAdditionalData, NodeExecutionWithMetadata, @@ -85,9 +73,10 @@ import type { DeduplicationScope, DeduplicationItemTypes, ICheckProcessedContextData, - ISupplyDataFunctions, WebhookType, SchedulingFunctions, + SupplyData, + AINodeConnectionType, } from 'n8n-workflow'; import { NodeConnectionType, @@ -129,15 +118,7 @@ import { DataDeduplicationService } from './data-deduplication-service'; import { InstanceSettings } from './InstanceSettings'; import type { IResponseError } from './Interfaces'; // eslint-disable-next-line import/no-cycle -import { - ExecuteContext, - ExecuteSingleContext, - HookContext, - PollContext, - SupplyDataContext, - TriggerContext, - WebhookContext, -} from './node-execution-context'; +import { PollContext, SupplyDataContext, TriggerContext } from './node-execution-context'; import { 
ScheduledTaskManager } from './ScheduledTaskManager'; import { SSHClientsManager } from './SSHClientsManager'; @@ -748,6 +729,26 @@ export async function binaryToString(body: Buffer | Readable, encoding?: string) return iconv.decode(buffer, encoding ?? 'utf-8'); } +export async function invokeAxios( + axiosConfig: AxiosRequestConfig, + authOptions: IRequestOptions['auth'] = {}, +) { + try { + return await axios(axiosConfig); + } catch (error) { + if (authOptions.sendImmediately !== false || !(error instanceof axios.AxiosError)) throw error; + // for digest-auth + const { response } = error; + if (response?.status !== 401 || !response.headers['www-authenticate']?.includes('nonce')) { + throw error; + } + const { auth } = axiosConfig; + delete axiosConfig.auth; + axiosConfig = digestAuthAxiosConfig(axiosConfig, response, auth); + return await axios(axiosConfig); + } +} + export async function proxyRequestToAxios( workflow: Workflow | undefined, additionalData: IWorkflowExecuteAdditionalData | undefined, @@ -768,29 +769,8 @@ export async function proxyRequestToAxios( axiosConfig = Object.assign(axiosConfig, await parseRequestObject(configObject)); - let requestFn: () => AxiosPromise; - if (configObject.auth?.sendImmediately === false) { - // for digest-auth - requestFn = async () => { - try { - return await axios(axiosConfig); - } catch (error) { - const { response } = error; - if (response?.status !== 401 || !response.headers['www-authenticate']?.includes('nonce')) { - throw error; - } - const { auth } = axiosConfig; - delete axiosConfig.auth; - axiosConfig = digestAuthAxiosConfig(axiosConfig, response, auth); - return await axios(axiosConfig); - } - }; - } else { - requestFn = async () => await axios(axiosConfig); - } - try { - const response = await requestFn(); + const response = await invokeAxios(axiosConfig, configObject.auth); let body = response.data; if (body instanceof IncomingMessage && axiosConfig.responseType === 'stream') { parseIncomingMessage(body); @@ -982,7 +962,7 @@ export async function httpRequest( ): Promise { removeEmptyBody(requestOptions); - let axiosRequest = convertN8nRequestToAxios(requestOptions); + const axiosRequest = convertN8nRequestToAxios(requestOptions); if ( axiosRequest.data === undefined || (axiosRequest.method !== undefined && axiosRequest.method.toUpperCase() === 'GET') @@ -990,23 +970,7 @@ export async function httpRequest( delete axiosRequest.data; } - let result: AxiosResponse; - try { - result = await axios(axiosRequest); - } catch (error) { - if (requestOptions.auth?.sendImmediately === false) { - const { response } = error; - if (response?.status !== 401 || !response.headers['www-authenticate']?.includes('nonce')) { - throw error; - } - - const { auth } = axiosRequest; - delete axiosRequest.auth; - axiosRequest = digestAuthAxiosConfig(axiosRequest, response, auth); - result = await axios(axiosRequest); - } - throw error; - } + const result = await invokeAxios(axiosRequest, requestOptions.auth); if (requestOptions.returnFullResponse) { return { @@ -2044,143 +2008,39 @@ export function getWebhookDescription( return undefined; } -// TODO: Change options to an object -export const addExecutionDataFunctions = async ( - type: 'input' | 'output', - nodeName: string, - data: INodeExecutionData[][] | ExecutionBaseError, - runExecutionData: IRunExecutionData, - connectionType: NodeConnectionType, - additionalData: IWorkflowExecuteAdditionalData, - sourceNodeName: string, - sourceNodeRunIndex: number, - currentNodeRunIndex: number, - metadata?: 
ITaskMetadata, -): Promise => { - if (connectionType === NodeConnectionType.Main) { - throw new ApplicationError('Setting type is not supported for main connection', { - extra: { type }, - }); - } - - let taskData: ITaskData | undefined; - if (type === 'input') { - taskData = { - startTime: new Date().getTime(), - executionTime: 0, - executionStatus: 'running', - source: [null], - }; - } else { - // At the moment we expect that there is always an input sent before the output - taskData = get( - runExecutionData, - ['resultData', 'runData', nodeName, currentNodeRunIndex], - undefined, - ); - if (taskData === undefined) { - return; - } - taskData.metadata = metadata; - } - taskData = taskData!; - - if (data instanceof Error) { - taskData.executionStatus = 'error'; - taskData.error = data; - } else { - if (type === 'output') { - taskData.executionStatus = 'success'; - } - taskData.data = { - [connectionType]: data, - } as ITaskDataConnections; - } - - if (type === 'input') { - if (!(data instanceof Error)) { - taskData.inputOverride = { - [connectionType]: data, - } as ITaskDataConnections; - } - - if (!runExecutionData.resultData.runData.hasOwnProperty(nodeName)) { - runExecutionData.resultData.runData[nodeName] = []; - } - - runExecutionData.resultData.runData[nodeName][currentNodeRunIndex] = taskData; - if (additionalData.sendDataToUI) { - additionalData.sendDataToUI('nodeExecuteBefore', { - executionId: additionalData.executionId, - nodeName, - }); - } - } else { - // Outputs - taskData.executionTime = new Date().getTime() - taskData.startTime; - - if (additionalData.sendDataToUI) { - additionalData.sendDataToUI('nodeExecuteAfter', { - executionId: additionalData.executionId, - nodeName, - data: taskData, - }); - } - - if (get(runExecutionData, 'executionData.metadata', undefined) === undefined) { - runExecutionData.executionData!.metadata = {}; - } - - let sourceTaskData = get(runExecutionData, ['executionData', 'metadata', sourceNodeName]); - - if (!sourceTaskData) { - runExecutionData.executionData!.metadata[sourceNodeName] = []; - sourceTaskData = runExecutionData.executionData!.metadata[sourceNodeName]; - } - - if (!sourceTaskData[sourceNodeRunIndex]) { - sourceTaskData[sourceNodeRunIndex] = { - subRun: [], - }; - } - - sourceTaskData[sourceNodeRunIndex]!.subRun!.push({ - node: nodeName, - runIndex: currentNodeRunIndex, - }); - } -}; - export async function getInputConnectionData( this: IAllExecuteFunctions, workflow: Workflow, runExecutionData: IRunExecutionData, - runIndex: number, + parentRunIndex: number, connectionInputData: INodeExecutionData[], - inputData: ITaskDataConnections, + parentInputData: ITaskDataConnections, additionalData: IWorkflowExecuteAdditionalData, executeData: IExecuteData, mode: WorkflowExecuteMode, closeFunctions: CloseFunction[], - inputName: NodeConnectionType, + connectionType: AINodeConnectionType, itemIndex: number, abortSignal?: AbortSignal, ): Promise { - const node = this.getNode(); - const nodeType = workflow.nodeTypes.getByNameAndVersion(node.type, node.typeVersion); + const parentNode = this.getNode(); + const parentNodeType = workflow.nodeTypes.getByNameAndVersion( + parentNode.type, + parentNode.typeVersion, + ); - const inputs = NodeHelpers.getNodeInputs(workflow, node, nodeType.description); + const inputs = NodeHelpers.getNodeInputs(workflow, parentNode, parentNodeType.description); let inputConfiguration = inputs.find((input) => { if (typeof input === 'string') { - return input === inputName; + return input === connectionType; } - return 
input.type === inputName; + return input.type === connectionType; }); if (inputConfiguration === undefined) { throw new ApplicationError('Node does not have input of type', { - extra: { nodeName: node.name, inputName }, + extra: { nodeName: parentNode.name, connectionType }, }); } @@ -2190,28 +2050,39 @@ export async function getInputConnectionData( } as INodeInputConfiguration; } - const parentNodes = workflow.getParentNodes(node.name, inputName, 1); - if (parentNodes.length === 0) { + const connectedNodes = workflow + .getParentNodes(parentNode.name, connectionType, 1) + .map((nodeName) => workflow.getNode(nodeName) as INode) + .filter((connectedNode) => connectedNode.disabled !== true); + + if (connectedNodes.length === 0) { if (inputConfiguration.required) { throw new NodeOperationError( - node, - `A ${inputConfiguration?.displayName ?? inputName} sub-node must be connected`, + parentNode, + `A ${inputConfiguration?.displayName ?? connectionType} sub-node must be connected and enabled`, ); } return inputConfiguration.maxConnections === 1 ? undefined : []; } - const constParentNodes = parentNodes - .map((nodeName) => { - return workflow.getNode(nodeName) as INode; - }) - .filter((connectedNode) => connectedNode.disabled !== true) - .map(async (connectedNode) => { - const nodeType = workflow.nodeTypes.getByNameAndVersion( - connectedNode.type, - connectedNode.typeVersion, - ); - const context = new SupplyDataContext( + if ( + inputConfiguration.maxConnections !== undefined && + connectedNodes.length > inputConfiguration.maxConnections + ) { + throw new NodeOperationError( + parentNode, + `Only ${inputConfiguration.maxConnections} ${connectionType} sub-nodes are/is allowed to be connected`, + ); + } + + const nodes: SupplyData[] = []; + for (const connectedNode of connectedNodes) { + const connectedNodeType = workflow.nodeTypes.getByNameAndVersion( + connectedNode.type, + connectedNode.typeVersion, + ); + const contextFactory = (runIndex: number, inputData: ITaskDataConnections) => + new SupplyDataContext( workflow, connectedNode, additionalData, @@ -2220,29 +2091,64 @@ export async function getInputConnectionData( runIndex, connectionInputData, inputData, + connectionType, executeData, closeFunctions, abortSignal, ); - if (!nodeType.supplyData) { - if (nodeType.description.outputs.includes(NodeConnectionType.AiTool)) { - nodeType.supplyData = async function (this: ISupplyDataFunctions) { - return createNodeAsTool(this, nodeType, this.getNode().parameters); - }; - } else { - throw new ApplicationError('Node does not have a `supplyData` method defined', { - extra: { nodeName: connectedNode.name }, - }); - } - } + if (!connectedNodeType.supplyData) { + if (connectedNodeType.description.outputs.includes(NodeConnectionType.AiTool)) { + /** + * This keeps track of how many times this specific AI tool node has been invoked. + * It is incremented on every invocation of the tool to keep the output of each invocation separate from each other. 
+ */ + let toolRunIndex = 0; + const supplyData = createNodeAsTool({ + node: connectedNode, + nodeType: connectedNodeType, + handleToolInvocation: async (toolArgs) => { + const runIndex = toolRunIndex++; + const context = contextFactory(runIndex, {}); + context.addInputData(NodeConnectionType.AiTool, [[{ json: toolArgs }]]); + try { + // Execute the sub-node with the proxied context + const result = await connectedNodeType.execute?.call( + context as unknown as IExecuteFunctions, + ); + + // Process and map the results + const mappedResults = result?.[0]?.flatMap((item) => item.json); + + // Add output data to the context + context.addOutputData(NodeConnectionType.AiTool, runIndex, [ + [{ json: { response: mappedResults } }], + ]); + + // Return the stringified results + return JSON.stringify(mappedResults); + } catch (error) { + const nodeError = new NodeOperationError(connectedNode, error as Error); + context.addOutputData(NodeConnectionType.AiTool, runIndex, nodeError); + return 'Error during node execution: ' + nodeError.description; + } + }, + }); + nodes.push(supplyData); + } else { + throw new ApplicationError('Node does not have a `supplyData` method defined', { + extra: { nodeName: connectedNode.name }, + }); + } + } else { + const context = contextFactory(parentRunIndex, parentInputData); try { - const response = await nodeType.supplyData.call(context, itemIndex); - if (response.closeFunction) { - closeFunctions.push(response.closeFunction); + const supplyData = await connectedNodeType.supplyData.call(context, itemIndex); + if (supplyData.closeFunction) { + closeFunctions.push(supplyData.closeFunction); } - return response; + nodes.push(supplyData); } catch (error) { // Propagate errors from sub-nodes if (error.functionality === 'configuration-node') throw error; @@ -2253,20 +2159,16 @@ export async function getInputConnectionData( } let currentNodeRunIndex = 0; - if (runExecutionData.resultData.runData.hasOwnProperty(node.name)) { - currentNodeRunIndex = runExecutionData.resultData.runData[node.name].length; + if (runExecutionData.resultData.runData.hasOwnProperty(parentNode.name)) { + currentNodeRunIndex = runExecutionData.resultData.runData[parentNode.name].length; } // Display the error on the node which is causing it - await addExecutionDataFunctions( + await context.addExecutionDataFunctions( 'input', - connectedNode.name, error, - runExecutionData, - inputName, - additionalData, - node.name, - runIndex, + connectionType, + parentNode.name, currentNodeRunIndex, ); @@ -2277,25 +2179,7 @@ export async function getInputConnectionData( description: error.message, }); } - }); - - // Validate the inputs - const nodes = await Promise.all(constParentNodes); - - if (inputConfiguration.required && nodes.length === 0) { - throw new NodeOperationError( - node, - `A ${inputConfiguration?.displayName ?? inputName} sub-node must be connected`, - ); - } - if ( - inputConfiguration.maxConnections !== undefined && - nodes.length > inputConfiguration.maxConnections - ) { - throw new NodeOperationError( - node, - `Only ${inputConfiguration.maxConnections} ${inputName} sub-nodes are/is allowed to be connected`, - ); + } } return inputConfiguration.maxConnections === 1 @@ -2856,68 +2740,6 @@ export function getExecuteTriggerFunctions( return new TriggerContext(workflow, node, additionalData, mode, activation); } -/** - * Returns the execute functions regular nodes have access to. 
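The `handleToolInvocation` callback above gives every invocation of an AI tool its own run index and returns the stringified results to the agent. A minimal sketch of that bookkeeping in isolation, where the `log` callback merely stands in for the context's `addInputData`/`addOutputData` calls and error handling is reduced to the error message:

```ts
type ToolArgs = Record<string, unknown>;

// Wrap a tool executor so that each call gets its own run index and the
// inputs/outputs of separate invocations are recorded separately.
function makeToolInvoker(
  execute: (args: ToolArgs) => Promise<unknown[]>,
  log: (runIndex: number, phase: 'input' | 'output', payload: unknown) => void,
) {
  let toolRunIndex = 0; // incremented on every invocation
  return async (toolArgs: ToolArgs): Promise<string> => {
    const runIndex = toolRunIndex++;
    log(runIndex, 'input', toolArgs);
    try {
      const results = await execute(toolArgs);
      log(runIndex, 'output', { response: results });
      // The agent receives the stringified results, as in the diff above.
      return JSON.stringify(results);
    } catch (error) {
      log(runIndex, 'output', error);
      return 'Error during node execution: ' + (error as Error).message;
    }
  };
}
```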
- */ -export function getExecuteFunctions( - workflow: Workflow, - runExecutionData: IRunExecutionData, - runIndex: number, - connectionInputData: INodeExecutionData[], - inputData: ITaskDataConnections, - node: INode, - additionalData: IWorkflowExecuteAdditionalData, - executeData: IExecuteData, - mode: WorkflowExecuteMode, - closeFunctions: CloseFunction[], - abortSignal?: AbortSignal, -): IExecuteFunctions { - return new ExecuteContext( - workflow, - node, - additionalData, - mode, - runExecutionData, - runIndex, - connectionInputData, - inputData, - executeData, - closeFunctions, - abortSignal, - ); -} - -/** - * Returns the execute functions regular nodes have access to when single-function is defined. - */ -export function getExecuteSingleFunctions( - workflow: Workflow, - runExecutionData: IRunExecutionData, - runIndex: number, - connectionInputData: INodeExecutionData[], - inputData: ITaskDataConnections, - node: INode, - itemIndex: number, - additionalData: IWorkflowExecuteAdditionalData, - executeData: IExecuteData, - mode: WorkflowExecuteMode, - abortSignal?: AbortSignal, -): IExecuteSingleFunctions { - return new ExecuteSingleContext( - workflow, - node, - additionalData, - mode, - runExecutionData, - runIndex, - connectionInputData, - inputData, - itemIndex, - executeData, - abortSignal, - ); -} - export function getCredentialTestFunctions(): ICredentialTestFunctions { return { helpers: { @@ -2928,41 +2750,3 @@ export function getCredentialTestFunctions(): ICredentialTestFunctions { }, }; } - -/** - * Returns the execute functions regular nodes have access to in hook-function. - */ -export function getExecuteHookFunctions( - workflow: Workflow, - node: INode, - additionalData: IWorkflowExecuteAdditionalData, - mode: WorkflowExecuteMode, - activation: WorkflowActivateMode, - webhookData?: IWebhookData, -): IHookFunctions { - return new HookContext(workflow, node, additionalData, mode, activation, webhookData); -} - -/** - * Returns the execute functions regular nodes have access to when webhook-function is defined. 
- */ -// TODO: check where it is used and make sure close functions are called -export function getExecuteWebhookFunctions( - workflow: Workflow, - node: INode, - additionalData: IWorkflowExecuteAdditionalData, - mode: WorkflowExecuteMode, - webhookData: IWebhookData, - closeFunctions: CloseFunction[], - runExecutionData: IRunExecutionData | null, -): IWebhookFunctions { - return new WebhookContext( - workflow, - node, - additionalData, - mode, - webhookData, - closeFunctions, - runExecutionData, - ); -} diff --git a/packages/core/src/PartialExecutionUtils/DirectedGraph.ts b/packages/core/src/PartialExecutionUtils/DirectedGraph.ts index e8695743a1..63d95367ab 100644 --- a/packages/core/src/PartialExecutionUtils/DirectedGraph.ts +++ b/packages/core/src/PartialExecutionUtils/DirectedGraph.ts @@ -42,6 +42,10 @@ export class DirectedGraph { private connections: Map = new Map(); + hasNode(nodeName: string) { + return this.nodes.has(nodeName); + } + getNodes() { return new Map(this.nodes.entries()); } diff --git a/packages/core/src/PartialExecutionUtils/__tests__/DirectedGraph.test.ts b/packages/core/src/PartialExecutionUtils/__tests__/DirectedGraph.test.ts index c8feb20e11..5d769004a5 100644 --- a/packages/core/src/PartialExecutionUtils/__tests__/DirectedGraph.test.ts +++ b/packages/core/src/PartialExecutionUtils/__tests__/DirectedGraph.test.ts @@ -43,8 +43,8 @@ describe('DirectedGraph', () => { }); // ┌─────┐ ┌─────┐──► null - // │node1├───►│node2| ┌─────┐ - // └─────┘ └─────┘──►│node3| + // │node1├───►│node2│ ┌─────┐ + // └─────┘ └─────┘──►│node3│ // └─────┘ // test('linear workflow with null connections', () => { @@ -472,4 +472,24 @@ describe('DirectedGraph', () => { expect(graph).toEqual(expectedGraph); }); }); + + describe('hasNode', () => { + test("returns node if it's part of the graph", () => { + // ARRANGE + const node = createNodeData({ name: 'node' }); + const graph = new DirectedGraph().addNodes(node); + + // ACT & ASSERT + expect(graph.hasNode(node.name)).toBe(true); + }); + + test('returns undefined if there is no node with that name in the graph', () => { + // ARRANGE + const node = createNodeData({ name: 'node' }); + const graph = new DirectedGraph().addNodes(node); + + // ACT & ASSERT + expect(graph.hasNode(node.name + 'foo')).toBe(false); + }); + }); }); diff --git a/packages/core/src/PartialExecutionUtils/__tests__/cleanRunData.test.ts b/packages/core/src/PartialExecutionUtils/__tests__/cleanRunData.test.ts index 5daea46ef6..959ab78845 100644 --- a/packages/core/src/PartialExecutionUtils/__tests__/cleanRunData.test.ts +++ b/packages/core/src/PartialExecutionUtils/__tests__/cleanRunData.test.ts @@ -84,4 +84,31 @@ describe('cleanRunData', () => { // TODO: Find out if this is a desirable result in milestone 2 expect(newRunData).toEqual({}); }); + + // ┌─────┐ ┌─────┐ + // │node1├───►│node2│ + // └─────┘ └─────┘ + test('removes run data of nodes that are not in the subgraph', () => { + // ARRANGE + const node1 = createNodeData({ name: 'Node1' }); + const node2 = createNodeData({ name: 'Node2' }); + const graph = new DirectedGraph() + .addNodes(node1, node2) + .addConnections({ from: node1, to: node2 }); + // not part of the graph + const node3 = createNodeData({ name: 'Node3' }); + const runData: IRunData = { + [node1.name]: [toITaskData([{ data: { value: 1 } }])], + [node2.name]: [toITaskData([{ data: { value: 2 } }])], + [node3.name]: [toITaskData([{ data: { value: 3 } }])], + }; + + // ACT + const newRunData = cleanRunData(runData, graph, new Set([node2])); + + // ASSERT + 
expect(newRunData).toEqual({ + [node1.name]: [toITaskData([{ data: { value: 1 } }])], + }); + }); }); diff --git a/packages/core/src/PartialExecutionUtils/__tests__/findStartNodes.test.ts b/packages/core/src/PartialExecutionUtils/__tests__/findStartNodes.test.ts index 8dee8dff1b..f2a99fdb92 100644 --- a/packages/core/src/PartialExecutionUtils/__tests__/findStartNodes.test.ts +++ b/packages/core/src/PartialExecutionUtils/__tests__/findStartNodes.test.ts @@ -442,4 +442,126 @@ describe('findStartNodes', () => { expect(startNodes.size).toBe(1); expect(startNodes).toContainEqual(node2); }); + + describe('custom loop logic', () => { + test('if the last run of loop node has no data (null) on the done output, then the loop is the start node', () => { + // ARRANGE + const trigger = createNodeData({ name: 'trigger' }); + const loop = createNodeData({ name: 'loop', type: 'n8n-nodes-base.splitInBatches' }); + const inLoop = createNodeData({ name: 'inLoop' }); + const afterLoop = createNodeData({ name: 'afterLoop' }); + const graph = new DirectedGraph() + .addNodes(trigger, loop, inLoop, afterLoop) + .addConnections( + { from: trigger, to: loop }, + { from: loop, outputIndex: 1, to: inLoop }, + { from: inLoop, to: loop }, + { from: loop, to: afterLoop }, + ); + const runData: IRunData = { + [trigger.name]: [toITaskData([{ data: { name: 'trigger' } }])], + [loop.name]: [ + // only output on the `loop` branch, but no output on the `done` + // branch + toITaskData([{ outputIndex: 1, data: { name: 'loop' } }]), + ], + [inLoop.name]: [toITaskData([{ data: { name: 'inLoop' } }])], + }; + + // ACT + const startNodes = findStartNodes({ + graph, + trigger, + destination: afterLoop, + runData, + pinData: {}, + }); + + // ASSERT + expect(startNodes.size).toBe(1); + expect(startNodes).toContainEqual(loop); + }); + + test('if the last run of loop node has no data (empty array) on the done output, then the loop is the start node', () => { + // ARRANGE + const trigger = createNodeData({ name: 'trigger' }); + const loop = createNodeData({ name: 'loop', type: 'n8n-nodes-base.splitInBatches' }); + const inLoop = createNodeData({ name: 'inLoop' }); + const afterLoop = createNodeData({ name: 'afterLoop' }); + const graph = new DirectedGraph() + .addNodes(trigger, loop, inLoop, afterLoop) + .addConnections( + { from: trigger, to: loop }, + { from: loop, outputIndex: 1, to: inLoop }, + { from: inLoop, to: loop }, + { from: loop, to: afterLoop }, + ); + const runData: IRunData = { + [trigger.name]: [toITaskData([{ data: { name: 'trigger' } }])], + [loop.name]: [ + // This is handcrafted because `toITaskData` does not allow inserting + // an empty array like the first element of `main` below. But the + // execution engine creates ITaskData like this. 
+ { + executionStatus: 'success', + executionTime: 0, + startTime: 0, + source: [], + data: { main: [[], [{ json: { name: 'loop' } }]] }, + }, + ], + [inLoop.name]: [toITaskData([{ data: { name: 'inLoop' } }])], + }; + + // ACT + const startNodes = findStartNodes({ + graph, + trigger, + destination: afterLoop, + runData, + pinData: {}, + }); + + // ASSERT + expect(startNodes.size).toBe(1); + expect(startNodes).toContainEqual(loop); + }); + + test('if the loop has data on the done output in the last run it does not become a start node', () => { + // ARRANGE + const trigger = createNodeData({ name: 'trigger' }); + const loop = createNodeData({ name: 'loop', type: 'n8n-nodes-base.splitInBatches' }); + const inLoop = createNodeData({ name: 'inLoop' }); + const afterLoop = createNodeData({ name: 'afterLoop' }); + const graph = new DirectedGraph() + .addNodes(trigger, loop, inLoop, afterLoop) + .addConnections( + { from: trigger, to: loop }, + { from: loop, outputIndex: 1, to: inLoop }, + { from: inLoop, to: loop }, + { from: loop, to: afterLoop }, + ); + const runData: IRunData = { + [trigger.name]: [toITaskData([{ data: { name: 'trigger' } }])], + [loop.name]: [ + toITaskData([{ outputIndex: 1, data: { name: 'loop' } }]), + toITaskData([{ outputIndex: 0, data: { name: 'done' } }]), + ], + [inLoop.name]: [toITaskData([{ data: { name: 'inLoop' } }])], + }; + + // ACT + const startNodes = findStartNodes({ + graph, + trigger, + destination: afterLoop, + runData, + pinData: {}, + }); + + // ASSERT + expect(startNodes.size).toBe(1); + expect(startNodes).toContainEqual(afterLoop); + }); + }); }); diff --git a/packages/core/src/PartialExecutionUtils/__tests__/helpers.ts b/packages/core/src/PartialExecutionUtils/__tests__/helpers.ts index 6a6c8a88db..74976bba3e 100644 --- a/packages/core/src/PartialExecutionUtils/__tests__/helpers.ts +++ b/packages/core/src/PartialExecutionUtils/__tests__/helpers.ts @@ -5,7 +5,7 @@ interface StubNode { name: string; parameters?: INodeParameters; disabled?: boolean; - type?: string; + type?: 'n8n-nodes-base.manualTrigger' | 'n8n-nodes-base.splitInBatches' | (string & {}); } export function createNodeData(stubData: StubNode): INode { diff --git a/packages/core/src/PartialExecutionUtils/cleanRunData.ts b/packages/core/src/PartialExecutionUtils/cleanRunData.ts index bcd60c423b..6ed5db6100 100644 --- a/packages/core/src/PartialExecutionUtils/cleanRunData.ts +++ b/packages/core/src/PartialExecutionUtils/cleanRunData.ts @@ -23,5 +23,13 @@ export function cleanRunData( } } + // Remove run data for all nodes that are not part of the subgraph + for (const nodeName of Object.keys(newRunData)) { + if (!graph.hasNode(nodeName)) { + // remove run data for node that is not part of the graph + delete newRunData[nodeName]; + } + } + return newRunData; } diff --git a/packages/core/src/PartialExecutionUtils/findStartNodes.ts b/packages/core/src/PartialExecutionUtils/findStartNodes.ts index b3f4f95399..1c1c0b9fc7 100644 --- a/packages/core/src/PartialExecutionUtils/findStartNodes.ts +++ b/packages/core/src/PartialExecutionUtils/findStartNodes.ts @@ -1,7 +1,7 @@ -import type { INode, IPinData, IRunData } from 'n8n-workflow'; +import { NodeConnectionType, type INode, type IPinData, type IRunData } from 'n8n-workflow'; import type { DirectedGraph } from './DirectedGraph'; -import { getIncomingData } from './getIncomingData'; +import { getIncomingData, getIncomingDataFromAnyRun } from './getIncomingData'; /** * A node is dirty if either of the following is true: @@ -73,6 +73,25 @@ function 
findStartNodesRecursive( return startNodes; } + // If the current node is a loop node, check if the `done` output has data on + // the last run. If it doesn't the loop wasn't fully executed and needs to be + // re-run from the start. Thus the loop node become the start node. + if (current.type === 'n8n-nodes-base.splitInBatches') { + const nodeRunData = getIncomingData( + runData, + current.name, + // last run + -1, + NodeConnectionType.Main, + 0, + ); + + if (nodeRunData === null || nodeRunData.length === 0) { + startNodes.add(current); + return startNodes; + } + } + // If we detect a cycle stop following the branch, there is no start node on // this branch. if (seen.has(current)) { @@ -82,19 +101,16 @@ function findStartNodesRecursive( // Recurse with every direct child that is part of the sub graph. const outGoingConnections = graph.getDirectChildConnections(current); for (const outGoingConnection of outGoingConnections) { - const nodeRunData = getIncomingData( + const nodeRunData = getIncomingDataFromAnyRun( runData, outGoingConnection.from.name, - // NOTE: It's always 0 until I fix the bug that removes the run data for - // old runs. The FE only sends data for one run for each node. - 0, outGoingConnection.type, outGoingConnection.outputIndex, ); // If the node has multiple outputs, only follow the outputs that have run data. const hasNoRunData = - nodeRunData === null || nodeRunData === undefined || nodeRunData.length === 0; + nodeRunData === null || nodeRunData === undefined || nodeRunData.data.length === 0; if (hasNoRunData) { continue; } diff --git a/packages/core/src/PartialExecutionUtils/getIncomingData.ts b/packages/core/src/PartialExecutionUtils/getIncomingData.ts index acac8ad22d..a6a66ee25b 100644 --- a/packages/core/src/PartialExecutionUtils/getIncomingData.ts +++ b/packages/core/src/PartialExecutionUtils/getIncomingData.ts @@ -1,4 +1,3 @@ -import * as a from 'assert'; import type { INodeExecutionData, IRunData, NodeConnectionType } from 'n8n-workflow'; export function getIncomingData( @@ -7,18 +6,8 @@ export function getIncomingData( runIndex: number, connectionType: NodeConnectionType, outputIndex: number, -): INodeExecutionData[] | null | undefined { - a.ok(runData[nodeName], `Can't find node with name '${nodeName}' in runData.`); - a.ok( - runData[nodeName][runIndex], - `Can't find a run for index '${runIndex}' for node name '${nodeName}'`, - ); - a.ok( - runData[nodeName][runIndex].data, - `Can't find data for index '${runIndex}' for node name '${nodeName}'`, - ); - - return runData[nodeName][runIndex].data[connectionType][outputIndex]; +): INodeExecutionData[] | null { + return runData[nodeName]?.at(runIndex)?.data?.[connectionType].at(outputIndex) ?? 
null; } function getRunIndexLength(runData: IRunData, nodeName: string) { diff --git a/packages/workflow/src/RoutingNode.ts b/packages/core/src/RoutingNode.ts similarity index 97% rename from packages/workflow/src/RoutingNode.ts rename to packages/core/src/RoutingNode.ts index db3b180972..1c041735d8 100644 --- a/packages/workflow/src/RoutingNode.ts +++ b/packages/core/src/RoutingNode.ts @@ -1,25 +1,18 @@ /* eslint-disable @typescript-eslint/no-unsafe-call */ /* eslint-disable @typescript-eslint/no-unsafe-argument */ - /* eslint-disable @typescript-eslint/prefer-nullish-coalescing */ - /* eslint-disable @typescript-eslint/no-unsafe-member-access */ /* eslint-disable @typescript-eslint/no-unsafe-assignment */ - import get from 'lodash/get'; import merge from 'lodash/merge'; import set from 'lodash/set'; -import url from 'node:url'; - -import { NodeApiError } from './errors/node-api.error'; -import { NodeOperationError } from './errors/node-operation.error'; +import { NodeHelpers, NodeApiError, NodeOperationError, sleep } from 'n8n-workflow'; import type { ICredentialDataDecryptedObject, ICredentialsDecrypted, IHttpRequestOptions, IN8nHttpFullResponse, INode, - INodeExecuteFunctions, INodeExecutionData, INodeParameters, INodePropertyOptions, @@ -43,10 +36,11 @@ import type { CloseFunction, INodeCredentialDescription, IExecutePaginationFunctions, -} from './Interfaces'; -import * as NodeHelpers from './NodeHelpers'; -import { sleep } from './utils'; -import type { Workflow } from './Workflow'; + Workflow, +} from 'n8n-workflow'; +import url from 'node:url'; + +import { ExecuteContext, ExecuteSingleContext } from './node-execution-context'; export class RoutingNode { additionalData: IWorkflowExecuteAdditionalData; @@ -83,7 +77,6 @@ export class RoutingNode { runIndex: number, nodeType: INodeType, executeData: IExecuteData, - nodeExecuteFunctions: INodeExecuteFunctions, credentialsDecrypted?: ICredentialsDecrypted, abortSignal?: AbortSignal, ): Promise { @@ -91,16 +84,16 @@ export class RoutingNode { const returnData: INodeExecutionData[] = []; const closeFunctions: CloseFunction[] = []; - const executeFunctions = nodeExecuteFunctions.getExecuteFunctions( + const executeFunctions = new ExecuteContext( this.workflow, + this.node, + this.additionalData, + this.mode, this.runExecutionData, runIndex, this.connectionInputData, inputData, - this.node, - this.additionalData, executeData, - this.mode, closeFunctions, abortSignal, ); @@ -136,6 +129,7 @@ export class RoutingNode { credentials = (await executeFunctions.getCredentials( credentialDescription.name, + 0, )) || {}; } catch (error) { if (credentialDescription.required) { @@ -168,20 +162,22 @@ export class RoutingNode { } } + const thisArgs = new ExecuteSingleContext( + this.workflow, + this.node, + this.additionalData, + this.mode, + this.runExecutionData, + runIndex, + this.connectionInputData, + inputData, + itemIndex, + executeData, + abortSignal, + ); + itemContext.push({ - thisArgs: nodeExecuteFunctions.getExecuteSingleFunctions( - this.workflow, - this.runExecutionData, - runIndex, - this.connectionInputData, - inputData, - this.node, - itemIndex, - this.additionalData, - executeData, - this.mode, - abortSignal, - ), + thisArgs, requestData: { options: { qs: {}, @@ -308,6 +304,7 @@ export class RoutingNode { } const promisesResponses = await Promise.allSettled(requestPromises); + // eslint-disable-next-line @typescript-eslint/no-explicit-any let responseData: any; for (let itemIndex = 0; itemIndex < items.length; itemIndex++) { 
responseData = promisesResponses.shift(); diff --git a/packages/core/src/TriggersAndPollers.ts b/packages/core/src/TriggersAndPollers.ts new file mode 100644 index 0000000000..b77926e136 --- /dev/null +++ b/packages/core/src/TriggersAndPollers.ts @@ -0,0 +1,116 @@ +import { ApplicationError } from 'n8n-workflow'; +import type { + Workflow, + INode, + INodeExecutionData, + IPollFunctions, + IGetExecuteTriggerFunctions, + IWorkflowExecuteAdditionalData, + WorkflowExecuteMode, + WorkflowActivateMode, + ITriggerResponse, + IDeferredPromise, + IExecuteResponsePromiseData, + IRun, +} from 'n8n-workflow'; +import { Service } from 'typedi'; + +@Service() +export class TriggersAndPollers { + /** + * Runs the given trigger node so that it can trigger the workflow when the node has data. + */ + async runTrigger( + workflow: Workflow, + node: INode, + getTriggerFunctions: IGetExecuteTriggerFunctions, + additionalData: IWorkflowExecuteAdditionalData, + mode: WorkflowExecuteMode, + activation: WorkflowActivateMode, + ): Promise { + const triggerFunctions = getTriggerFunctions(workflow, node, additionalData, mode, activation); + + const nodeType = workflow.nodeTypes.getByNameAndVersion(node.type, node.typeVersion); + + if (!nodeType.trigger) { + throw new ApplicationError('Node type does not have a trigger function defined', { + extra: { nodeName: node.name }, + tags: { nodeType: node.type }, + }); + } + + if (mode === 'manual') { + // In manual mode we do not just start the trigger function we also + // want to be able to get informed as soon as the first data got emitted + const triggerResponse = await nodeType.trigger.call(triggerFunctions); + + // Add the manual trigger response which resolves when the first time data got emitted + triggerResponse!.manualTriggerResponse = new Promise((resolve, reject) => { + triggerFunctions.emit = ( + (resolveEmit) => + ( + data: INodeExecutionData[][], + responsePromise?: IDeferredPromise, + donePromise?: IDeferredPromise, + ) => { + additionalData.hooks!.hookFunctions.sendResponse = [ + async (response: IExecuteResponsePromiseData): Promise => { + if (responsePromise) { + responsePromise.resolve(response); + } + }, + ]; + + if (donePromise) { + additionalData.hooks!.hookFunctions.workflowExecuteAfter?.unshift( + async (runData: IRun): Promise => { + return donePromise.resolve(runData); + }, + ); + } + + resolveEmit(data); + } + )(resolve); + triggerFunctions.emitError = ( + (rejectEmit) => + (error: Error, responsePromise?: IDeferredPromise) => { + additionalData.hooks!.hookFunctions.sendResponse = [ + async (): Promise => { + if (responsePromise) { + responsePromise.reject(error); + } + }, + ]; + + rejectEmit(error); + } + )(reject); + }); + + return triggerResponse; + } + // In all other modes simply start the trigger + return await nodeType.trigger.call(triggerFunctions); + } + + /** + * Runs the given poller node so that it can trigger the workflow when the node has data. 
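In manual mode, `runTrigger()` above wraps the trigger's `emit`/`emitError` so that a promise settles on the first emission. A stripped-down sketch of that wrapping, omitting the response and "done" promises that the real code wires into the execution lifecycle hooks; the interface here is a simplified stand-in, not n8n's `ITriggerFunctions`:

```ts
interface TriggerFunctionsLike<T> {
  emit: (data: T) => void;
  emitError: (error: Error) => void;
}

// Resolve on the first emitted payload, reject on the first emitted error,
// while still forwarding data to the original emit path.
function manualTriggerResponse<T>(triggerFunctions: TriggerFunctionsLike<T>): Promise<T> {
  return new Promise<T>((resolve, reject) => {
    const originalEmit = triggerFunctions.emit.bind(triggerFunctions);
    triggerFunctions.emit = (data) => {
      resolve(data); // first emission settles the promise
      originalEmit(data); // normal processing continues
    };
    triggerFunctions.emitError = (error) => reject(error);
  });
}
```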
+ */ + async runPoll( + workflow: Workflow, + node: INode, + pollFunctions: IPollFunctions, + ): Promise { + const nodeType = workflow.nodeTypes.getByNameAndVersion(node.type, node.typeVersion); + + if (!nodeType.poll) { + throw new ApplicationError('Node type does not have a poll function defined', { + extra: { nodeName: node.name }, + tags: { nodeType: node.type }, + }); + } + + return await nodeType.poll.call(pollFunctions); + } +} diff --git a/packages/core/src/WorkflowExecute.ts b/packages/core/src/WorkflowExecute.ts index 515f58a657..6f2cce0b3d 100644 --- a/packages/core/src/WorkflowExecute.ts +++ b/packages/core/src/WorkflowExecute.ts @@ -37,6 +37,9 @@ import type { StartNodeData, NodeExecutionHint, NodeInputConnections, + IRunNodeResponse, + IWorkflowIssues, + INodeIssues, } from 'n8n-workflow'; import { LoggerProxy as Logger, @@ -46,11 +49,14 @@ import { ApplicationError, NodeExecutionOutput, sleep, - ErrorReporterProxy, ExecutionCancelledError, + Node, } from 'n8n-workflow'; import PCancelable from 'p-cancelable'; +import Container from 'typedi'; +import { ErrorReporter } from './error-reporter'; +import { ExecuteContext, PollContext } from './node-execution-context'; import * as NodeExecuteFunctions from './NodeExecuteFunctions'; import { DirectedGraph, @@ -62,6 +68,8 @@ import { handleCycles, filterDisabledNodes, } from './PartialExecutionUtils'; +import { RoutingNode } from './RoutingNode'; +import { TriggersAndPollers } from './TriggersAndPollers'; export class WorkflowExecute { private status: ExecutionStatus = 'new'; @@ -354,24 +362,24 @@ export class WorkflowExecute { } // 2. Find the Subgraph - const graph = DirectedGraph.fromWorkflow(workflow); - const subgraph = findSubgraph({ graph: filterDisabledNodes(graph), destination, trigger }); - const filteredNodes = subgraph.getNodes(); + let graph = DirectedGraph.fromWorkflow(workflow); + graph = findSubgraph({ graph: filterDisabledNodes(graph), destination, trigger }); + const filteredNodes = graph.getNodes(); // 3. Find the Start Nodes runData = omit(runData, dirtyNodeNames); - let startNodes = findStartNodes({ graph: subgraph, trigger, destination, runData, pinData }); + let startNodes = findStartNodes({ graph, trigger, destination, runData, pinData }); // 4. Detect Cycles // 5. Handle Cycles startNodes = handleCycles(graph, startNodes, trigger); // 6. Clean Run Data - const newRunData: IRunData = cleanRunData(runData, graph, startNodes); + runData = cleanRunData(runData, graph, startNodes); // 7. Recreate Execution Stack const { nodeExecutionStack, waitingExecution, waitingExecutionSource } = - recreateNodeExecutionStack(subgraph, new Set(startNodes), runData, pinData ?? {}); + recreateNodeExecutionStack(graph, new Set(startNodes), runData, pinData ?? {}); // 8. Execute this.status = 'running'; @@ -381,7 +389,7 @@ export class WorkflowExecute { runNodeFilter: Array.from(filteredNodes.values()).map((node) => node.name), }, resultData: { - runData: newRunData, + runData, pinData, }, executionData: { @@ -393,7 +401,7 @@ export class WorkflowExecute { }, }; - return this.processRunExecutionData(subgraph.toWorkflow({ ...workflow })); + return this.processRunExecutionData(graph.toWorkflow({ ...workflow })); } /** @@ -883,6 +891,280 @@ export class WorkflowExecute { } } + /** + * Checks if everything in the workflow is complete + * and ready to be executed. If it returns null everything + * is fine. If there are issues it returns the issues + * which have been found for the different nodes. 
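Step 6 above ("Clean Run Data") now also drops run data for nodes that are no longer part of the executed subgraph, matching the new loop in `cleanRunData.ts` and its test earlier in this diff. A self-contained sketch of just that pruning rule, with a simplified `RunData` type standing in for n8n's `IRunData` and a plain set of node names standing in for the `DirectedGraph`:

```ts
type RunData = Record<string, unknown[]>;

// Keep only run data for nodes that are part of the (sub)graph being executed.
function pruneRunData(runData: RunData, graphNodes: Set<string>): RunData {
  const pruned: RunData = { ...runData };
  for (const nodeName of Object.keys(pruned)) {
    if (!graphNodes.has(nodeName)) delete pruned[nodeName];
  }
  return pruned;
}

// Node3 has run data but is not in the graph, so its entry is removed.
pruneRunData(
  { Node1: [{ value: 1 }], Node2: [{ value: 2 }], Node3: [{ value: 3 }] },
  new Set(['Node1', 'Node2']),
);
// => { Node1: [...], Node2: [...] }
```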
+ * TODO: Does currently not check for credential issues! + */ + checkReadyForExecution( + workflow: Workflow, + inputData: { + startNode?: string; + destinationNode?: string; + pinDataNodeNames?: string[]; + } = {}, + ): IWorkflowIssues | null { + const workflowIssues: IWorkflowIssues = {}; + + let checkNodes: string[] = []; + if (inputData.destinationNode) { + // If a destination node is given we have to check all the nodes + // leading up to it + checkNodes = workflow.getParentNodes(inputData.destinationNode); + checkNodes.push(inputData.destinationNode); + } else if (inputData.startNode) { + // If a start node is given we have to check all nodes which + // come after it + checkNodes = workflow.getChildNodes(inputData.startNode); + checkNodes.push(inputData.startNode); + } + + for (const nodeName of checkNodes) { + let nodeIssues: INodeIssues | null = null; + const node = workflow.nodes[nodeName]; + + if (node.disabled === true) { + continue; + } + + const nodeType = workflow.nodeTypes.getByNameAndVersion(node.type, node.typeVersion); + + if (nodeType === undefined) { + // Node type is not known + nodeIssues = { + typeUnknown: true, + }; + } else { + nodeIssues = NodeHelpers.getNodeParametersIssues( + nodeType.description.properties, + node, + inputData.pinDataNodeNames, + ); + } + + if (nodeIssues !== null) { + workflowIssues[node.name] = nodeIssues; + } + } + + if (Object.keys(workflowIssues).length === 0) { + return null; + } + + return workflowIssues; + } + + /** Executes the given node */ + // eslint-disable-next-line complexity + async runNode( + workflow: Workflow, + executionData: IExecuteData, + runExecutionData: IRunExecutionData, + runIndex: number, + additionalData: IWorkflowExecuteAdditionalData, + mode: WorkflowExecuteMode, + abortSignal?: AbortSignal, + ): Promise { + const { node } = executionData; + let inputData = executionData.data; + + if (node.disabled === true) { + // If node is disabled simply pass the data through + // return NodeRunHelpers. + if (inputData.hasOwnProperty('main') && inputData.main.length > 0) { + // If the node is disabled simply return the data from the first main input + if (inputData.main[0] === null) { + return { data: undefined }; + } + return { data: [inputData.main[0]] }; + } + return { data: undefined }; + } + + const nodeType = workflow.nodeTypes.getByNameAndVersion(node.type, node.typeVersion); + + let connectionInputData: INodeExecutionData[] = []; + if (nodeType.execute || (!nodeType.poll && !nodeType.trigger && !nodeType.webhook)) { + // Only stop if first input is empty for execute runs. For all others run anyways + // because then it is a trigger node. As they only pass data through and so the input-data + // becomes output-data it has to be possible. 
+ + if (inputData.main?.length > 0) { + // We always use the data of main input and the first input for execute + connectionInputData = inputData.main[0] as INodeExecutionData[]; + } + + const forceInputNodeExecution = workflow.settings.executionOrder !== 'v1'; + if (!forceInputNodeExecution) { + // If the nodes do not get force executed data of some inputs may be missing + // for that reason do we use the data of the first one that contains any + for (const mainData of inputData.main) { + if (mainData?.length) { + connectionInputData = mainData; + break; + } + } + } + + if (connectionInputData.length === 0) { + // No data for node so return + return { data: undefined }; + } + } + + if ( + runExecutionData.resultData.lastNodeExecuted === node.name && + runExecutionData.resultData.error !== undefined + ) { + // The node did already fail. So throw an error here that it displays and logs it correctly. + // Does get used by webhook and trigger nodes in case they throw an error that it is possible + // to log the error and display in Editor-UI. + if ( + runExecutionData.resultData.error.name === 'NodeOperationError' || + runExecutionData.resultData.error.name === 'NodeApiError' + ) { + throw runExecutionData.resultData.error; + } + + const error = new Error(runExecutionData.resultData.error.message); + error.stack = runExecutionData.resultData.error.stack; + throw error; + } + + if (node.executeOnce === true) { + // If node should be executed only once so use only the first input item + const newInputData: ITaskDataConnections = {}; + for (const connectionType of Object.keys(inputData)) { + newInputData[connectionType] = inputData[connectionType].map((input) => { + // eslint-disable-next-line @typescript-eslint/prefer-optional-chain + return input && input.slice(0, 1); + }); + } + inputData = newInputData; + } + + if (nodeType.execute) { + const closeFunctions: CloseFunction[] = []; + const context = new ExecuteContext( + workflow, + node, + additionalData, + mode, + runExecutionData, + runIndex, + connectionInputData, + inputData, + executionData, + closeFunctions, + abortSignal, + ); + + const data = + nodeType instanceof Node + ? 
await nodeType.execute(context) + : await nodeType.execute.call(context); + + const closeFunctionsResults = await Promise.allSettled( + closeFunctions.map(async (fn) => await fn()), + ); + + const closingErrors = closeFunctionsResults + .filter((result): result is PromiseRejectedResult => result.status === 'rejected') + // eslint-disable-next-line @typescript-eslint/no-unsafe-return + .map((result) => result.reason); + + if (closingErrors.length > 0) { + if (closingErrors[0] instanceof Error) throw closingErrors[0]; + throw new ApplicationError("Error on execution node's close function(s)", { + extra: { nodeName: node.name }, + tags: { nodeType: node.type }, + cause: closingErrors, + }); + } + + return { data }; + } else if (nodeType.poll) { + if (mode === 'manual') { + // In manual mode run the poll function + const context = new PollContext(workflow, node, additionalData, mode, 'manual'); + return { data: await nodeType.poll.call(context) }; + } + // In any other mode pass data through as it already contains the result of the poll + return { data: inputData.main as INodeExecutionData[][] }; + } else if (nodeType.trigger) { + if (mode === 'manual') { + // In manual mode start the trigger + const triggerResponse = await Container.get(TriggersAndPollers).runTrigger( + workflow, + node, + NodeExecuteFunctions.getExecuteTriggerFunctions, + additionalData, + mode, + 'manual', + ); + + if (triggerResponse === undefined) { + return { data: null }; + } + + let closeFunction; + if (triggerResponse.closeFunction) { + // In manual mode we return the trigger closeFunction. That allows it to be called directly + // but we do not have to wait for it to finish. That is important for things like queue-nodes. + // There the full close will may be delayed till a message gets acknowledged after the execution. + // If we would not be able to wait for it to close would it cause problems with "own" mode as the + // process would be killed directly after it and so the acknowledge would not have been finished yet. + closeFunction = triggerResponse.closeFunction; + + // Manual testing of Trigger nodes creates an execution. If the execution is cancelled, `closeFunction` should be called to cleanup any open connections/consumers + abortSignal?.addEventListener('abort', closeFunction); + } + + if (triggerResponse.manualTriggerFunction !== undefined) { + // If a manual trigger function is defined call it and wait till it did run + await triggerResponse.manualTriggerFunction(); + } + + const response = await triggerResponse.manualTriggerResponse!; + + if (response.length === 0) { + return { data: null, closeFunction }; + } + + return { data: response, closeFunction }; + } + // For trigger nodes in any mode except "manual" do we simply pass the data through + return { data: inputData.main as INodeExecutionData[][] }; + } else if (nodeType.webhook) { + // For webhook nodes always simply pass the data through + return { data: inputData.main as INodeExecutionData[][] }; + } else { + // For nodes which have routing information on properties + + const routingNode = new RoutingNode( + workflow, + node, + connectionInputData, + runExecutionData ?? null, + additionalData, + mode, + ); + + return { + data: await routingNode.runNode( + inputData, + runIndex, + nodeType, + executionData, + undefined, + abortSignal, + ), + }; + } + } + /** * Runs the given execution data. * @@ -908,7 +1190,7 @@ export class WorkflowExecute { const pinDataNodeNames = Object.keys(this.runExecutionData.resultData.pinData ?? 
{}); - const workflowIssues = workflow.checkReadyForExecution({ + const workflowIssues = this.checkReadyForExecution(workflow, { startNode, destinationNode, pinDataNodeNames, @@ -1018,8 +1300,8 @@ export class WorkflowExecute { // Update the pairedItem information on items const newTaskDataConnections: ITaskDataConnections = {}; - for (const inputName of Object.keys(executionData.data)) { - newTaskDataConnections[inputName] = executionData.data[inputName].map( + for (const connectionType of Object.keys(executionData.data)) { + newTaskDataConnections[connectionType] = executionData.data[connectionType].map( (input, inputIndex) => { if (input === null) { return input; @@ -1170,12 +1452,12 @@ export class WorkflowExecute { workflowId: workflow.id, }); - let runNodeData = await workflow.runNode( + let runNodeData = await this.runNode( + workflow, executionData, this.runExecutionData, runIndex, this.additionalData, - NodeExecuteFunctions, this.mode, this.abortController.signal, ); @@ -1187,12 +1469,12 @@ export class WorkflowExecute { while (didContinueOnFail && tryIndex !== maxTries - 1) { await sleep(waitBetweenTries); - runNodeData = await workflow.runNode( + runNodeData = await this.runNode( + workflow, executionData, this.runExecutionData, runIndex, this.additionalData, - NodeExecuteFunctions, this.mode, this.abortController.signal, ); @@ -1229,19 +1511,20 @@ export class WorkflowExecute { const closeFunctions: CloseFunction[] = []; // Create a WorkflowDataProxy instance that we can get the data of the // item which did error - const executeFunctions = NodeExecuteFunctions.getExecuteFunctions( + const executeFunctions = new ExecuteContext( workflow, + executionData.node, + this.additionalData, + this.mode, this.runExecutionData, runIndex, [], executionData.data, - executionData.node, - this.additionalData, executionData, - this.mode, closeFunctions, this.abortController.signal, ); + const dataProxy = executeFunctions.getWorkflowDataProxy(0); // Loop over all outputs except the error output as it would not contain data by default @@ -1408,7 +1691,7 @@ export class WorkflowExecute { } } - if (nodeSuccessData === null && !this.runExecutionData.waitTill!) { + if (nodeSuccessData === null && !this.runExecutionData.waitTill) { // If null gets returned it means that the node did succeed // but did not have any data. So the branch should end // (meaning the nodes afterwards should not be processed) @@ -1428,7 +1711,7 @@ export class WorkflowExecute { toReport = error; } if (toReport) { - ErrorReporterProxy.error(toReport, { + Container.get(ErrorReporter).error(toReport, { extra: { nodeName: executionNode.name, nodeType: executionNode.type, @@ -1535,7 +1818,7 @@ export class WorkflowExecute { this.runExecutionData.resultData.runData[executionNode.name].push(taskData); - if (this.runExecutionData.waitTill!) { + if (this.runExecutionData.waitTill) { await this.executeHook('nodeExecuteAfter', [ executionNode.name, taskData, @@ -1916,7 +2199,7 @@ export class WorkflowExecute { if (executionError.message?.includes('canceled')) { fullRunData.status = 'canceled'; } - } else if (this.runExecutionData.waitTill!) 
{ + } else if (this.runExecutionData.waitTill) { // eslint-disable-next-line @typescript-eslint/restrict-template-expressions Logger.debug(`Workflow execution will wait until ${this.runExecutionData.waitTill}`, { workflowId: workflow.id, diff --git a/packages/core/src/decorators/__tests__/memoized.test.ts b/packages/core/src/decorators/__tests__/memoized.test.ts new file mode 100644 index 0000000000..f29ea4d469 --- /dev/null +++ b/packages/core/src/decorators/__tests__/memoized.test.ts @@ -0,0 +1,153 @@ +import { AssertionError, ok } from 'node:assert'; +import { setFlagsFromString } from 'node:v8'; +import { runInNewContext } from 'node:vm'; + +import { Memoized } from '../memoized'; + +describe('Memoized Decorator', () => { + class TestClass { + private computeCount = 0; + + constructor(private readonly value: number = 42) {} + + @Memoized + get expensiveComputation() { + this.computeCount++; + return this.value * 2; + } + + getComputeCount() { + return this.computeCount; + } + } + + it('should only compute the value once', () => { + const instance = new TestClass(); + + // First access should compute + expect(instance.expensiveComputation).toBe(84); + expect(instance.getComputeCount()).toBe(1); + + // Second access should use cached value + expect(instance.expensiveComputation).toBe(84); + expect(instance.getComputeCount()).toBe(1); + + // Third access should still use cached value + expect(instance.expensiveComputation).toBe(84); + expect(instance.getComputeCount()).toBe(1); + }); + + it('should cache values independently for different instances', () => { + const instance1 = new TestClass(10); + const instance2 = new TestClass(20); + + expect(instance1.expensiveComputation).toBe(20); + expect(instance2.expensiveComputation).toBe(40); + + expect(instance1.getComputeCount()).toBe(1); + expect(instance2.getComputeCount()).toBe(1); + }); + + it('should throw error when used on non-getter', () => { + expect(() => { + class InvalidClass { + // @ts-expect-error this code will fail at compile time and at runtime + @Memoized + public normalProperty = 42; + } + new InvalidClass(); + }).toThrow(AssertionError); + }); + + it('should make cached value non-enumerable', () => { + const instance = new TestClass(); + instance.expensiveComputation; // Access to trigger caching + + const propertyNames = Object.keys(instance); + expect(propertyNames).not.toContain('expensiveComputation'); + }); + + it('should not allow reconfiguring the cached value', () => { + const instance = new TestClass(); + instance.expensiveComputation; // Access to trigger caching + + expect(() => { + Object.defineProperty(instance, 'expensiveComputation', { + value: 999, + configurable: true, + }); + }).toThrow(); + }); + + it('should work when child class references memoized getter in parent class', () => { + class ParentClass { + protected computeCount = 0; + + @Memoized + get parentValue() { + this.computeCount++; + return 42; + } + + getComputeCount() { + return this.computeCount; + } + } + + class ChildClass extends ParentClass { + get childValue() { + return this.parentValue * 2; + } + } + + const child = new ChildClass(); + + expect(child.childValue).toBe(84); + expect(child.getComputeCount()).toBe(1); + + expect(child.childValue).toBe(84); + expect(child.getComputeCount()).toBe(1); + }); + + it('should have correct property descriptor after memoization', () => { + const instance = new TestClass(); + + // Before accessing (original getter descriptor) + const beforeDescriptor = Object.getOwnPropertyDescriptor( + 
TestClass.prototype, + 'expensiveComputation', + ); + expect(beforeDescriptor?.configurable).toBe(true); + expect(beforeDescriptor?.enumerable).toBe(false); + expect(typeof beforeDescriptor?.get).toBe('function'); + expect(beforeDescriptor?.set).toBeUndefined(); + + // After accessing (memoized value descriptor) + instance.expensiveComputation; // Trigger memoization + const afterDescriptor = Object.getOwnPropertyDescriptor(instance, 'expensiveComputation'); + expect(afterDescriptor?.configurable).toBe(false); + expect(afterDescriptor?.enumerable).toBe(false); + expect(afterDescriptor?.writable).toBe(false); + expect(afterDescriptor?.value).toBe(84); + expect(afterDescriptor?.get).toBeUndefined(); + }); + + it('should not prevent garbage collection of instances', async () => { + setFlagsFromString('--expose_gc'); + const gc = runInNewContext('gc') as unknown as () => void; + + let instance: TestClass | undefined = new TestClass(); + const weakRef = new WeakRef(instance); + instance.expensiveComputation; + + // Remove the strong reference + instance = undefined; + + // Wait for garbage collection, forcing it if needed + await new Promise((resolve) => setTimeout(resolve, 10)); + gc(); + + const ref = weakRef.deref(); + ok(!ref, 'GC did not collect the instance ref'); + }); +}); diff --git a/packages/core/src/decorators/index.ts b/packages/core/src/decorators/index.ts new file mode 100644 index 0000000000..2bf9174d7a --- /dev/null +++ b/packages/core/src/decorators/index.ts @@ -0,0 +1 @@ +export { Memoized } from './memoized'; diff --git a/packages/core/src/decorators/memoized.ts b/packages/core/src/decorators/memoized.ts new file mode 100644 index 0000000000..3ff5c56619 --- /dev/null +++ b/packages/core/src/decorators/memoized.ts @@ -0,0 +1,41 @@ +import assert from 'node:assert'; + +/** + * A decorator that implements memoization for class property getters. 
+ * + * The decorated getter will only be executed once and its value cached for subsequent access + * + * @example + * class Example { + * @Memoized + * get computedValue() { + * // This will only run once and the result will be cached + * return heavyComputation(); + * } + * } + * + * @throws If decorator is used on something other than a getter + */ +export function Memoized( + target: object, + propertyKey: string | symbol, + descriptor?: TypedPropertyDescriptor, +): TypedPropertyDescriptor { + const originalGetter = descriptor?.get; + assert(originalGetter, '@Memoized can only be used on getters'); + + // Replace the original getter for the first call + descriptor.get = function (this: typeof target.constructor): T { + const value = originalGetter.call(this); + // Add a property on the class instance to stop reading from the getter on class prototype + Object.defineProperty(this, propertyKey, { + value, + configurable: false, + enumerable: false, + writable: false, + }); + return value; + }; + + return descriptor; +} diff --git a/packages/core/src/error-reporter.ts b/packages/core/src/error-reporter.ts new file mode 100644 index 0000000000..b6fc936daa --- /dev/null +++ b/packages/core/src/error-reporter.ts @@ -0,0 +1,162 @@ +import type { NodeOptions } from '@sentry/node'; +import { close } from '@sentry/node'; +import type { ErrorEvent, EventHint } from '@sentry/types'; +import { AxiosError } from 'axios'; +import { ApplicationError, LoggerProxy, type ReportingOptions } from 'n8n-workflow'; +import { createHash } from 'node:crypto'; +import { Service } from 'typedi'; + +import type { InstanceType } from './InstanceSettings'; + +@Service() +export class ErrorReporter { + /** Hashes of error stack traces, to deduplicate error reports. */ + private seenErrors = new Set(); + + private report: (error: Error | string, options?: ReportingOptions) => void; + + constructor() { + // eslint-disable-next-line @typescript-eslint/unbound-method + this.report = this.defaultReport; + } + + private defaultReport(error: Error | string, options?: ReportingOptions) { + if (error instanceof Error) { + let e = error; + + const { executionId } = options ?? {}; + const context = executionId ? ` (execution ${executionId})` : ''; + + do { + const msg = [e.message + context, e.stack ? `\n${e.stack}\n` : ''].join(''); + const meta = e instanceof ApplicationError ? 
e.extra : undefined; + LoggerProxy.error(msg, meta); + e = e.cause as Error; + } while (e); + } + } + + async shutdown(timeoutInMs = 1000) { + await close(timeoutInMs); + } + + async init(instanceType: InstanceType | 'task_runner', dsn: string) { + process.on('uncaughtException', (error) => { + this.error(error); + }); + + if (!dsn) return; + + // Collect longer stacktraces + Error.stackTraceLimit = 50; + + const { + N8N_VERSION: release, + ENVIRONMENT: environment, + DEPLOYMENT_NAME: serverName, + } = process.env; + + const { init, captureException, setTag } = await import('@sentry/node'); + const { requestDataIntegration, rewriteFramesIntegration } = await import('@sentry/node'); + + const enabledIntegrations = [ + 'InboundFilters', + 'FunctionToString', + 'LinkedErrors', + 'OnUnhandledRejection', + 'ContextLines', + ]; + + init({ + dsn, + release, + environment, + enableTracing: false, + serverName, + beforeBreadcrumb: () => null, + beforeSend: this.beforeSend.bind(this) as NodeOptions['beforeSend'], + integrations: (integrations) => [ + ...integrations.filter(({ name }) => enabledIntegrations.includes(name)), + rewriteFramesIntegration({ root: process.cwd() }), + requestDataIntegration({ + include: { + cookies: false, + data: false, + headers: false, + query_string: false, + url: true, + user: false, + }, + }), + ], + }); + + setTag('server_type', instanceType); + + this.report = (error, options) => captureException(error, options); + } + + async beforeSend(event: ErrorEvent, { originalException }: EventHint) { + if (!originalException) return null; + + if (originalException instanceof Promise) { + originalException = await originalException.catch((error) => error as Error); + } + + if (originalException instanceof AxiosError) return null; + + if ( + originalException instanceof Error && + originalException.name === 'QueryFailedError' && + ['SQLITE_FULL', 'SQLITE_IOERR'].some((errMsg) => originalException.message.includes(errMsg)) + ) { + return null; + } + + if (originalException instanceof ApplicationError) { + const { level, extra, tags } = originalException; + if (level === 'warning') return null; + event.level = level; + if (extra) event.extra = { ...event.extra, ...extra }; + if (tags) event.tags = { ...event.tags, ...tags }; + } + + if ( + originalException instanceof Error && + 'cause' in originalException && + originalException.cause instanceof Error && + 'level' in originalException.cause && + originalException.cause.level === 'warning' + ) { + // handle underlying errors propagating from dependencies like ai-assistant-sdk + return null; + } + + if (originalException instanceof Error && originalException.stack) { + const eventHash = createHash('sha1').update(originalException.stack).digest('base64'); + if (this.seenErrors.has(eventHash)) return null; + this.seenErrors.add(eventHash); + } + + return event; + } + + error(e: unknown, options?: ReportingOptions) { + const toReport = this.wrap(e); + if (toReport) this.report(toReport, options); + } + + warn(warning: Error | string, options?: ReportingOptions) { + this.error(warning, { ...options, level: 'warning' }); + } + + info(msg: string, options?: ReportingOptions) { + this.report(msg, { ...options, level: 'info' }); + } + + private wrap(e: unknown) { + if (e instanceof Error) return e; + if (typeof e === 'string') return new ApplicationError(e); + return; + } +} diff --git a/packages/core/src/errors/index.ts b/packages/core/src/errors/index.ts index c280ecb30d..38cd481c25 100644 --- a/packages/core/src/errors/index.ts +++ 
b/packages/core/src/errors/index.ts @@ -3,3 +3,5 @@ export { DisallowedFilepathError } from './disallowed-filepath.error'; export { InvalidModeError } from './invalid-mode.error'; export { InvalidManagerError } from './invalid-manager.error'; export { InvalidExecutionMetadataError } from './invalid-execution-metadata.error'; +export { UnrecognizedCredentialTypeError } from './unrecognized-credential-type.error'; +export { UnrecognizedNodeTypeError } from './unrecognized-node-type.error'; diff --git a/packages/core/src/errors/unrecognized-credential-type.error.ts b/packages/core/src/errors/unrecognized-credential-type.error.ts new file mode 100644 index 0000000000..c60b576bca --- /dev/null +++ b/packages/core/src/errors/unrecognized-credential-type.error.ts @@ -0,0 +1,9 @@ +import { ApplicationError } from 'n8n-workflow'; + +export class UnrecognizedCredentialTypeError extends ApplicationError { + severity = 'warning'; + + constructor(credentialType: string) { + super(`Unrecognized credential type: ${credentialType}`); + } +} diff --git a/packages/cli/src/errors/unrecognized-node-type.error.ts b/packages/core/src/errors/unrecognized-node-type.error.ts similarity index 55% rename from packages/cli/src/errors/unrecognized-node-type.error.ts rename to packages/core/src/errors/unrecognized-node-type.error.ts index 1ca5281de5..6ca5e34e40 100644 --- a/packages/cli/src/errors/unrecognized-node-type.error.ts +++ b/packages/core/src/errors/unrecognized-node-type.error.ts @@ -3,7 +3,7 @@ import { ApplicationError } from 'n8n-workflow'; export class UnrecognizedNodeTypeError extends ApplicationError { severity = 'warning'; - constructor(nodeType: string) { - super(`Unrecognized node type: ${nodeType}".`); + constructor(packageName: string, nodeType: string) { + super(`Unrecognized node type: ${packageName}.${nodeType}`); } } diff --git a/packages/core/src/index.ts b/packages/core/src/index.ts index 9c141867de..f2f2149b60 100644 --- a/packages/core/src/index.ts +++ b/packages/core/src/index.ts @@ -1,17 +1,18 @@ import * as NodeExecuteFunctions from './NodeExecuteFunctions'; +export * from './decorators'; export * from './errors'; export * from './ActiveWorkflows'; export * from './BinaryData/BinaryData.service'; export * from './BinaryData/types'; export { Cipher } from './Cipher'; -export * from './ClassLoader'; export * from './Constants'; export * from './Credentials'; export * from './DirectoryLoader'; export * from './Interfaces'; export { InstanceSettings, InstanceType } from './InstanceSettings'; export * from './NodeExecuteFunctions'; +export * from './RoutingNode'; export * from './WorkflowExecute'; export { NodeExecuteFunctions }; export * from './data-deduplication-service'; @@ -22,3 +23,4 @@ export { isStoredMode as isValidNonDefaultMode } from './BinaryData/utils'; export * from './ExecutionMetadata'; export * from './node-execution-context'; export * from './PartialExecutionUtils'; +export { ErrorReporter } from './error-reporter'; diff --git a/packages/core/src/node-execution-context/__tests__/execute-context.test.ts b/packages/core/src/node-execution-context/__tests__/execute-context.test.ts index 723bab24f3..a888a5a7ff 100644 --- a/packages/core/src/node-execution-context/__tests__/execute-context.test.ts +++ b/packages/core/src/node-execution-context/__tests__/execute-context.test.ts @@ -14,7 +14,7 @@ import type { INodeTypes, ICredentialDataDecryptedObject, } from 'n8n-workflow'; -import { ApplicationError, ExpressionError } from 'n8n-workflow'; +import { ApplicationError, 
ExpressionError, NodeConnectionType } from 'n8n-workflow'; import { describeCommonTests } from './shared-tests'; import { ExecuteContext } from '../execute-context'; @@ -92,33 +92,39 @@ describe('ExecuteContext', () => { describe('getInputData', () => { const inputIndex = 0; - const inputName = 'main'; + const connectionType = NodeConnectionType.Main; afterEach(() => { - inputData[inputName] = [[{ json: { test: 'data' } }]]; + inputData[connectionType] = [[{ json: { test: 'data' } }]]; }); it('should return the input data correctly', () => { const expectedData = [{ json: { test: 'data' } }]; - expect(executeContext.getInputData(inputIndex, inputName)).toEqual(expectedData); + expect(executeContext.getInputData(inputIndex, connectionType)).toEqual(expectedData); }); it('should return an empty array if the input name does not exist', () => { - const inputName = 'nonExistent'; - expect(executeContext.getInputData(inputIndex, inputName)).toEqual([]); + const connectionType = 'nonExistent'; + expect(executeContext.getInputData(inputIndex, connectionType as NodeConnectionType)).toEqual( + [], + ); }); it('should throw an error if the input index is out of range', () => { const inputIndex = 2; - expect(() => executeContext.getInputData(inputIndex, inputName)).toThrow(ApplicationError); + expect(() => executeContext.getInputData(inputIndex, connectionType)).toThrow( + ApplicationError, + ); }); it('should throw an error if the input index was not set', () => { inputData.main[inputIndex] = null; - expect(() => executeContext.getInputData(inputIndex, inputName)).toThrow(ApplicationError); + expect(() => executeContext.getInputData(inputIndex, connectionType)).toThrow( + ApplicationError, + ); }); }); diff --git a/packages/core/src/node-execution-context/__tests__/execute-single-context.test.ts b/packages/core/src/node-execution-context/__tests__/execute-single-context.test.ts index e62c2b0f46..6c1b9f1089 100644 --- a/packages/core/src/node-execution-context/__tests__/execute-single-context.test.ts +++ b/packages/core/src/node-execution-context/__tests__/execute-single-context.test.ts @@ -14,7 +14,7 @@ import type { INodeTypes, ICredentialDataDecryptedObject, } from 'n8n-workflow'; -import { ApplicationError } from 'n8n-workflow'; +import { ApplicationError, NodeConnectionType } from 'n8n-workflow'; import { describeCommonTests } from './shared-tests'; import { ExecuteSingleContext } from '../execute-single-context'; @@ -91,29 +91,31 @@ describe('ExecuteSingleContext', () => { describe('getInputData', () => { const inputIndex = 0; - const inputName = 'main'; + const connectionType = NodeConnectionType.Main; afterEach(() => { - inputData[inputName] = [[{ json: { test: 'data' } }]]; + inputData[connectionType] = [[{ json: { test: 'data' } }]]; }); it('should return the input data correctly', () => { const expectedData = { json: { test: 'data' } }; - expect(executeSingleContext.getInputData(inputIndex, inputName)).toEqual(expectedData); + expect(executeSingleContext.getInputData(inputIndex, connectionType)).toEqual(expectedData); }); it('should return an empty object if the input name does not exist', () => { - const inputName = 'nonExistent'; + const connectionType = 'nonExistent'; const expectedData = { json: {} }; - expect(executeSingleContext.getInputData(inputIndex, inputName)).toEqual(expectedData); + expect( + executeSingleContext.getInputData(inputIndex, connectionType as NodeConnectionType), + ).toEqual(expectedData); }); it('should throw an error if the input index is out of range', () => { 
const inputIndex = 1; - expect(() => executeSingleContext.getInputData(inputIndex, inputName)).toThrow( + expect(() => executeSingleContext.getInputData(inputIndex, connectionType)).toThrow( ApplicationError, ); }); @@ -121,7 +123,7 @@ describe('ExecuteSingleContext', () => { it('should throw an error if the input index was not set', () => { inputData.main[inputIndex] = null; - expect(() => executeSingleContext.getInputData(inputIndex, inputName)).toThrow( + expect(() => executeSingleContext.getInputData(inputIndex, connectionType)).toThrow( ApplicationError, ); }); @@ -129,7 +131,7 @@ describe('ExecuteSingleContext', () => { it('should throw an error if the value of input with given index was not set', () => { delete inputData.main[inputIndex]![itemIndex]; - expect(() => executeSingleContext.getInputData(inputIndex, inputName)).toThrow( + expect(() => executeSingleContext.getInputData(inputIndex, connectionType)).toThrow( ApplicationError, ); }); diff --git a/packages/core/src/node-execution-context/__tests__/node-execution-context.test.ts b/packages/core/src/node-execution-context/__tests__/node-execution-context.test.ts index 95ed62337e..0231873984 100644 --- a/packages/core/src/node-execution-context/__tests__/node-execution-context.test.ts +++ b/packages/core/src/node-execution-context/__tests__/node-execution-context.test.ts @@ -79,7 +79,7 @@ describe('NodeExecutionContext', () => { const result = testContext.getChildNodes('Test Node'); - expect(result).toEqual([ + expect(result).toMatchObject([ { name: 'Child Node 1', type: 'testType1', typeVersion: 1 }, { name: 'Child Node 2', type: 'testType2', typeVersion: 2 }, ]); @@ -98,7 +98,7 @@ describe('NodeExecutionContext', () => { const result = testContext.getParentNodes('Test Node'); - expect(result).toEqual([ + expect(result).toMatchObject([ { name: 'Parent Node 1', type: 'testType1', typeVersion: 1 }, { name: 'Parent Node 2', type: 'testType2', typeVersion: 2 }, ]); diff --git a/packages/core/src/node-execution-context/__tests__/shared-tests.ts b/packages/core/src/node-execution-context/__tests__/shared-tests.ts index c7262554d0..9992507bdd 100644 --- a/packages/core/src/node-execution-context/__tests__/shared-tests.ts +++ b/packages/core/src/node-execution-context/__tests__/shared-tests.ts @@ -1,4 +1,4 @@ -import { captor, mock } from 'jest-mock-extended'; +import { captor, mock, type MockProxy } from 'jest-mock-extended'; import type { IRunExecutionData, ContextType, @@ -9,11 +9,21 @@ import type { ITaskMetadata, ISourceData, IExecuteData, + IWorkflowExecuteAdditionalData, + ExecuteWorkflowData, + RelatedExecution, + IExecuteWorkflowInfo, } from 'n8n-workflow'; -import { ApplicationError, NodeHelpers } from 'n8n-workflow'; +import { ApplicationError, NodeHelpers, WAIT_INDEFINITELY } from 'n8n-workflow'; +import Container from 'typedi'; + +import { BinaryDataService } from '@/BinaryData/BinaryData.service'; import type { BaseExecuteContext } from '../base-execute-context'; +const binaryDataService = mock(); +Container.set(BinaryDataService, binaryDataService); + export const describeCommonTests = ( context: BaseExecuteContext, { @@ -31,7 +41,7 @@ export const describeCommonTests = ( }, ) => { // @ts-expect-error `additionalData` is private - const { additionalData } = context; + const additionalData = context.additionalData as MockProxy; describe('getExecutionCancelSignal', () => { it('should return the abort signal', () => { @@ -178,4 +188,55 @@ export const describeCommonTests = ( resolveSimpleParameterValueSpy.mockRestore(); }); 
}); + + describe('putExecutionToWait', () => { + it('should set waitTill and execution status', async () => { + const waitTill = new Date(); + + await context.putExecutionToWait(waitTill); + + expect(runExecutionData.waitTill).toEqual(waitTill); + expect(additionalData.setExecutionStatus).toHaveBeenCalledWith('waiting'); + }); + }); + + describe('executeWorkflow', () => { + const data = [[{ json: { test: true } }]]; + const executeWorkflowData = mock(); + const workflowInfo = mock(); + const parentExecution: RelatedExecution = { + executionId: 'parent_execution_id', + workflowId: 'parent_workflow_id', + }; + + it('should execute workflow and return data', async () => { + additionalData.executeWorkflow.mockResolvedValue(executeWorkflowData); + binaryDataService.duplicateBinaryData.mockResolvedValue(data); + + const result = await context.executeWorkflow(workflowInfo, undefined, undefined, { + parentExecution, + }); + + expect(result.data).toEqual(data); + expect(binaryDataService.duplicateBinaryData).toHaveBeenCalledWith( + workflow.id, + additionalData.executionId, + executeWorkflowData.data, + ); + }); + + it('should put execution to wait if waitTill is returned', async () => { + const waitTill = new Date(); + additionalData.executeWorkflow.mockResolvedValue({ ...executeWorkflowData, waitTill }); + binaryDataService.duplicateBinaryData.mockResolvedValue(data); + + const result = await context.executeWorkflow(workflowInfo, undefined, undefined, { + parentExecution, + }); + + expect(additionalData.setExecutionStatus).toHaveBeenCalledWith('waiting'); + expect(runExecutionData.waitTill).toEqual(WAIT_INDEFINITELY); + expect(result.waitTill).toBe(waitTill); + }); + }); }; diff --git a/packages/core/src/node-execution-context/__tests__/supply-data-context.test.ts b/packages/core/src/node-execution-context/__tests__/supply-data-context.test.ts index 6c5a3849dd..99ee41c6fd 100644 --- a/packages/core/src/node-execution-context/__tests__/supply-data-context.test.ts +++ b/packages/core/src/node-execution-context/__tests__/supply-data-context.test.ts @@ -14,7 +14,7 @@ import type { INodeTypes, ICredentialDataDecryptedObject, } from 'n8n-workflow'; -import { ApplicationError } from 'n8n-workflow'; +import { ApplicationError, NodeConnectionType } from 'n8n-workflow'; import { describeCommonTests } from './shared-tests'; import { SupplyDataContext } from '../supply-data-context'; @@ -56,7 +56,8 @@ describe('SupplyDataContext', () => { const mode: WorkflowExecuteMode = 'manual'; const runExecutionData = mock(); const connectionInputData: INodeExecutionData[] = []; - const inputData: ITaskDataConnections = { main: [[{ json: { test: 'data' } }]] }; + const connectionType = NodeConnectionType.Main; + const inputData: ITaskDataConnections = { [connectionType]: [[{ json: { test: 'data' } }]] }; const executeData = mock(); const runIndex = 0; const closeFn = jest.fn(); @@ -71,6 +72,7 @@ describe('SupplyDataContext', () => { runIndex, connectionInputData, inputData, + connectionType, executeData, [closeFn], abortSignal, @@ -91,33 +93,38 @@ describe('SupplyDataContext', () => { describe('getInputData', () => { const inputIndex = 0; - const inputName = 'main'; afterEach(() => { - inputData[inputName] = [[{ json: { test: 'data' } }]]; + inputData[connectionType] = [[{ json: { test: 'data' } }]]; }); it('should return the input data correctly', () => { const expectedData = [{ json: { test: 'data' } }]; - expect(supplyDataContext.getInputData(inputIndex, inputName)).toEqual(expectedData); + 
expect(supplyDataContext.getInputData(inputIndex, connectionType)).toEqual(expectedData); }); it('should return an empty array if the input name does not exist', () => { - const inputName = 'nonExistent'; - expect(supplyDataContext.getInputData(inputIndex, inputName)).toEqual([]); + const connectionType = 'nonExistent'; + expect( + supplyDataContext.getInputData(inputIndex, connectionType as NodeConnectionType), + ).toEqual([]); }); it('should throw an error if the input index is out of range', () => { const inputIndex = 2; - expect(() => supplyDataContext.getInputData(inputIndex, inputName)).toThrow(ApplicationError); + expect(() => supplyDataContext.getInputData(inputIndex, connectionType)).toThrow( + ApplicationError, + ); }); it('should throw an error if the input index was not set', () => { inputData.main[inputIndex] = null; - expect(() => supplyDataContext.getInputData(inputIndex, inputName)).toThrow(ApplicationError); + expect(() => supplyDataContext.getInputData(inputIndex, connectionType)).toThrow( + ApplicationError, + ); }); }); diff --git a/packages/core/src/node-execution-context/base-execute-context.ts b/packages/core/src/node-execution-context/base-execute-context.ts index c13ba66dc2..8ecc658579 100644 --- a/packages/core/src/node-execution-context/base-execute-context.ts +++ b/packages/core/src/node-execution-context/base-execute-context.ts @@ -22,7 +22,13 @@ import type { ISourceData, AiEvent, } from 'n8n-workflow'; -import { ApplicationError, NodeHelpers, WorkflowDataProxy } from 'n8n-workflow'; +import { + ApplicationError, + NodeHelpers, + NodeConnectionType, + WAIT_INDEFINITELY, + WorkflowDataProxy, +} from 'n8n-workflow'; import { Container } from 'typedi'; import { BinaryDataService } from '@/BinaryData/BinaryData.service'; @@ -97,6 +103,13 @@ export class BaseExecuteContext extends NodeExecutionContext { ); } + async putExecutionToWait(waitTill: Date): Promise { + this.runExecutionData.waitTill = waitTill; + if (this.additionalData.setExecutionStatus) { + this.additionalData.setExecutionStatus('waiting'); + } + } + async executeWorkflow( workflowInfo: IExecuteWorkflowInfo, inputData?: INodeExecutionData[], @@ -106,23 +119,46 @@ export class BaseExecuteContext extends NodeExecutionContext { parentExecution?: RelatedExecution; }, ): Promise { - return await this.additionalData - .executeWorkflow(workflowInfo, this.additionalData, { - ...options, - parentWorkflowId: this.workflow.id?.toString(), - inputData, - parentWorkflowSettings: this.workflow.settings, - node: this.node, - parentCallbackManager, - }) - .then(async (result) => { - const data = await this.binaryDataService.duplicateBinaryData( - this.workflow.id, - this.additionalData.executionId!, - result.data, - ); - return { ...result, data }; + const result = await this.additionalData.executeWorkflow(workflowInfo, this.additionalData, { + ...options, + parentWorkflowId: this.workflow.id, + inputData, + parentWorkflowSettings: this.workflow.settings, + node: this.node, + parentCallbackManager, + }); + + // If a sub-workflow execution goes into the waiting state + if (result.waitTill) { + // then put the parent workflow execution also into the waiting state, + // but do not use the sub-workflow `waitTill` to avoid WaitTracker resuming the parent execution at the same time as the sub-workflow + await this.putExecutionToWait(WAIT_INDEFINITELY); + } + + const data = await this.binaryDataService.duplicateBinaryData( + this.workflow.id, + this.additionalData.executionId!, + result.data, + ); + return { ...result, data 
}; + } + + protected getInputItems(inputIndex: number, connectionType: NodeConnectionType) { + const inputData = this.inputData[connectionType]; + if (inputData.length < inputIndex) { + throw new ApplicationError('Could not get input with given index', { + extra: { inputIndex, connectionType }, }); + } + + const allItems = inputData[inputIndex] as INodeExecutionData[] | null | undefined; + if (allItems === null) { + throw new ApplicationError('Input index was not set', { + extra: { inputIndex, connectionType }, + }); + } + + return allItems; } getNodeInputs(): INodeInputConfiguration[] { @@ -145,12 +181,12 @@ export class BaseExecuteContext extends NodeExecutionContext { ); } - getInputSourceData(inputIndex = 0, inputName = 'main'): ISourceData { + getInputSourceData(inputIndex = 0, connectionType = NodeConnectionType.Main): ISourceData { if (this.executeData?.source === null) { // Should never happen as n8n sets it automatically throw new ApplicationError('Source data is missing'); } - return this.executeData.source[inputName][inputIndex]!; + return this.executeData.source[connectionType][inputIndex]!; } getWorkflowDataProxy(itemIndex: number): IWorkflowDataProxyData { diff --git a/packages/core/src/node-execution-context/execute-context.ts b/packages/core/src/node-execution-context/execute-context.ts index 514c9cf27f..954059d86d 100644 --- a/packages/core/src/node-execution-context/execute-context.ts +++ b/packages/core/src/node-execution-context/execute-context.ts @@ -1,7 +1,7 @@ import type { + AINodeConnectionType, CallbackManager, CloseFunction, - ExecutionBaseError, IExecuteData, IExecuteFunctions, IExecuteResponsePromiseData, @@ -10,15 +10,18 @@ import type { INodeExecutionData, IRunExecutionData, ITaskDataConnections, - ITaskMetadata, IWorkflowExecuteAdditionalData, - NodeConnectionType, + Result, Workflow, WorkflowExecuteMode, } from 'n8n-workflow'; -import { ApplicationError, createDeferredPromise } from 'n8n-workflow'; +import { + ApplicationError, + createDeferredPromise, + createEnvProviderState, + NodeConnectionType, +} from 'n8n-workflow'; -import { createAgentStartJob } from '@/Agent'; // eslint-disable-next-line import/no-cycle import { returnJsonArray, @@ -26,7 +29,6 @@ import { normalizeItems, constructExecutionMetaData, getInputConnectionData, - addExecutionDataFunctions, assertBinaryData, getBinaryDataBuffer, copyBinaryFile, @@ -46,8 +48,6 @@ export class ExecuteContext extends BaseExecuteContext implements IExecuteFuncti readonly getNodeParameter: IExecuteFunctions['getNodeParameter']; - readonly startJob: IExecuteFunctions['startJob']; - constructor( workflow: Workflow, node: INode, @@ -122,23 +122,37 @@ export class ExecuteContext extends BaseExecuteContext implements IExecuteFuncti fallbackValue, options, )) as IExecuteFunctions['getNodeParameter']; + } - this.startJob = createAgentStartJob( + async startJob( + jobType: string, + settings: unknown, + itemIndex: number, + ): Promise> { + return await this.additionalData.startAgentJob( this.additionalData, + jobType, + settings, + this, this.inputData, this.node, this.workflow, this.runExecutionData, this.runIndex, + itemIndex, this.node.name, this.connectionInputData, {}, this.mode, + createEnvProviderState(), this.executeData, ); } - async getInputConnectionData(inputName: NodeConnectionType, itemIndex: number): Promise { + async getInputConnectionData( + connectionType: AINodeConnectionType, + itemIndex: number, + ): Promise { return await getInputConnectionData.call( this, this.workflow, @@ -150,40 +164,18 @@ 
export class ExecuteContext extends BaseExecuteContext implements IExecuteFuncti this.executeData, this.mode, this.closeFunctions, - inputName, + connectionType, itemIndex, this.abortSignal, ); } - getInputData(inputIndex = 0, inputName = 'main') { - if (!this.inputData.hasOwnProperty(inputName)) { + getInputData(inputIndex = 0, connectionType = NodeConnectionType.Main) { + if (!this.inputData.hasOwnProperty(connectionType)) { // Return empty array because else it would throw error when nothing is connected to input return []; } - - const inputData = this.inputData[inputName]; - // TODO: Check if nodeType has input with that index defined - if (inputData.length < inputIndex) { - throw new ApplicationError('Could not get input with given index', { - extra: { inputIndex, inputName }, - }); - } - - if (inputData[inputIndex] === null) { - throw new ApplicationError('Value of input was not set', { - extra: { inputIndex, inputName }, - }); - } - - return inputData[inputIndex]; - } - - async putExecutionToWait(waitTill: Date): Promise { - this.runExecutionData.waitTill = waitTill; - if (this.additionalData.setExecutionStatus) { - this.additionalData.setExecutionStatus('waiting'); - } + return super.getInputItems(inputIndex, connectionType) ?? []; } logNodeOutput(...args: unknown[]): void { @@ -201,60 +193,14 @@ export class ExecuteContext extends BaseExecuteContext implements IExecuteFuncti await this.additionalData.hooks?.executeHookFunctions('sendResponse', [response]); } - addInputData( - connectionType: NodeConnectionType, - data: INodeExecutionData[][] | ExecutionBaseError, - ): { index: number } { - const nodeName = this.node.name; - let currentNodeRunIndex = 0; - if (this.runExecutionData.resultData.runData.hasOwnProperty(nodeName)) { - currentNodeRunIndex = this.runExecutionData.resultData.runData[nodeName].length; - } - - void addExecutionDataFunctions( - 'input', - nodeName, - data, - this.runExecutionData, - connectionType, - this.additionalData, - nodeName, - this.runIndex, - currentNodeRunIndex, - ).catch((error) => { - this.logger.warn( - // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access - `There was a problem logging input data of node "${nodeName}": ${error.message}`, - ); - }); - - return { index: currentNodeRunIndex }; + /** @deprecated use ISupplyDataFunctions.addInputData */ + addInputData(): { index: number } { + throw new ApplicationError('addInputData should not be called on IExecuteFunctions'); } - addOutputData( - connectionType: NodeConnectionType, - currentNodeRunIndex: number, - data: INodeExecutionData[][] | ExecutionBaseError, - metadata?: ITaskMetadata, - ): void { - const nodeName = this.node.name; - addExecutionDataFunctions( - 'output', - nodeName, - data, - this.runExecutionData, - connectionType, - this.additionalData, - nodeName, - this.runIndex, - currentNodeRunIndex, - metadata, - ).catch((error) => { - this.logger.warn( - // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access - `There was a problem logging output data of node "${nodeName}": ${error.message}`, - ); - }); + /** @deprecated use ISupplyDataFunctions.addOutputData */ + addOutputData(): void { + throw new ApplicationError('addOutputData should not be called on IExecuteFunctions'); } getParentCallbackManager(): CallbackManager | undefined { diff --git a/packages/core/src/node-execution-context/execute-single-context.ts b/packages/core/src/node-execution-context/execute-single-context.ts index 91c7fcf683..cb46ea9c91 100644 --- 
a/packages/core/src/node-execution-context/execute-single-context.ts +++ b/packages/core/src/node-execution-context/execute-single-context.ts @@ -11,7 +11,7 @@ import type { ITaskDataConnections, IExecuteData, } from 'n8n-workflow'; -import { ApplicationError, createDeferredPromise } from 'n8n-workflow'; +import { ApplicationError, createDeferredPromise, NodeConnectionType } from 'n8n-workflow'; // eslint-disable-next-line import/no-cycle import { @@ -76,31 +76,18 @@ export class ExecuteSingleContext extends BaseExecuteContext implements IExecute return super.evaluateExpression(expression, itemIndex); } - getInputData(inputIndex = 0, inputName = 'main') { - if (!this.inputData.hasOwnProperty(inputName)) { + getInputData(inputIndex = 0, connectionType = NodeConnectionType.Main) { + if (!this.inputData.hasOwnProperty(connectionType)) { // Return empty array because else it would throw error when nothing is connected to input return { json: {} }; } - // TODO: Check if nodeType has input with that index defined - if (this.inputData[inputName].length < inputIndex) { - throw new ApplicationError('Could not get input index', { - extra: { inputIndex, inputName }, - }); - } + const allItems = super.getInputItems(inputIndex, connectionType); - const allItems = this.inputData[inputName][inputIndex]; - - if (allItems === null || allItems === undefined) { - throw new ApplicationError('Input index was not set', { - extra: { inputIndex, inputName }, - }); - } - - const data = allItems[this.itemIndex]; - if (data === null || data === undefined) { + const data = allItems?.[this.itemIndex]; + if (data === undefined) { throw new ApplicationError('Value of input with given index was not set', { - extra: { inputIndex, inputName, itemIndex: this.itemIndex }, + extra: { inputIndex, connectionType, itemIndex: this.itemIndex }, }); } diff --git a/packages/core/src/node-execution-context/node-execution-context.ts b/packages/core/src/node-execution-context/node-execution-context.ts index c4bb0739fb..158b06d02e 100644 --- a/packages/core/src/node-execution-context/node-execution-context.ts +++ b/packages/core/src/node-execution-context/node-execution-context.ts @@ -86,6 +86,7 @@ export abstract class NodeExecutionContext implements Omit { + async getInputConnectionData( + connectionType: AINodeConnectionType, + itemIndex: number, + ): Promise { return await getInputConnectionData.call( this, this.workflow, @@ -116,36 +123,23 @@ export class SupplyDataContext extends BaseExecuteContext implements ISupplyData this.executeData, this.mode, this.closeFunctions, - inputName, + connectionType, itemIndex, this.abortSignal, ); } - getInputData(inputIndex = 0, inputName = 'main') { - if (!this.inputData.hasOwnProperty(inputName)) { + getInputData(inputIndex = 0, connectionType = this.connectionType) { + if (!this.inputData.hasOwnProperty(connectionType)) { // Return empty array because else it would throw error when nothing is connected to input return []; } - - // TODO: Check if nodeType has input with that index defined - if (this.inputData[inputName].length < inputIndex) { - throw new ApplicationError('Could not get input with given index', { - extra: { inputIndex, inputName }, - }); - } - - if (this.inputData[inputName][inputIndex] === null) { - throw new ApplicationError('Value of input was not set', { - extra: { inputIndex, inputName }, - }); - } - - return this.inputData[inputName][inputIndex]; + return super.getInputItems(inputIndex, connectionType) ?? 
[]; } + /** @deprecated create a context object with inputData for every runIndex */ addInputData( - connectionType: NodeConnectionType, + connectionType: AINodeConnectionType, data: INodeExecutionData[][], ): { index: number } { const nodeName = this.node.name; @@ -154,15 +148,11 @@ export class SupplyDataContext extends BaseExecuteContext implements ISupplyData currentNodeRunIndex = this.runExecutionData.resultData.runData[nodeName].length; } - addExecutionDataFunctions( + this.addExecutionDataFunctions( 'input', - nodeName, data, - this.runExecutionData, connectionType, - this.additionalData, nodeName, - this.runIndex, currentNodeRunIndex, ).catch((error) => { this.logger.warn( @@ -176,22 +166,19 @@ export class SupplyDataContext extends BaseExecuteContext implements ISupplyData return { index: currentNodeRunIndex }; } + /** @deprecated Switch to WorkflowExecute to store output on runExecutionData.resultData.runData */ addOutputData( - connectionType: NodeConnectionType, + connectionType: AINodeConnectionType, currentNodeRunIndex: number, - data: INodeExecutionData[][], + data: INodeExecutionData[][] | ExecutionBaseError, metadata?: ITaskMetadata, ): void { const nodeName = this.node.name; - addExecutionDataFunctions( + this.addExecutionDataFunctions( 'output', - nodeName, data, - this.runExecutionData, connectionType, - this.additionalData, nodeName, - this.runIndex, currentNodeRunIndex, metadata, ).catch((error) => { @@ -203,4 +190,102 @@ export class SupplyDataContext extends BaseExecuteContext implements ISupplyData ); }); } + + async addExecutionDataFunctions( + type: 'input' | 'output', + data: INodeExecutionData[][] | ExecutionBaseError, + connectionType: AINodeConnectionType, + sourceNodeName: string, + currentNodeRunIndex: number, + metadata?: ITaskMetadata, + ): Promise { + const { + additionalData, + runExecutionData, + runIndex: sourceNodeRunIndex, + node: { name: nodeName }, + } = this; + + let taskData: ITaskData | undefined; + if (type === 'input') { + taskData = { + startTime: new Date().getTime(), + executionTime: 0, + executionStatus: 'running', + source: [null], + }; + } else { + // At the moment we expect that there is always an input sent before the output + taskData = get( + runExecutionData, + ['resultData', 'runData', nodeName, currentNodeRunIndex], + undefined, + ); + if (taskData === undefined) { + return; + } + taskData.metadata = metadata; + } + taskData = taskData!; + + if (data instanceof Error) { + taskData.executionStatus = 'error'; + taskData.error = data; + } else { + if (type === 'output') { + taskData.executionStatus = 'success'; + } + taskData.data = { + [connectionType]: data, + } as ITaskDataConnections; + } + + if (type === 'input') { + if (!(data instanceof Error)) { + this.inputData[connectionType] = data; + // TODO: remove inputOverride + taskData.inputOverride = { + [connectionType]: data, + } as ITaskDataConnections; + } + + if (!runExecutionData.resultData.runData.hasOwnProperty(nodeName)) { + runExecutionData.resultData.runData[nodeName] = []; + } + + runExecutionData.resultData.runData[nodeName][currentNodeRunIndex] = taskData; + await additionalData.hooks?.executeHookFunctions('nodeExecuteBefore', [nodeName]); + } else { + // Outputs + taskData.executionTime = new Date().getTime() - taskData.startTime; + + await additionalData.hooks?.executeHookFunctions('nodeExecuteAfter', [ + nodeName, + taskData, + this.runExecutionData, + ]); + + if (get(runExecutionData, 'executionData.metadata', undefined) === undefined) { + 
runExecutionData.executionData!.metadata = {}; + } + + let sourceTaskData = runExecutionData.executionData?.metadata?.[sourceNodeName]; + + if (!sourceTaskData) { + runExecutionData.executionData!.metadata[sourceNodeName] = []; + sourceTaskData = runExecutionData.executionData!.metadata[sourceNodeName]; + } + + if (!sourceTaskData[sourceNodeRunIndex]) { + sourceTaskData[sourceNodeRunIndex] = { + subRun: [], + }; + } + + sourceTaskData[sourceNodeRunIndex].subRun!.push({ + node: nodeName, + runIndex: currentNodeRunIndex, + }); + } + } } diff --git a/packages/core/src/node-execution-context/webhook-context.ts b/packages/core/src/node-execution-context/webhook-context.ts index e1dae9c1de..04d1df5e40 100644 --- a/packages/core/src/node-execution-context/webhook-context.ts +++ b/packages/core/src/node-execution-context/webhook-context.ts @@ -1,5 +1,6 @@ import type { Request, Response } from 'express'; import type { + AINodeConnectionType, CloseFunction, ICredentialDataDecryptedObject, IDataObject, @@ -11,7 +12,6 @@ import type { IWebhookData, IWebhookFunctions, IWorkflowExecuteAdditionalData, - NodeConnectionType, WebhookType, Workflow, WorkflowExecuteMode, @@ -138,7 +138,10 @@ export class WebhookContext extends NodeExecutionContext implements IWebhookFunc return this.webhookData.webhookDescription.name; } - async getInputConnectionData(inputName: NodeConnectionType, itemIndex: number): Promise { + async getInputConnectionData( + connectionType: AINodeConnectionType, + itemIndex: number, + ): Promise { // To be able to use expressions like "$json.sessionId" set the // body data the webhook received to what is normally used for // incoming node data. @@ -170,7 +173,7 @@ export class WebhookContext extends NodeExecutionContext implements IWebhookFunc executeData, this.mode, this.closeFunctions, - inputName, + connectionType, itemIndex, ); } diff --git a/packages/core/test/ClassLoader.test.ts b/packages/core/test/ClassLoader.test.ts new file mode 100644 index 0000000000..9527572662 --- /dev/null +++ b/packages/core/test/ClassLoader.test.ts @@ -0,0 +1,52 @@ +import vm from 'vm'; + +import { loadClassInIsolation } from '@/ClassLoader'; + +describe('ClassLoader', () => { + const filePath = '/path/to/TestClass.js'; + const className = 'TestClass'; + + class TestClass { + getValue(): string { + return 'test value'; + } + } + + jest.spyOn(vm, 'createContext').mockReturnValue({}); + + const runInContext = jest.fn().mockImplementation(() => new TestClass()); + const scriptSpy = jest.spyOn(vm, 'Script').mockImplementation(function (this: vm.Script) { + this.runInContext = runInContext; + return this; + }); + + beforeEach(() => { + jest.clearAllMocks(); + }); + + it('should create script with correct require statement', () => { + const instance = loadClassInIsolation(filePath, className); + + expect(scriptSpy).toHaveBeenCalledWith(`new (require('${filePath}').${className})()`); + expect(instance.getValue()).toBe('test value'); + }); + + it('should handle Windows-style paths', () => { + const originalPlatform = process.platform; + Object.defineProperty(process, 'platform', { value: 'win32' }); + + loadClassInIsolation('/path\\to\\TestClass.js', 'TestClass'); + + expect(scriptSpy).toHaveBeenCalledWith(`new (require('${filePath}').${className})()`); + + Object.defineProperty(process, 'platform', { value: originalPlatform }); + }); + + it('should throw error when script execution fails', () => { + runInContext.mockImplementationOnce(() => { + throw new Error('Script execution failed'); + }); + + expect(() => 
loadClassInIsolation(filePath, className)).toThrow('Script execution failed'); + }); +}); diff --git a/packages/core/test/CreateNodeAsTool.test.ts b/packages/core/test/CreateNodeAsTool.test.ts index 5c485b9837..fdc14269e1 100644 --- a/packages/core/test/CreateNodeAsTool.test.ts +++ b/packages/core/test/CreateNodeAsTool.test.ts @@ -1,5 +1,5 @@ -import type { IExecuteFunctions, INodeParameters, INodeType } from 'n8n-workflow'; -import { NodeConnectionType, NodeOperationError } from 'n8n-workflow'; +import { mock } from 'jest-mock-extended'; +import type { INodeType, ISupplyDataFunctions, INode } from 'n8n-workflow'; import { z } from 'zod'; import { createNodeAsTool } from '@/CreateNodeAsTool'; @@ -14,28 +14,29 @@ jest.mock('@langchain/core/tools', () => ({ })); describe('createNodeAsTool', () => { - let mockCtx: IExecuteFunctions; - let mockNode: INodeType; - let mockNodeParameters: INodeParameters; + const context = mock({ + getNodeParameter: jest.fn(), + addInputData: jest.fn(), + addOutputData: jest.fn(), + getNode: jest.fn(), + }); + const handleToolInvocation = jest.fn(); + const nodeType = mock({ + description: { + name: 'TestNode', + description: 'Test node description', + }, + }); + const node = mock({ name: 'Test_Node' }); + const options = { node, nodeType, handleToolInvocation }; beforeEach(() => { - // Setup mock objects - mockCtx = { - getNodeParameter: jest.fn(), - addInputData: jest.fn().mockReturnValue({ index: 0 }), - addOutputData: jest.fn(), - getNode: jest.fn().mockReturnValue({ name: 'Test_Node' }), - } as unknown as IExecuteFunctions; + jest.clearAllMocks(); + (context.addInputData as jest.Mock).mockReturnValue({ index: 0 }); + (context.getNode as jest.Mock).mockReturnValue(node); + (nodeType.execute as jest.Mock).mockResolvedValue([[{ json: { result: 'test' } }]]); - mockNode = { - description: { - name: 'TestNode', - description: 'Test node description', - }, - execute: jest.fn().mockResolvedValue([[{ json: { result: 'test' } }]]), - } as unknown as INodeType; - - mockNodeParameters = { + node.parameters = { param1: "={{$fromAI('param1', 'Test parameter', 'string') }}", param2: 'static value', nestedParam: { @@ -45,13 +46,11 @@ describe('createNodeAsTool', () => { resource: 'testResource', operation: 'testOperation', }; - - jest.clearAllMocks(); }); describe('Tool Creation and Basic Properties', () => { it('should create a DynamicStructuredTool with correct properties', () => { - const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + const tool = createNodeAsTool(options).response; expect(tool).toBeDefined(); expect(tool.name).toBe('Test_Node'); @@ -62,10 +61,10 @@ describe('createNodeAsTool', () => { }); it('should use toolDescription if provided', () => { - mockNodeParameters.descriptionType = 'manual'; - mockNodeParameters.toolDescription = 'Custom tool description'; + node.parameters.descriptionType = 'manual'; + node.parameters.toolDescription = 'Custom tool description'; - const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + const tool = createNodeAsTool(options).response; expect(tool.description).toBe('Custom tool description'); }); @@ -73,7 +72,7 @@ describe('createNodeAsTool', () => { describe('Schema Creation and Parameter Handling', () => { it('should create a schema based on fromAI arguments in nodeParameters', () => { - const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + const tool = createNodeAsTool(options).response; expect(tool.schema).toBeDefined(); 
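// Illustrative sketch of the reworked call signature exercised in these tests:
// createNodeAsTool now takes a single options object instead of the old
// (ctx, nodeType, nodeParameters) arguments. handleToolInvocation appears to be the
// callback the generated tool delegates to when it is invoked; the values below are
// hypothetical test doubles, not fixtures from this file.
const exampleOptions = {
  node: mock<INode>({ name: 'Example_Node', parameters: {} }),
  nodeType: mock<INodeType>({
    description: { name: 'ExampleNode', description: 'Example node' },
  }),
  handleToolInvocation: jest.fn(async () => 'tool result'),
};
const { response: exampleTool } = createNodeAsTool(exampleOptions);
// exampleTool.name === 'Example_Node'; with no $fromAI placeholders in
// node.parameters, exampleTool.schema.shape is expected to be empty.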
expect(tool.schema.shape).toHaveProperty('param1'); @@ -82,14 +81,14 @@ describe('createNodeAsTool', () => { }); it('should handle fromAI arguments correctly', () => { - const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + const tool = createNodeAsTool(options).response; expect(tool.schema.shape.param1).toBeInstanceOf(z.ZodString); expect(tool.schema.shape.subparam).toBeInstanceOf(z.ZodString); }); it('should handle default values correctly', () => { - mockNodeParameters = { + node.parameters = { paramWithDefault: "={{ $fromAI('paramWithDefault', 'Parameter with default', 'string', 'default value') }}", numberWithDefault: @@ -98,7 +97,7 @@ describe('createNodeAsTool', () => { "={{ $fromAI('booleanWithDefault', 'Boolean with default', 'boolean', true) }}", }; - const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + const tool = createNodeAsTool(options).response; expect(tool.schema.shape.paramWithDefault.description).toBe('Parameter with default'); expect(tool.schema.shape.numberWithDefault.description).toBe('Number with default'); @@ -106,7 +105,7 @@ describe('createNodeAsTool', () => { }); it('should handle nested parameters correctly', () => { - mockNodeParameters = { + node.parameters = { topLevel: "={{ $fromAI('topLevel', 'Top level parameter', 'string') }}", nested: { level1: "={{ $fromAI('level1', 'Nested level 1', 'string') }}", @@ -116,7 +115,7 @@ describe('createNodeAsTool', () => { }, }; - const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + const tool = createNodeAsTool(options).response; expect(tool.schema.shape.topLevel).toBeInstanceOf(z.ZodString); expect(tool.schema.shape.level1).toBeInstanceOf(z.ZodString); @@ -124,14 +123,14 @@ describe('createNodeAsTool', () => { }); it('should handle array parameters correctly', () => { - mockNodeParameters = { + node.parameters = { arrayParam: [ "={{ $fromAI('item1', 'First item', 'string') }}", "={{ $fromAI('item2', 'Second item', 'number') }}", ], }; - const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + const tool = createNodeAsTool(options).response; expect(tool.schema.shape.item1).toBeInstanceOf(z.ZodString); expect(tool.schema.shape.item2).toBeInstanceOf(z.ZodNumber); @@ -140,45 +139,37 @@ describe('createNodeAsTool', () => { describe('Error Handling and Edge Cases', () => { it('should handle error during node execution', async () => { - mockNode.execute = jest.fn().mockRejectedValue(new Error('Execution failed')); - const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + nodeType.execute = jest.fn().mockRejectedValue(new Error('Execution failed')); + const tool = createNodeAsTool(options).response; + handleToolInvocation.mockReturnValue('Error during node execution: some random issue.'); const result = await tool.func({ param1: 'test value' }); expect(result).toContain('Error during node execution:'); - expect(mockCtx.addOutputData).toHaveBeenCalledWith( - NodeConnectionType.AiTool, - 0, - expect.any(NodeOperationError), - ); }); it('should throw an error for invalid parameter names', () => { - mockNodeParameters.invalidParam = "$fromAI('invalid param', 'Invalid parameter', 'string')"; + node.parameters.invalidParam = "$fromAI('invalid param', 'Invalid parameter', 'string')"; - expect(() => createNodeAsTool(mockCtx, mockNode, mockNodeParameters)).toThrow( - 'Parameter key `invalid param` is invalid', - ); + expect(() => createNodeAsTool(options)).toThrow('Parameter key `invalid param` is 
invalid'); }); it('should throw an error for $fromAI calls with unsupported types', () => { - mockNodeParameters = { + node.parameters = { invalidTypeParam: "={{ $fromAI('invalidType', 'Param with unsupported type', 'unsupportedType') }}", }; - expect(() => createNodeAsTool(mockCtx, mockNode, mockNodeParameters)).toThrow( - 'Invalid type: unsupportedType', - ); + expect(() => createNodeAsTool(options)).toThrow('Invalid type: unsupportedType'); }); it('should handle empty parameters and parameters with no fromAI calls', () => { - mockNodeParameters = { + node.parameters = { param1: 'static value 1', param2: 'static value 2', }; - const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + const tool = createNodeAsTool(options).response; expect(tool.schema.shape).toEqual({}); }); @@ -186,13 +177,13 @@ describe('createNodeAsTool', () => { describe('Parameter Name and Description Handling', () => { it('should accept parameter names with underscores and hyphens', () => { - mockNodeParameters = { + node.parameters = { validName1: "={{ $fromAI('param_name-1', 'Valid name with underscore and hyphen', 'string') }}", validName2: "={{ $fromAI('param_name_2', 'Another valid name', 'number') }}", }; - const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + const tool = createNodeAsTool(options).response; expect(tool.schema.shape['param_name-1']).toBeInstanceOf(z.ZodString); expect(tool.schema.shape['param_name-1'].description).toBe( @@ -204,22 +195,20 @@ describe('createNodeAsTool', () => { }); it('should throw an error for parameter names with invalid special characters', () => { - mockNodeParameters = { + node.parameters = { invalidNameParam: "={{ $fromAI('param@name!', 'Invalid name with special characters', 'string') }}", }; - expect(() => createNodeAsTool(mockCtx, mockNode, mockNodeParameters)).toThrow( - 'Parameter key `param@name!` is invalid', - ); + expect(() => createNodeAsTool(options)).toThrow('Parameter key `param@name!` is invalid'); }); it('should throw an error for empty parameter name', () => { - mockNodeParameters = { + node.parameters = { invalidNameParam: "={{ $fromAI('', 'Invalid name with special characters', 'string') }}", }; - expect(() => createNodeAsTool(mockCtx, mockNode, mockNodeParameters)).toThrow( + expect(() => createNodeAsTool(options)).toThrow( 'You must specify a key when using $fromAI()', ); }); @@ -227,50 +216,51 @@ describe('createNodeAsTool', () => { it('should handle parameter names with exact and exceeding character limits', () => { const longName = 'a'.repeat(64); const tooLongName = 'a'.repeat(65); - mockNodeParameters = { + node.parameters = { longNameParam: `={{ $fromAI('${longName}', 'Param with 64 character name', 'string') }}`, }; - const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + const tool = createNodeAsTool(options).response; expect(tool.schema.shape[longName]).toBeInstanceOf(z.ZodString); expect(tool.schema.shape[longName].description).toBe('Param with 64 character name'); - expect(() => - createNodeAsTool(mockCtx, mockNode, { - tooLongNameParam: `={{ $fromAI('${tooLongName}', 'Param with 65 character name', 'string') }}`, - }), - ).toThrow(`Parameter key \`${tooLongName}\` is invalid`); + node.parameters = { + tooLongNameParam: `={{ $fromAI('${tooLongName}', 'Param with 65 character name', 'string') }}`, + }; + expect(() => createNodeAsTool(options)).toThrow( + `Parameter key \`${tooLongName}\` is invalid`, + ); }); it('should handle $fromAI calls with empty description', 
() => { - mockNodeParameters = { + node.parameters = { emptyDescriptionParam: "={{ $fromAI('emptyDescription', '', 'number') }}", }; - const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + const tool = createNodeAsTool(options).response; expect(tool.schema.shape.emptyDescription).toBeInstanceOf(z.ZodNumber); expect(tool.schema.shape.emptyDescription.description).toBeUndefined(); }); it('should throw an error for calls with the same parameter but different descriptions', () => { - mockNodeParameters = { + node.parameters = { duplicateParam1: "={{ $fromAI('duplicate', 'First duplicate', 'string') }}", duplicateParam2: "={{ $fromAI('duplicate', 'Second duplicate', 'number') }}", }; - expect(() => createNodeAsTool(mockCtx, mockNode, mockNodeParameters)).toThrow( + expect(() => createNodeAsTool(options)).toThrow( "Duplicate key 'duplicate' found with different description or type", ); }); it('should throw an error for calls with the same parameter but different types', () => { - mockNodeParameters = { + node.parameters = { duplicateParam1: "={{ $fromAI('duplicate', 'First duplicate', 'string') }}", duplicateParam2: "={{ $fromAI('duplicate', 'First duplicate', 'number') }}", }; - expect(() => createNodeAsTool(mockCtx, mockNode, mockNodeParameters)).toThrow( + expect(() => createNodeAsTool(options)).toThrow( "Duplicate key 'duplicate' found with different description or type", ); }); @@ -278,7 +268,7 @@ describe('createNodeAsTool', () => { describe('Complex Parsing Scenarios', () => { it('should correctly parse $fromAI calls with varying spaces, capitalization, and within template literals', () => { - mockNodeParameters = { + node.parameters = { varyingSpacing1: "={{$fromAI('param1','Description1','string')}}", varyingSpacing2: "={{ $fromAI ( 'param2' , 'Description2' , 'number' ) }}", varyingSpacing3: "={{ $FROMai('param3', 'Description3', 'boolean') }}", @@ -288,7 +278,7 @@ describe('createNodeAsTool', () => { "={{ `Value is: ${$fromAI('templatedParam', 'Templated param description', 'string')}` }}", }; - const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + const tool = createNodeAsTool(options).response; expect(tool.schema.shape.param1).toBeInstanceOf(z.ZodString); expect(tool.schema.shape.param1.description).toBe('Description1'); @@ -307,12 +297,12 @@ describe('createNodeAsTool', () => { }); it('should correctly parse multiple $fromAI calls interleaved with regular text', () => { - mockNodeParameters = { + node.parameters = { interleavedParams: "={{ 'Start ' + $fromAI('param1', 'First param', 'string') + ' Middle ' + $fromAI('param2', 'Second param', 'number') + ' End' }}", }; - const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + const tool = createNodeAsTool(options).response; expect(tool.schema.shape.param1).toBeInstanceOf(z.ZodString); expect(tool.schema.shape.param1.description).toBe('First param'); @@ -322,12 +312,12 @@ describe('createNodeAsTool', () => { }); it('should correctly parse $fromAI calls with complex JSON default values', () => { - mockNodeParameters = { + node.parameters = { complexJsonDefault: '={{ $fromAI(\'complexJson\', \'Param with complex JSON default\', \'json\', \'{"nested": {"key": "value"}, "array": [1, 2, 3]}\') }}', }; - const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + const tool = createNodeAsTool(options).response; expect(tool.schema.shape.complexJson._def.innerType).toBeInstanceOf(z.ZodRecord); 
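// Illustrative sketch only: a 'json' $fromAI parameter that carries a default is
// asserted through _def.innerType above, which matches a zod record wrapped by
// .default(). The chain below reproduces that expected shape with zod directly
// (z is already imported at the top of this test file); it is an assumption about
// the shape under test, not the createNodeAsTool implementation itself.
const complexJsonSketch = z
  .record(z.unknown())
  .default({ nested: { key: 'value' }, array: [1, 2, 3] })
  .describe('Param with complex JSON default');
// complexJsonSketch._def.innerType is a ZodRecord, and complexJsonSketch.description
// matches the string asserted in the test above.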
expect(tool.schema.shape.complexJson.description).toBe('Param with complex JSON default'); @@ -338,7 +328,7 @@ describe('createNodeAsTool', () => { }); it('should ignore $fromAI calls embedded in non-string node parameters', () => { - mockNodeParameters = { + node.parameters = { numberParam: 42, booleanParam: false, objectParam: { @@ -355,7 +345,7 @@ describe('createNodeAsTool', () => { ], }; - const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + const tool = createNodeAsTool(options).response; expect(tool.schema.shape.innerParam).toBeInstanceOf(z.ZodString); expect(tool.schema.shape.innerParam.description).toBe('Inner param'); @@ -373,48 +363,48 @@ describe('createNodeAsTool', () => { describe('Escaping and Special Characters', () => { it('should handle escaped single quotes in parameter names and descriptions', () => { - mockNodeParameters = { + node.parameters = { escapedQuotesParam: "={{ $fromAI('paramName', 'Description with \\'escaped\\' quotes', 'string') }}", }; - const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + const tool = createNodeAsTool(options).response; expect(tool.schema.shape.paramName).toBeInstanceOf(z.ZodString); expect(tool.schema.shape.paramName.description).toBe("Description with 'escaped' quotes"); }); it('should handle escaped double quotes in parameter names and descriptions', () => { - mockNodeParameters = { + node.parameters = { escapedQuotesParam: '={{ $fromAI("paramName", "Description with \\"escaped\\" quotes", "string") }}', }; - const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + const tool = createNodeAsTool(options).response; expect(tool.schema.shape.paramName).toBeInstanceOf(z.ZodString); expect(tool.schema.shape.paramName.description).toBe('Description with "escaped" quotes'); }); it('should handle escaped backslashes in parameter names and descriptions', () => { - mockNodeParameters = { + node.parameters = { escapedBackslashesParam: "={{ $fromAI('paramName', 'Description with \\\\ backslashes', 'string') }}", }; - const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + const tool = createNodeAsTool(options).response; expect(tool.schema.shape.paramName).toBeInstanceOf(z.ZodString); expect(tool.schema.shape.paramName.description).toBe('Description with \\ backslashes'); }); it('should handle mixed escaped characters in parameter names and descriptions', () => { - mockNodeParameters = { + node.parameters = { mixedEscapesParam: '={{ $fromAI(`paramName`, \'Description with \\\'mixed" characters\', "number") }}', }; - const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + const tool = createNodeAsTool(options).response; expect(tool.schema.shape.paramName).toBeInstanceOf(z.ZodNumber); expect(tool.schema.shape.paramName.description).toBe('Description with \'mixed" characters'); @@ -423,12 +413,12 @@ describe('createNodeAsTool', () => { describe('Edge Cases and Limitations', () => { it('should ignore excess arguments in $fromAI calls beyond the fourth argument', () => { - mockNodeParameters = { + node.parameters = { excessArgsParam: "={{ $fromAI('excessArgs', 'Param with excess arguments', 'string', 'default', 'extraArg1', 'extraArg2') }}", }; - const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + const tool = createNodeAsTool(options).response; expect(tool.schema.shape.excessArgs._def.innerType).toBeInstanceOf(z.ZodString); expect(tool.schema.shape.excessArgs.description).toBe('Param with excess 
arguments'); @@ -436,12 +426,12 @@ describe('createNodeAsTool', () => { }); it('should correctly parse $fromAI calls with nested parentheses', () => { - mockNodeParameters = { + node.parameters = { nestedParenthesesParam: "={{ $fromAI('paramWithNested', 'Description with ((nested)) parentheses', 'string') }}", }; - const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + const tool = createNodeAsTool(options).response; expect(tool.schema.shape.paramWithNested).toBeInstanceOf(z.ZodString); expect(tool.schema.shape.paramWithNested.description).toBe( @@ -451,24 +441,24 @@ describe('createNodeAsTool', () => { it('should handle $fromAI calls with very long descriptions', () => { const longDescription = 'A'.repeat(1000); - mockNodeParameters = { + node.parameters = { longParam: `={{ $fromAI('longParam', '${longDescription}', 'string') }}`, }; - const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + const tool = createNodeAsTool(options).response; expect(tool.schema.shape.longParam).toBeInstanceOf(z.ZodString); expect(tool.schema.shape.longParam.description).toBe(longDescription); }); it('should handle $fromAI calls with only some parameters', () => { - mockNodeParameters = { + node.parameters = { partialParam1: "={{ $fromAI('partial1') }}", partialParam2: "={{ $fromAI('partial2', 'Description only') }}", partialParam3: "={{ $fromAI('partial3', '', 'number') }}", }; - const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + const tool = createNodeAsTool(options).response; expect(tool.schema.shape.partial1).toBeInstanceOf(z.ZodString); expect(tool.schema.shape.partial2).toBeInstanceOf(z.ZodString); @@ -478,11 +468,11 @@ describe('createNodeAsTool', () => { describe('Unicode and Internationalization', () => { it('should handle $fromAI calls with unicode characters', () => { - mockNodeParameters = { + node.parameters = { unicodeParam: "={{ $fromAI('unicodeParam', '🌈 Unicode parameter 你好', 'string') }}", }; - const tool = createNodeAsTool(mockCtx, mockNode, mockNodeParameters).response; + const tool = createNodeAsTool(options).response; expect(tool.schema.shape.unicodeParam).toBeInstanceOf(z.ZodString); expect(tool.schema.shape.unicodeParam.description).toBe('🌈 Unicode parameter 你好'); diff --git a/packages/core/test/DirectoryLoader.test.ts b/packages/core/test/DirectoryLoader.test.ts new file mode 100644 index 0000000000..01a8c8d34a --- /dev/null +++ b/packages/core/test/DirectoryLoader.test.ts @@ -0,0 +1,781 @@ +import { mock } from 'jest-mock-extended'; +import type { + ICredentialType, + INodeType, + INodeTypeDescription, + IVersionedNodeType, +} from 'n8n-workflow'; +import { deepCopy } from 'n8n-workflow'; +import fs from 'node:fs'; +import fsPromises from 'node:fs/promises'; + +jest.mock('node:fs'); +jest.mock('node:fs/promises'); +const mockFs = mock(); +const mockFsPromises = mock(); +fs.readFileSync = mockFs.readFileSync; +fsPromises.readFile = mockFsPromises.readFile; + +jest.mock('fast-glob', () => async (pattern: string) => { + return pattern.endsWith('.node.js') + ? 
['dist/Node1/Node1.node.js', 'dist/Node2/Node2.node.js'] + : ['dist/Credential1.js']; +}); + +import * as classLoader from '@/ClassLoader'; +import { + CustomDirectoryLoader, + PackageDirectoryLoader, + LazyPackageDirectoryLoader, +} from '@/DirectoryLoader'; + +describe('DirectoryLoader', () => { + const directory = '/not/a/real/path'; + const packageJson = JSON.stringify({ + name: 'n8n-nodes-testing', + n8n: { + credentials: ['dist/Credential1.js'], + nodes: ['dist/Node1/Node1.node.js', 'dist/Node2/Node2.node.js'], + }, + }); + + const createNode = (name: string, credential?: string) => + mock({ + description: { + name, + version: 1, + icon: `file:${name}.svg`, + iconUrl: undefined, + credentials: credential ? [{ name: credential }] : [], + properties: [], + }, + }); + + const createCredential = (name: string) => + mock({ + name, + icon: `file:${name}.svg`, + iconUrl: undefined, + extends: undefined, + properties: [], + }); + + let mockCredential1: ICredentialType, mockNode1: INodeType, mockNode2: INodeType; + + beforeEach(() => { + mockCredential1 = createCredential('credential1'); + mockNode1 = createNode('node1', 'credential1'); + mockNode2 = createNode('node2'); + jest.clearAllMocks(); + }); + + //@ts-expect-error overwrite a readonly property + classLoader.loadClassInIsolation = jest.fn((_: string, className: string) => { + if (className === 'Node1') return mockNode1; + if (className === 'Node2') return mockNode2; + if (className === 'Credential1') return mockCredential1; + throw new Error(`${className} is invalid`); + }); + + describe('CustomDirectoryLoader', () => { + it('should load custom nodes and credentials', async () => { + const loader = new CustomDirectoryLoader(directory); + expect(loader.packageName).toEqual('CUSTOM'); + + await loader.loadAll(); + + expect(loader.isLazyLoaded).toBe(false); + expect(mockFsPromises.readFile).not.toHaveBeenCalled(); + expect(classLoader.loadClassInIsolation).toHaveBeenCalledTimes(3); + + expect(loader.nodesByCredential).toEqual({ credential1: ['node1'] }); + expect(loader.credentialTypes).toEqual({ + credential1: { sourcePath: 'dist/Credential1.js', type: mockCredential1 }, + }); + expect(loader.nodeTypes).toEqual({ + node1: { sourcePath: 'dist/Node1/Node1.node.js', type: mockNode1 }, + node2: { sourcePath: 'dist/Node2/Node2.node.js', type: mockNode2 }, + }); + expect(mockCredential1.iconUrl).toBe('icons/CUSTOM/dist/credential1.svg'); + expect(mockNode1.description.iconUrl).toBe('icons/CUSTOM/dist/Node1/node1.svg'); + expect(mockNode2.description.iconUrl).toBe('icons/CUSTOM/dist/Node2/node2.svg'); + + expect(mockFs.readFileSync).not.toHaveBeenCalled(); + }); + }); + + describe('PackageDirectoryLoader', () => { + it('should load nodes and credentials from an installed package', async () => { + mockFs.readFileSync.calledWith(`${directory}/package.json`).mockReturnValue(packageJson); + + const loader = new PackageDirectoryLoader(directory); + expect(loader.packageName).toEqual('n8n-nodes-testing'); + + await loader.loadAll(); + + expect(loader.isLazyLoaded).toBe(false); + expect(mockFsPromises.readFile).not.toHaveBeenCalled(); + expect(classLoader.loadClassInIsolation).toHaveBeenCalledTimes(3); + + expect(loader.nodesByCredential).toEqual({ credential1: ['node1'] }); + expect(loader.credentialTypes).toEqual({ + credential1: { sourcePath: 'dist/Credential1.js', type: mockCredential1 }, + }); + expect(loader.nodeTypes).toEqual({ + node1: { sourcePath: 'dist/Node1/Node1.node.js', type: mockNode1 }, + node2: { sourcePath: 
'dist/Node2/Node2.node.js', type: mockNode2 }, + }); + expect(mockCredential1.iconUrl).toBe('icons/n8n-nodes-testing/dist/credential1.svg'); + expect(mockNode1.description.iconUrl).toBe('icons/n8n-nodes-testing/dist/Node1/node1.svg'); + expect(mockNode2.description.iconUrl).toBe('icons/n8n-nodes-testing/dist/Node2/node2.svg'); + }); + + it('should throw error when package.json is missing', async () => { + mockFs.readFileSync.mockImplementationOnce(() => { + throw new Error('ENOENT'); + }); + + expect(() => new PackageDirectoryLoader(directory)).toThrow(); + }); + + it('should throw error when package.json is invalid', async () => { + mockFs.readFileSync.calledWith(`${directory}/package.json`).mockReturnValue('invalid json'); + + expect(() => new PackageDirectoryLoader(directory)).toThrow('Failed to parse JSON'); + }); + + it('should do nothing if package.json has no n8n field', async () => { + mockFs.readFileSync.calledWith(`${directory}/package.json`).mockReturnValue( + JSON.stringify({ + name: 'n8n-nodes-testing', + }), + ); + + const loader = new PackageDirectoryLoader(directory); + await loader.loadAll(); + + expect(loader.nodeTypes).toEqual({}); + expect(loader.credentialTypes).toEqual({}); + expect(classLoader.loadClassInIsolation).not.toHaveBeenCalled(); + }); + + it('should hide httpRequestNode property when credential has supported nodes', async () => { + mockFs.readFileSync.calledWith(`${directory}/package.json`).mockReturnValue(packageJson); + mockCredential1.httpRequestNode = mock({ hidden: false }); + + const loader = new PackageDirectoryLoader(directory); + await loader.loadAll(); + + expect(mockCredential1.httpRequestNode?.hidden).toBe(true); + }); + + it('should not modify httpRequestNode when credential has no supported nodes', async () => { + mockFs.readFileSync.calledWith(`${directory}/package.json`).mockReturnValue(packageJson); + mockCredential1.httpRequestNode = mock({ hidden: false }); + mockNode1.description.credentials = []; + + const loader = new PackageDirectoryLoader(directory); + await loader.loadAll(); + + expect(mockCredential1.httpRequestNode?.hidden).toBe(false); + }); + + it('should inherit iconUrl from supported node when credential has no icon', async () => { + mockFs.readFileSync.calledWith(`${directory}/package.json`).mockReturnValue(packageJson); + mockCredential1.icon = undefined; + + const loader = new PackageDirectoryLoader(directory); + await loader.loadAll(); + + expect(mockCredential1.supportedNodes).toEqual(['node1']); + expect(mockCredential1.iconUrl).toBe(mockNode1.description.iconUrl); + }); + }); + + describe('LazyPackageDirectoryLoader', () => { + it('should skip loading nodes and credentials from a lazy-loadable package', async () => { + mockFs.readFileSync.calledWith(`${directory}/package.json`).mockReturnValue(packageJson); + mockFsPromises.readFile.mockResolvedValue('[]'); + + const loader = new LazyPackageDirectoryLoader(directory); + expect(loader.packageName).toEqual('n8n-nodes-testing'); + + await loader.loadAll(); + + expect(loader.isLazyLoaded).toBe(true); + expect(mockFsPromises.readFile).toHaveBeenCalledTimes(4); + expect(classLoader.loadClassInIsolation).not.toHaveBeenCalled(); + }); + + it('should fall back to non-lazy loading if any json file fails to parse', async () => { + mockFs.readFileSync.calledWith(`${directory}/package.json`).mockReturnValue(packageJson); + mockFsPromises.readFile.mockRejectedValue(new Error('Failed to read file')); + + const loader = new LazyPackageDirectoryLoader(directory); + await 
loader.loadAll(); + + expect(loader.isLazyLoaded).toBe(false); + expect(mockFsPromises.readFile).toHaveBeenCalled(); + expect(classLoader.loadClassInIsolation).toHaveBeenCalledTimes(3); + }); + + it('should only load included nodes when includeNodes is set', async () => { + mockFs.readFileSync.calledWith(`${directory}/package.json`).mockReturnValue(packageJson); + + mockFsPromises.readFile.mockImplementation(async (path) => { + if (typeof path !== 'string') throw new Error('Invalid path'); + + if (path.endsWith('known/nodes.json')) { + return JSON.stringify({ + node1: { className: 'Node1', sourcePath: 'dist/Node1/Node1.node.js' }, + node2: { className: 'Node2', sourcePath: 'dist/Node2/Node2.node.js' }, + }); + } + if (path.endsWith('known/credentials.json')) { + return JSON.stringify({}); + } + if (path.endsWith('types/nodes.json')) { + return JSON.stringify([ + { name: 'n8n-nodes-testing.node1' }, + { name: 'n8n-nodes-testing.node2' }, + ]); + } + if (path.endsWith('types/credentials.json')) { + return JSON.stringify([]); + } + throw new Error('File not found'); + }); + + const loader = new LazyPackageDirectoryLoader(directory, [], ['n8n-nodes-testing.node1']); + await loader.loadAll(); + + expect(loader.isLazyLoaded).toBe(true); + expect(loader.known.nodes).toEqual({ + node1: { className: 'Node1', sourcePath: 'dist/Node1/Node1.node.js' }, + }); + expect(loader.types.nodes).toHaveLength(1); + expect(loader.types.nodes[0].name).toBe('n8n-nodes-testing.node1'); + expect(classLoader.loadClassInIsolation).not.toHaveBeenCalled(); + }); + + it('should load no nodes when includeNodes does not match any nodes', async () => { + mockFs.readFileSync.calledWith(`${directory}/package.json`).mockReturnValue(packageJson); + + mockFsPromises.readFile.mockImplementation(async (path) => { + if (typeof path !== 'string') throw new Error('Invalid path'); + + if (path.endsWith('known/nodes.json')) { + return JSON.stringify({ + node1: { className: 'Node1', sourcePath: 'dist/Node1/Node1.node.js' }, + node2: { className: 'Node2', sourcePath: 'dist/Node2/Node2.node.js' }, + }); + } + if (path.endsWith('known/credentials.json')) { + return JSON.stringify({}); + } + if (path.endsWith('types/nodes.json')) { + return JSON.stringify([ + { name: 'n8n-nodes-testing.node1' }, + { name: 'n8n-nodes-testing.node2' }, + ]); + } + if (path.endsWith('types/credentials.json')) { + return JSON.stringify([]); + } + throw new Error('File not found'); + }); + + const loader = new LazyPackageDirectoryLoader( + directory, + [], + ['n8n-nodes-testing.nonexistent'], + ); + await loader.loadAll(); + + expect(loader.isLazyLoaded).toBe(true); + expect(loader.known.nodes).toEqual({}); + expect(loader.types.nodes).toHaveLength(0); + expect(classLoader.loadClassInIsolation).not.toHaveBeenCalled(); + }); + + it('should exclude specified nodes when excludeNodes is set', async () => { + mockFs.readFileSync.calledWith(`${directory}/package.json`).mockReturnValue(packageJson); + + mockFsPromises.readFile.mockImplementation(async (path) => { + if (typeof path !== 'string') throw new Error('Invalid path'); + + if (path.endsWith('known/nodes.json')) { + return JSON.stringify({ + node1: { className: 'Node1', sourcePath: 'dist/Node1/Node1.node.js' }, + node2: { className: 'Node2', sourcePath: 'dist/Node2/Node2.node.js' }, + }); + } + if (path.endsWith('known/credentials.json')) { + return JSON.stringify({}); + } + if (path.endsWith('types/nodes.json')) { + return JSON.stringify([ + { name: 'n8n-nodes-testing.node1' }, + { name: 
'n8n-nodes-testing.node2' }, + ]); + } + if (path.endsWith('types/credentials.json')) { + return JSON.stringify([]); + } + throw new Error('File not found'); + }); + + const loader = new LazyPackageDirectoryLoader(directory, ['n8n-nodes-testing.node1']); + await loader.loadAll(); + + expect(loader.isLazyLoaded).toBe(true); + expect(loader.known.nodes).toEqual({ + node2: { className: 'Node2', sourcePath: 'dist/Node2/Node2.node.js' }, + }); + expect(loader.types.nodes).toHaveLength(1); + expect(loader.types.nodes[0].name).toBe('n8n-nodes-testing.node2'); + expect(classLoader.loadClassInIsolation).not.toHaveBeenCalled(); + }); + }); + + describe('reset()', () => { + it('should reset all properties to their initial state', async () => { + mockFs.readFileSync.calledWith(`${directory}/package.json`).mockReturnValue(packageJson); + + const loader = new PackageDirectoryLoader(directory); + await loader.loadAll(); + + // Verify loader has loaded data + expect(loader.nodeTypes).not.toEqual({}); + expect(loader.credentialTypes).not.toEqual({}); + expect(loader.types.nodes.length).toBeGreaterThan(0); + expect(loader.types.credentials.length).toBeGreaterThan(0); + expect(loader.loadedNodes.length).toBeGreaterThan(0); + expect(Object.keys(loader.known.nodes).length).toBeGreaterThan(0); + expect(Object.keys(loader.known.credentials).length).toBeGreaterThan(0); + + // Reset the loader + loader.reset(); + + // Verify all properties are reset + expect(loader.nodeTypes).toEqual({}); + expect(loader.credentialTypes).toEqual({}); + expect(loader.types.nodes).toEqual([]); + expect(loader.types.credentials).toEqual([]); + expect(loader.loadedNodes).toEqual([]); + expect(loader.known.nodes).toEqual({}); + expect(loader.known.credentials).toEqual({}); + }); + }); + + describe('getVersionedNodeTypeAll', () => { + it('should return array with single node for non-versioned node', () => { + const loader = new CustomDirectoryLoader(directory); + const node = createNode('node1'); + + const result = loader.getVersionedNodeTypeAll(node); + + expect(result).toHaveLength(1); + expect(result[0]).toBe(node); + }); + + it('should return all versions of a versioned node', () => { + const loader = new CustomDirectoryLoader(directory); + const nodeV1 = createNode('test'); + const nodeV2 = createNode('test'); + nodeV1.description.version = 1; + nodeV2.description.version = 2; + + const versionedNode = mock({ + description: { name: 'test', codex: {} }, + currentVersion: 2, + nodeVersions: { + 1: nodeV1, + 2: nodeV2, + }, + }); + + const result = loader.getVersionedNodeTypeAll(versionedNode); + + expect(result).toHaveLength(2); + expect(result).toEqual([nodeV2, nodeV1]); + expect(result[0].description.name).toBe('test'); + expect(result[1].description.name).toBe('test'); + }); + }); + + describe('getCredentialsForNode', () => { + it('should return empty array if node has no credentials', () => { + const loader = new CustomDirectoryLoader(directory); + const node = createNode('node1'); + + const result = loader.getCredentialsForNode(node); + + expect(Array.isArray(result)).toBe(true); + expect(result.length).toEqual(0); + }); + + it('should return credentials for non-versioned node', () => { + const loader = new CustomDirectoryLoader(directory); + const node = createNode('node1', 'testCred'); + + const result = loader.getCredentialsForNode(node); + + expect(result).toHaveLength(1); + expect(result[0].name).toBe('testCred'); + }); + + it('should return unique credentials from all versions of a versioned node', () => { + const loader = 
new CustomDirectoryLoader(directory); + const nodeV1 = createNode('test', 'cred1'); + const nodeV2 = createNode('test', 'cred2'); + + const versionedNode = mock({ + description: { name: 'test' }, + currentVersion: 2, + nodeVersions: { + 1: nodeV1, + 2: nodeV2, + }, + }); + + const result = loader.getCredentialsForNode(versionedNode); + + expect(result).toHaveLength(2); + expect(result[0].name).toBe('cred1'); + expect(result[1].name).toBe('cred2'); + }); + + it('should remove duplicate credentials from different versions', () => { + const loader = new CustomDirectoryLoader(directory); + const nodeV1 = createNode('test', 'cred1'); + const nodeV2 = createNode('test', 'cred1'); // Same credential + + const versionedNode = mock({ + description: { name: 'test' }, + currentVersion: 2, + nodeVersions: { + 1: nodeV1, + 2: nodeV2, + }, + }); + + const result = loader.getCredentialsForNode(versionedNode); + + expect(result).toHaveLength(1); + expect(result[0].name).toBe('cred1'); + }); + }); + + describe('loadCredentialFromFile', () => { + it('should load credential and store it correctly', () => { + const loader = new CustomDirectoryLoader(directory); + const filePath = 'dist/Credential1.js'; + + loader.loadCredentialFromFile(filePath); + + expect(loader.credentialTypes).toEqual({ + credential1: { + type: mockCredential1, + sourcePath: filePath, + }, + }); + + expect(loader.known.credentials).toEqual({ + credential1: { + className: mockCredential1.constructor.name, + sourcePath: filePath, + extends: undefined, + supportedNodes: undefined, + }, + }); + + expect(loader.types.credentials).toEqual([mockCredential1]); + }); + + it('should update credential icon paths', () => { + const loader = new CustomDirectoryLoader(directory); + const filePath = 'dist/Credential1.js'; + + const credWithIcon = createCredential('credentialWithIcon'); + credWithIcon.icon = { + light: 'file:light.svg', + dark: 'file:dark.svg', + }; + + jest.spyOn(classLoader, 'loadClassInIsolation').mockReturnValueOnce(credWithIcon); + + loader.loadCredentialFromFile(filePath); + + expect(credWithIcon.iconUrl).toEqual({ + light: 'icons/CUSTOM/dist/light.svg', + dark: 'icons/CUSTOM/dist/dark.svg', + }); + expect(credWithIcon.icon).toBeUndefined(); + }); + + it('should add toJSON method to credential type', () => { + const loader = new CustomDirectoryLoader(directory); + const filePath = 'dist/Credential1.js'; + + const credWithAuth = createCredential('credWithAuth'); + credWithAuth.authenticate = jest.fn(); + + jest.spyOn(classLoader, 'loadClassInIsolation').mockReturnValueOnce(credWithAuth); + + loader.loadCredentialFromFile(filePath); + + const serialized = deepCopy(credWithAuth); + expect(serialized.authenticate).toEqual({}); + }); + + it('should store credential extends and supported nodes info', () => { + const loader = new CustomDirectoryLoader(directory); + const filePath = 'dist/Credential1.js'; + + const extendingCred = createCredential('extendingCred'); + extendingCred.extends = ['baseCredential']; + + jest.spyOn(classLoader, 'loadClassInIsolation').mockReturnValueOnce(extendingCred); + + // Set up nodesByCredential before loading + loader.nodesByCredential.extendingCred = ['node1', 'node2']; + + loader.loadCredentialFromFile(filePath); + + expect(loader.known.credentials.extendingCred).toEqual({ + className: extendingCred.constructor.name, + sourcePath: filePath, + extends: ['baseCredential'], + supportedNodes: ['node1', 'node2'], + }); + }); + + it('should throw error if credential class cannot be loaded', () => { + const 
loader = new CustomDirectoryLoader(directory); + const filePath = 'dist/InvalidCred.js'; + + jest.spyOn(classLoader, 'loadClassInIsolation').mockImplementationOnce(() => { + throw new TypeError('Class not found'); + }); + + expect(() => loader.loadCredentialFromFile(filePath)).toThrow('Class could not be found'); + }); + }); + + describe('getCredential', () => { + it('should return existing loaded credential type', () => { + const loader = new CustomDirectoryLoader(directory); + const filePath = 'dist/Credential1.js'; + + loader.loadCredentialFromFile(filePath); + + const result = loader.getCredential('credential1'); + expect(result).toEqual({ + type: mockCredential1, + sourcePath: filePath, + }); + }); + + it('should load credential from known credentials if not already loaded', () => { + const loader = new CustomDirectoryLoader(directory); + const filePath = 'dist/Credential1.js'; + + // Setup known credentials without loading + loader.known.credentials.credential1 = { + className: 'Credential1', + sourcePath: filePath, + }; + + const result = loader.getCredential('credential1'); + + expect(result).toEqual({ + type: mockCredential1, + sourcePath: filePath, + }); + expect(classLoader.loadClassInIsolation).toHaveBeenCalledWith( + expect.stringContaining(filePath), + 'Credential1', + ); + }); + + it('should throw UnrecognizedCredentialTypeError if credential type is not found', () => { + const loader = new CustomDirectoryLoader(directory); + + expect(() => loader.getCredential('nonexistent')).toThrow( + 'Unrecognized credential type: nonexistent', + ); + }); + }); + + describe('loadNodeFromFile', () => { + it('should load node and store it correctly', () => { + const loader = new CustomDirectoryLoader(directory); + const filePath = 'dist/Node1/Node1.node.js'; + + loader.loadNodeFromFile(filePath); + + expect(loader.nodeTypes).toEqual({ + node1: { + type: mockNode1, + sourcePath: filePath, + }, + }); + + expect(loader.known.nodes).toEqual({ + node1: { + className: mockNode1.constructor.name, + sourcePath: filePath, + }, + }); + + expect(loader.types.nodes).toEqual([mockNode1.description]); + expect(loader.loadedNodes).toEqual([{ name: 'node1', version: 1 }]); + }); + + it('should update node icon paths', () => { + const loader = new CustomDirectoryLoader(directory); + const filePath = 'dist/Node1/Node1.node.js'; + + const nodeWithIcon = createNode('nodeWithIcon'); + nodeWithIcon.description.icon = { + light: 'file:light.svg', + dark: 'file:dark.svg', + }; + + jest.spyOn(classLoader, 'loadClassInIsolation').mockReturnValueOnce(nodeWithIcon); + + loader.loadNodeFromFile(filePath); + + expect(nodeWithIcon.description.iconUrl).toEqual({ + light: 'icons/CUSTOM/dist/Node1/light.svg', + dark: 'icons/CUSTOM/dist/Node1/dark.svg', + }); + expect(nodeWithIcon.description.icon).toBeUndefined(); + }); + + it('should skip node if included in excludeNodes', () => { + const loader = new CustomDirectoryLoader(directory, ['CUSTOM.node1']); + const filePath = 'dist/Node1/Node1.node.js'; + + loader.loadNodeFromFile(filePath); + + expect(loader.nodeTypes).toEqual({}); + expect(loader.known.nodes).toEqual({}); + expect(loader.types.nodes).toEqual([]); + expect(loader.loadedNodes).toEqual([]); + }); + + it('should skip node if not in includeNodes', () => { + const loader = new CustomDirectoryLoader(directory, [], ['CUSTOM.other']); + const filePath = 'dist/Node1/Node1.node.js'; + + loader.loadNodeFromFile(filePath); + + expect(loader.nodeTypes).toEqual({}); + expect(loader.known.nodes).toEqual({}); + 
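// Illustrative usage sketch based on the constructor signature exercised in the
// surrounding tests: (directory, excludeNodes?, includeNodes?). The path below is a
// hypothetical example value, not a fixture from this file; CustomDirectoryLoader is
// imported from '@/DirectoryLoader' at the top of this file.
async function loadCustomExtensionsSketch() {
  const exampleLoader = new CustomDirectoryLoader('/data/custom-extensions', [], ['CUSTOM.node1']);
  await exampleLoader.loadAll();
  // Only nodes listed in includeNodes are loaded into nodeTypes.
  return { nodes: exampleLoader.nodeTypes, credentials: exampleLoader.credentialTypes };
}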
expect(loader.types.nodes).toEqual([]); + expect(loader.loadedNodes).toEqual([]); + }); + + it('should handle versioned nodes correctly', () => { + const loader = new CustomDirectoryLoader(directory); + const filePath = 'dist/Node1/Node1.node.js'; + + const nodeV1 = createNode('test'); + const nodeV2 = createNode('test'); + nodeV1.description.version = 1; + nodeV2.description.version = 2; + + const versionedNode = mock({ + description: { name: 'test', codex: {}, iconUrl: undefined, icon: undefined }, + currentVersion: 2, + nodeVersions: { + 1: nodeV1, + 2: nodeV2, + }, + }); + + jest.spyOn(classLoader, 'loadClassInIsolation').mockReturnValueOnce(versionedNode); + + loader.loadNodeFromFile(filePath); + + expect(loader.loadedNodes).toEqual([{ name: 'test', version: 2 }]); + + const nodes = loader.types.nodes as INodeTypeDescription[]; + expect(nodes).toHaveLength(2); + expect(nodes[0]?.version).toBe(2); + expect(nodes[1]?.version).toBe(1); + }); + + it('should store credential associations correctly', () => { + const loader = new CustomDirectoryLoader(directory); + const filePath = 'dist/Node1/Node1.node.js'; + + const nodeWithCreds = createNode('testNode', 'testCred'); + jest.spyOn(classLoader, 'loadClassInIsolation').mockReturnValueOnce(nodeWithCreds); + + loader.loadNodeFromFile(filePath); + + expect(loader.nodesByCredential).toEqual({ + testCred: ['testNode'], + }); + }); + + it('should throw error if node class cannot be loaded', () => { + const loader = new CustomDirectoryLoader(directory); + const filePath = 'dist/InvalidNode/InvalidNode.node.js'; + + jest.spyOn(classLoader, 'loadClassInIsolation').mockImplementationOnce(() => { + throw new TypeError('Class not found'); + }); + + expect(() => loader.loadNodeFromFile(filePath)).toThrow('Class could not be found'); + }); + }); + + describe('getNode', () => { + it('should return existing loaded node type', () => { + const loader = new CustomDirectoryLoader(directory); + const filePath = 'dist/Node1/Node1.node.js'; + + loader.loadNodeFromFile(filePath); + + const result = loader.getNode('node1'); + expect(result).toEqual({ + type: mockNode1, + sourcePath: filePath, + }); + }); + + it('should load node from known nodes if not already loaded', () => { + const loader = new CustomDirectoryLoader(directory); + const filePath = 'dist/Node1/Node1.node.js'; + + // Setup known nodes without loading + loader.known.nodes.node1 = { + className: 'Node1', + sourcePath: filePath, + }; + + const result = loader.getNode('node1'); + + expect(result).toEqual({ + type: mockNode1, + sourcePath: filePath, + }); + expect(classLoader.loadClassInIsolation).toHaveBeenCalledWith( + expect.stringContaining(filePath), + 'Node1', + ); + }); + + it('should throw UnrecognizedNodeTypeError if node type is not found', () => { + const loader = new CustomDirectoryLoader(directory); + + expect(() => loader.getNode('nonexistent')).toThrow( + 'Unrecognized node type: CUSTOM.nonexistent', + ); + }); + }); +}); diff --git a/packages/core/test/InstanceSettings.test.ts b/packages/core/test/InstanceSettings.test.ts index 22f8fadff8..b6a9cf63c4 100644 --- a/packages/core/test/InstanceSettings.test.ts +++ b/packages/core/test/InstanceSettings.test.ts @@ -1,16 +1,18 @@ -import fs from 'fs'; +import { mock } from 'jest-mock-extended'; +jest.mock('node:fs', () => mock()); +import * as fs from 'node:fs'; -import { InstanceSettings } from '../src/InstanceSettings'; -import { InstanceSettingsConfig } from '../src/InstanceSettingsConfig'; +import { InstanceSettings } from 
'@/InstanceSettings'; +import { InstanceSettingsConfig } from '@/InstanceSettingsConfig'; describe('InstanceSettings', () => { - process.env.N8N_USER_FOLDER = '/test'; + const userFolder = '/test'; + process.env.N8N_USER_FOLDER = userFolder; + const settingsFile = `${userFolder}/.n8n/config`; - const existSpy = jest.spyOn(fs, 'existsSync'); - const statSpy = jest.spyOn(fs, 'statSync'); - const chmodSpy = jest.spyOn(fs, 'chmodSync'); + const mockFs = mock(fs); - const createSettingsInstance = (opts?: Partial) => + const createInstanceSettings = (opts?: Partial) => new InstanceSettings({ ...new InstanceSettingsConfig(), ...opts, @@ -18,18 +20,17 @@ describe('InstanceSettings', () => { beforeEach(() => { jest.resetAllMocks(); - statSpy.mockReturnValue({ mode: 0o600 } as fs.Stats); + mockFs.statSync.mockReturnValue({ mode: 0o600 } as fs.Stats); }); describe('If the settings file exists', () => { - const readSpy = jest.spyOn(fs, 'readFileSync'); beforeEach(() => { - existSpy.mockReturnValue(true); + mockFs.existsSync.mockReturnValue(true); }); it('should load settings from the file', () => { - readSpy.mockReturnValue(JSON.stringify({ encryptionKey: 'test_key' })); - const settings = createSettingsInstance(); + mockFs.readFileSync.mockReturnValue(JSON.stringify({ encryptionKey: 'test_key' })); + const settings = createInstanceSettings(); expect(settings.encryptionKey).toEqual('test_key'); expect(settings.instanceId).toEqual( '6ce26c63596f0cc4323563c529acfca0cccb0e57f6533d79a60a42c9ff862ae7', @@ -37,71 +38,69 @@ describe('InstanceSettings', () => { }); it('should throw error if settings file is not valid JSON', () => { - readSpy.mockReturnValue('{"encryptionKey":"test_key"'); - expect(() => createSettingsInstance()).toThrowError(); + mockFs.readFileSync.mockReturnValue('{"encryptionKey":"test_key"'); + expect(() => createInstanceSettings()).toThrowError(); }); it('should throw if the env and file keys do not match', () => { - readSpy.mockReturnValue(JSON.stringify({ encryptionKey: 'key_1' })); + mockFs.readFileSync.mockReturnValue(JSON.stringify({ encryptionKey: 'key_1' })); process.env.N8N_ENCRYPTION_KEY = 'key_2'; - expect(() => createSettingsInstance()).toThrowError(); + expect(() => createInstanceSettings()).toThrowError(); }); it('should check if the settings file has the correct permissions', () => { process.env.N8N_ENCRYPTION_KEY = 'test_key'; - readSpy.mockReturnValueOnce(JSON.stringify({ encryptionKey: 'test_key' })); - statSpy.mockReturnValueOnce({ mode: 0o600 } as fs.Stats); - const settings = createSettingsInstance(); + mockFs.readFileSync.mockReturnValueOnce(JSON.stringify({ encryptionKey: 'test_key' })); + mockFs.statSync.mockReturnValueOnce({ mode: 0o600 } as fs.Stats); + const settings = createInstanceSettings(); expect(settings.encryptionKey).toEqual('test_key'); expect(settings.instanceId).toEqual( '6ce26c63596f0cc4323563c529acfca0cccb0e57f6533d79a60a42c9ff862ae7', ); - expect(statSpy).toHaveBeenCalledWith('/test/.n8n/config'); + expect(mockFs.statSync).toHaveBeenCalledWith('/test/.n8n/config'); }); it('should check the permissions but not fix them if settings file has incorrect permissions by default', () => { - readSpy.mockReturnValueOnce(JSON.stringify({ encryptionKey: 'test_key' })); - statSpy.mockReturnValueOnce({ mode: 0o644 } as fs.Stats); - createSettingsInstance(); - expect(statSpy).toHaveBeenCalledWith('/test/.n8n/config'); - expect(chmodSpy).not.toHaveBeenCalled(); + mockFs.readFileSync.mockReturnValueOnce(JSON.stringify({ encryptionKey: 'test_key' })); + 
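// Illustrative sketch (an assumption about usage, not a fixture from this file):
// jest-mock-extended's calledWith registers argument-scoped stubs, which is how the
// isDocker tests later in this file tell reads of the settings file apart from reads
// of /proc/self/cgroup.
const fsMockSketch = mock<typeof fs>();
fsMockSketch.existsSync.calledWith('/test/.n8n/config').mockReturnValue(true);
fsMockSketch.existsSync.calledWith('/.dockerenv').mockReturnValue(false);
// fsMockSketch.existsSync('/test/.n8n/config') -> true
// fsMockSketch.existsSync('/.dockerenv')       -> false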
mockFs.statSync.mockReturnValueOnce({ mode: 0o644 } as fs.Stats); + createInstanceSettings(); + expect(mockFs.statSync).toHaveBeenCalledWith('/test/.n8n/config'); + expect(mockFs.chmodSync).not.toHaveBeenCalled(); }); it("should not check the permissions if 'N8N_ENFORCE_SETTINGS_FILE_PERMISSIONS' is false", () => { process.env.N8N_ENFORCE_SETTINGS_FILE_PERMISSIONS = 'false'; - readSpy.mockReturnValueOnce(JSON.stringify({ encryptionKey: 'test_key' })); - createSettingsInstance(); - expect(statSpy).not.toHaveBeenCalled(); - expect(chmodSpy).not.toHaveBeenCalled(); + mockFs.readFileSync.mockReturnValueOnce(JSON.stringify({ encryptionKey: 'test_key' })); + createInstanceSettings(); + expect(mockFs.statSync).not.toHaveBeenCalled(); + expect(mockFs.chmodSync).not.toHaveBeenCalled(); }); it("should fix the permissions of the settings file if 'N8N_ENFORCE_SETTINGS_FILE_PERMISSIONS' is true", () => { process.env.N8N_ENFORCE_SETTINGS_FILE_PERMISSIONS = 'true'; - readSpy.mockReturnValueOnce(JSON.stringify({ encryptionKey: 'test_key' })); - statSpy.mockReturnValueOnce({ mode: 0o644 } as fs.Stats); - createSettingsInstance({ + mockFs.readFileSync.mockReturnValueOnce(JSON.stringify({ encryptionKey: 'test_key' })); + mockFs.statSync.mockReturnValueOnce({ mode: 0o644 } as fs.Stats); + createInstanceSettings({ enforceSettingsFilePermissions: true, }); - expect(statSpy).toHaveBeenCalledWith('/test/.n8n/config'); - expect(chmodSpy).toHaveBeenCalledWith('/test/.n8n/config', 0o600); + expect(mockFs.statSync).toHaveBeenCalledWith('/test/.n8n/config'); + expect(mockFs.chmodSync).toHaveBeenCalledWith('/test/.n8n/config', 0o600); }); }); describe('If the settings file does not exist', () => { - const mkdirSpy = jest.spyOn(fs, 'mkdirSync'); - const writeFileSpy = jest.spyOn(fs, 'writeFileSync'); beforeEach(() => { - existSpy.mockReturnValue(false); - mkdirSpy.mockReturnValue(''); - writeFileSpy.mockReturnValue(); + mockFs.existsSync.mockReturnValue(false); + mockFs.mkdirSync.mockReturnValue(''); + mockFs.writeFileSync.mockReturnValue(); }); it('should create a new settings file without explicit permissions if N8N_ENFORCE_SETTINGS_FILE_PERMISSIONS is not set', () => { process.env.N8N_ENCRYPTION_KEY = 'key_2'; - const settings = createSettingsInstance(); + const settings = createInstanceSettings(); expect(settings.encryptionKey).not.toEqual('test_key'); - expect(mkdirSpy).toHaveBeenCalledWith('/test/.n8n', { recursive: true }); - expect(writeFileSpy).toHaveBeenCalledWith( + expect(mockFs.mkdirSync).toHaveBeenCalledWith('/test/.n8n', { recursive: true }); + expect(mockFs.writeFileSync).toHaveBeenCalledWith( '/test/.n8n/config', expect.stringContaining('"encryptionKey":'), { @@ -114,10 +113,10 @@ describe('InstanceSettings', () => { it('should create a new settings file without explicit permissions if N8N_ENFORCE_SETTINGS_FILE_PERMISSIONS=false', () => { process.env.N8N_ENFORCE_SETTINGS_FILE_PERMISSIONS = 'false'; process.env.N8N_ENCRYPTION_KEY = 'key_2'; - const settings = createSettingsInstance(); + const settings = createInstanceSettings(); expect(settings.encryptionKey).not.toEqual('test_key'); - expect(mkdirSpy).toHaveBeenCalledWith('/test/.n8n', { recursive: true }); - expect(writeFileSpy).toHaveBeenCalledWith( + expect(mockFs.mkdirSync).toHaveBeenCalledWith('/test/.n8n', { recursive: true }); + expect(mockFs.writeFileSync).toHaveBeenCalledWith( '/test/.n8n/config', expect.stringContaining('"encryptionKey":'), { @@ -130,12 +129,12 @@ describe('InstanceSettings', () => { it('should create a new settings file with 
explicit permissions if N8N_ENFORCE_SETTINGS_FILE_PERMISSIONS=true', () => { process.env.N8N_ENFORCE_SETTINGS_FILE_PERMISSIONS = 'true'; process.env.N8N_ENCRYPTION_KEY = 'key_2'; - const settings = createSettingsInstance({ + const settings = createInstanceSettings({ enforceSettingsFilePermissions: true, }); expect(settings.encryptionKey).not.toEqual('test_key'); - expect(mkdirSpy).toHaveBeenCalledWith('/test/.n8n', { recursive: true }); - expect(writeFileSpy).toHaveBeenCalledWith( + expect(mockFs.mkdirSync).toHaveBeenCalledWith('/test/.n8n', { recursive: true }); + expect(mockFs.writeFileSync).toHaveBeenCalledWith( '/test/.n8n/config', expect.stringContaining('"encryptionKey":'), { @@ -147,14 +146,14 @@ describe('InstanceSettings', () => { it('should pick up the encryption key from env var N8N_ENCRYPTION_KEY', () => { process.env.N8N_ENCRYPTION_KEY = 'env_key'; - const settings = createSettingsInstance(); + const settings = createInstanceSettings(); expect(settings.encryptionKey).toEqual('env_key'); expect(settings.instanceId).toEqual( '2c70e12b7a0646f92279f427c7b38e7334d8e5389cff167a1dc30e73f826b683', ); expect(settings.encryptionKey).not.toEqual('test_key'); - expect(mkdirSpy).toHaveBeenCalledWith('/test/.n8n', { recursive: true }); - expect(writeFileSpy).toHaveBeenCalledWith( + expect(mockFs.mkdirSync).toHaveBeenCalledWith('/test/.n8n', { recursive: true }); + expect(mockFs.writeFileSync).toHaveBeenCalledWith( '/test/.n8n/config', expect.stringContaining('"encryptionKey":'), { @@ -167,10 +166,10 @@ describe('InstanceSettings', () => { it("should not set the permissions of the settings file if 'N8N_IGNORE_SETTINGS_FILE_PERMISSIONS' is true", () => { process.env.N8N_ENCRYPTION_KEY = 'key_2'; process.env.N8N_IGNORE_SETTINGS_FILE_PERMISSIONS = 'true'; - const settings = createSettingsInstance(); + const settings = createInstanceSettings(); expect(settings.encryptionKey).not.toEqual('test_key'); - expect(mkdirSpy).toHaveBeenCalledWith('/test/.n8n', { recursive: true }); - expect(writeFileSpy).toHaveBeenCalledWith( + expect(mockFs.mkdirSync).toHaveBeenCalledWith('/test/.n8n', { recursive: true }); + expect(mockFs.writeFileSync).toHaveBeenCalledWith( '/test/.n8n/config', expect.stringContaining('"encryptionKey":'), { @@ -185,14 +184,67 @@ describe('InstanceSettings', () => { it('should generate a `hostId`', () => { const encryptionKey = 'test_key'; process.env.N8N_ENCRYPTION_KEY = encryptionKey; - jest.spyOn(fs, 'existsSync').mockReturnValueOnce(true); - jest.spyOn(fs, 'readFileSync').mockReturnValueOnce(JSON.stringify({ encryptionKey })); + mockFs.existsSync.mockReturnValueOnce(true); + mockFs.readFileSync.mockReturnValueOnce(JSON.stringify({ encryptionKey })); - const settings = createSettingsInstance(); + const settings = createInstanceSettings(); const [instanceType, nanoid] = settings.hostId.split('-'); expect(instanceType).toEqual('main'); expect(nanoid).toHaveLength(16); // e.g. 
sDX6ZPc0bozv66zM }); }); + + describe('isDocker', () => { + let settings: InstanceSettings; + + beforeEach(() => { + mockFs.existsSync.calledWith(settingsFile).mockReturnValue(true); + mockFs.readFileSync + .calledWith(settingsFile) + .mockReturnValue(JSON.stringify({ encryptionKey: 'test_key' })); + settings = new InstanceSettings(mock()); + }); + + it('should return true if /.dockerenv exists', () => { + mockFs.existsSync.calledWith('/.dockerenv').mockReturnValueOnce(true); + expect(settings.isDocker).toBe(true); + expect(mockFs.existsSync).toHaveBeenCalledWith('/.dockerenv'); + expect(mockFs.readFileSync).not.toHaveBeenCalledWith('/proc/self/cgroup', 'utf8'); + }); + + it('should return true if /proc/self/cgroup contains docker', () => { + mockFs.existsSync.calledWith('/.dockerenv').mockReturnValueOnce(false); + mockFs.readFileSync + .calledWith('/proc/self/cgroup', 'utf8') + .mockReturnValueOnce('docker cgroup'); + + expect(settings.isDocker).toBe(true); + expect(mockFs.existsSync).toHaveBeenCalledWith('/.dockerenv'); + expect(mockFs.readFileSync).toHaveBeenCalledWith('/proc/self/cgroup', 'utf8'); + }); + + it('should return false if no docker indicators are found', () => { + mockFs.existsSync.calledWith('/.dockerenv').mockReturnValueOnce(false); + mockFs.readFileSync.calledWith('/proc/self/cgroup', 'utf8').mockReturnValueOnce(''); + expect(settings.isDocker).toBe(false); + }); + + it('should return false if checking for docker throws an error', () => { + mockFs.existsSync.calledWith('/.dockerenv').mockImplementationOnce(() => { + throw new Error('Access denied'); + }); + expect(settings.isDocker).toBe(false); + }); + + it('should cache the result of isDocker check', () => { + mockFs.existsSync.calledWith('/.dockerenv').mockReturnValueOnce(true); + + expect(settings.isDocker).toBe(true); + + mockFs.existsSync.mockClear(); + expect(settings.isDocker).toBe(true); + expect(mockFs.existsSync).not.toHaveBeenCalled(); + }); + }); }); diff --git a/packages/core/test/NodeExecuteFunctions.test.ts b/packages/core/test/NodeExecuteFunctions.test.ts index f754abec58..b1b6e96577 100644 --- a/packages/core/test/NodeExecuteFunctions.test.ts +++ b/packages/core/test/NodeExecuteFunctions.test.ts @@ -1,3 +1,4 @@ +import FormData from 'form-data'; import { mkdtempSync, readFileSync } from 'fs'; import { IncomingMessage } from 'http'; import type { Agent } from 'https'; @@ -26,6 +27,7 @@ import { binaryToString, copyInputItems, getBinaryDataBuffer, + invokeAxios, isFilePathBlocked, parseContentDisposition, parseContentType, @@ -543,6 +545,46 @@ describe('NodeExecuteFunctions', () => { }); describe('parseRequestObject', () => { + test('should handle basic request options', async () => { + const axiosOptions = await parseRequestObject({ + url: 'https://example.com', + method: 'POST', + headers: { 'content-type': 'application/json' }, + body: { key: 'value' }, + }); + + expect(axiosOptions).toEqual( + expect.objectContaining({ + url: 'https://example.com', + method: 'POST', + headers: { accept: '*/*', 'content-type': 'application/json' }, + data: { key: 'value' }, + maxRedirects: 0, + }), + ); + }); + + test('should set correct headers for FormData', async () => { + const formData = new FormData(); + formData.append('key', 'value'); + + const axiosOptions = await parseRequestObject({ + url: 'https://example.com', + formData, + headers: { + 'content-type': 'multipart/form-data', + }, + }); + + expect(axiosOptions.headers).toMatchObject({ + accept: '*/*', + 'content-length': 163, + 'content-type': 
expect.stringMatching(/^multipart\/form-data; boundary=/), + }); + + expect(axiosOptions.data).toBeInstanceOf(FormData); + }); + test('should not use Host header for SNI', async () => { const axiosOptions = await parseRequestObject({ url: 'https://example.de/foo/bar', @@ -628,6 +670,77 @@ describe('NodeExecuteFunctions', () => { }); }); + describe('invokeAxios', () => { + const baseUrl = 'http://example.de'; + + beforeEach(() => { + nock.cleanAll(); + jest.clearAllMocks(); + }); + + it('should throw error for non-401 status codes', async () => { + nock(baseUrl).get('/test').reply(500, {}); + + await expect(invokeAxios({ url: `${baseUrl}/test` })).rejects.toThrow( + 'Request failed with status code 500', + ); + }); + + it('should throw error on 401 without digest auth challenge', async () => { + nock(baseUrl).get('/test').reply(401, {}); + + await expect( + invokeAxios( + { + url: `${baseUrl}/test`, + }, + { sendImmediately: false }, + ), + ).rejects.toThrow('Request failed with status code 401'); + }); + + it('should make successful requests', async () => { + nock(baseUrl).get('/test').reply(200, { success: true }); + + const response = await invokeAxios({ + url: `${baseUrl}/test`, + }); + + expect(response.status).toBe(200); + expect(response.data).toEqual({ success: true }); + }); + + it('should handle digest auth when receiving 401 with nonce', async () => { + nock(baseUrl) + .get('/test') + .matchHeader('authorization', 'Basic dXNlcjpwYXNz') + .once() + .reply(401, {}, { 'www-authenticate': 'Digest realm="test", nonce="abc123", qop="auth"' }); + + nock(baseUrl) + .get('/test') + .matchHeader( + 'authorization', + /^Digest username="user",realm="test",nonce="abc123",uri="\/test",qop="auth",algorithm="MD5",response="[0-9a-f]{32}"/, + ) + .reply(200, { success: true }); + + const response = await invokeAxios( + { + url: `${baseUrl}/test`, + auth: { + username: 'user', + password: 'pass', + }, + }, + { sendImmediately: false }, + ); + + expect(response.status).toBe(200); + expect(response.data).toEqual({ success: true }); + }); + }); + describe('copyInputItems', () => { it('should pick only selected properties', () => { const output = copyInputItems( diff --git a/packages/workflow/test/RoutingNode.test.ts b/packages/core/test/RoutingNode.test.ts similarity index 95% rename from packages/workflow/test/RoutingNode.test.ts rename to packages/core/test/RoutingNode.test.ts index a38f4551e2..45ef937803 100644 --- a/packages/workflow/test/RoutingNode.test.ts +++ b/packages/core/test/RoutingNode.test.ts @@ -1,29 +1,30 @@ import { mock } from 'jest-mock-extended'; - +import { get } from 'lodash'; import type { - INode, - INodeExecutionData, - INodeParameters, DeclarativeRestApiSettings, - IRunExecutionData, - INodeProperties, + IExecuteData, IExecuteSingleFunctions, IHttpRequestOptions, - ITaskDataConnections, - INodeExecuteFunctions, + IN8nHttpFullResponse, + IN8nHttpResponse, IN8nRequestOperations, + INode, INodeCredentialDescription, - IExecuteData, + INodeExecutionData, + INodeParameters, + INodeProperties, + INodeType, INodeTypeDescription, + IRunExecutionData, + ITaskDataConnections, IWorkflowExecuteAdditionalData, - IExecuteFunctions, -} from '@/Interfaces'; -import { applyDeclarativeNodeOptionParameters } from '@/NodeHelpers'; -import { RoutingNode } from '@/RoutingNode'; -import * as utilsModule from '@/utils'; -import { Workflow } from '@/Workflow'; +} from 'n8n-workflow'; +import { NodeHelpers, Workflow } from 'n8n-workflow'; -import * as Helpers from './Helpers'; +import * as 
executionContexts from '@/node-execution-context'; +import { RoutingNode } from '@/RoutingNode'; + +import { NodeTypes } from './helpers'; const postReceiveFunction1 = async function ( this: IExecuteSingleFunctions, @@ -42,14 +43,55 @@ const preSendFunction1 = async function ( return requestOptions; }; +const getExecuteSingleFunctions = ( + workflow: Workflow, + runExecutionData: IRunExecutionData, + runIndex: number, + node: INode, + itemIndex: number, +) => + mock({ + getItemIndex: () => itemIndex, + getNodeParameter: (parameterName: string) => + workflow.expression.getParameterValue( + get(node.parameters, parameterName), + runExecutionData, + runIndex, + itemIndex, + node.name, + [], + 'internal', + {}, + ), + getWorkflow: () => ({ + id: workflow.id, + name: workflow.name, + active: workflow.active, + }), + helpers: mock({ + async httpRequest( + requestOptions: IHttpRequestOptions, + ): Promise { + return { + body: { + headers: {}, + statusCode: 200, + requestOptions, + }, + }; + }, + }), + }); + describe('RoutingNode', () => { + const nodeTypes = NodeTypes(); const additionalData = mock(); test('applyDeclarativeNodeOptionParameters', () => { - const nodeTypes = Helpers.NodeTypes(); + const nodeTypes = NodeTypes(); const nodeType = nodeTypes.getByNameAndVersion('test.setMulti'); - applyDeclarativeNodeOptionParameters(nodeType); + NodeHelpers.applyDeclarativeNodeOptionParameters(nodeType); const options = nodeType.description.properties.find( (property) => property.name === 'requestOptions', @@ -667,7 +709,7 @@ describe('RoutingNode', () => { }, ]; - const nodeTypes = Helpers.NodeTypes(); + const nodeTypes = NodeTypes(); const node: INode = { parameters: {}, name: 'test', @@ -711,7 +753,7 @@ describe('RoutingNode', () => { mode, ); - const executeSingleFunctions = Helpers.getExecuteSingleFunctions( + const executeSingleFunctions = getExecuteSingleFunctions( workflow, runExecutionData, runIndex, @@ -1861,7 +1903,6 @@ describe('RoutingNode', () => { }, ]; - const nodeTypes = Helpers.NodeTypes(); const baseNode: INode = { parameters: {}, name: 'test', @@ -1877,7 +1918,7 @@ describe('RoutingNode', () => { const connectionInputData: INodeExecutionData[] = []; const runExecutionData: IRunExecutionData = { resultData: { runData: {} } }; const nodeType = nodeTypes.getByNameAndVersion(baseNode.type); - applyDeclarativeNodeOptionParameters(nodeType); + NodeHelpers.applyDeclarativeNodeOptionParameters(nodeType); const propertiesOriginal = nodeType.description.properties; @@ -1921,8 +1962,8 @@ describe('RoutingNode', () => { source: null, } as IExecuteData; - const executeFunctions = mock(); - const executeSingleFunctions = Helpers.getExecuteSingleFunctions( + const executeFunctions = mock(); + const executeSingleFunctions = getExecuteSingleFunctions( workflow, runExecutionData, runIndex, @@ -1930,10 +1971,10 @@ describe('RoutingNode', () => { itemIndex, ); - const nodeExecuteFunctions: Partial = { - getExecuteFunctions: () => executeFunctions, - getExecuteSingleFunctions: () => executeSingleFunctions, - }; + jest.spyOn(executionContexts, 'ExecuteContext').mockReturnValue(executeFunctions); + jest + .spyOn(executionContexts, 'ExecuteSingleContext') + .mockReturnValue(executeSingleFunctions); const numberOfItems = testData.input.specialTestOptions?.numberOfItems ?? 
1; if (!inputData.main[0] || inputData.main[0].length !== numberOfItems) { @@ -1943,7 +1984,8 @@ describe('RoutingNode', () => { } } - const spy = jest.spyOn(utilsModule, 'sleep').mockReturnValue( + const workflowPackage = await import('n8n-workflow'); + const spy = jest.spyOn(workflowPackage, 'sleep').mockReturnValue( new Promise((resolve) => { resolve(); }), @@ -1956,18 +1998,13 @@ describe('RoutingNode', () => { ); const getNodeParameter = executeSingleFunctions.getNodeParameter; + // @ts-expect-error overwriting a method executeSingleFunctions.getNodeParameter = (parameterName: string) => parameterName in testData.input.node.parameters ? testData.input.node.parameters[parameterName] : (getNodeParameter(parameterName) ?? {}); - const result = await routingNode.runNode( - inputData, - runIndex, - nodeType, - executeData, - nodeExecuteFunctions as INodeExecuteFunctions, - ); + const result = await routingNode.runNode(inputData, runIndex, nodeType, executeData); if (testData.input.specialTestOptions?.sleepCalls) { expect(spy.mock.calls).toEqual(testData.input.specialTestOptions?.sleepCalls); @@ -2042,7 +2079,6 @@ describe('RoutingNode', () => { }, ]; - const nodeTypes = Helpers.NodeTypes(); const baseNode: INode = { parameters: {}, name: 'test', @@ -2052,12 +2088,10 @@ describe('RoutingNode', () => { position: [0, 0], }; - const mode = 'internal'; const runIndex = 0; const itemIndex = 0; - const connectionInputData: INodeExecutionData[] = []; const runExecutionData: IRunExecutionData = { resultData: { runData: {} } }; - const nodeType = nodeTypes.getByNameAndVersion(baseNode.type); + const nodeType = mock(); const inputData: ITaskDataConnections = { main: [ @@ -2093,53 +2127,17 @@ describe('RoutingNode', () => { nodeTypes, }); - const routingNode = new RoutingNode( - workflow, - node, - connectionInputData, - runExecutionData ?? 
null, - additionalData, - mode, - ); - - const executeData = { - data: {}, - node, - source: null, - } as IExecuteData; - let currentItemIndex = 0; for (let iteration = 0; iteration < inputData.main[0]!.length; iteration++) { - const nodeExecuteFunctions: Partial = { - getExecuteSingleFunctions: () => { - return Helpers.getExecuteSingleFunctions( - workflow, - runExecutionData, - runIndex, - node, - itemIndex + iteration, - ); - }, - }; - - if (!nodeExecuteFunctions.getExecuteSingleFunctions) { - fail('Expected nodeExecuteFunctions to contain getExecuteSingleFunctions'); - } - - const routingNodeExecutionContext = nodeExecuteFunctions.getExecuteSingleFunctions( - routingNode.workflow, - routingNode.runExecutionData, + const context = getExecuteSingleFunctions( + workflow, + runExecutionData, runIndex, - routingNode.connectionInputData, - inputData, - routingNode.node, - iteration, - routingNode.additionalData, - executeData, - routingNode.mode, + node, + itemIndex + iteration, ); - - currentItemIndex = routingNodeExecutionContext.getItemIndex(); + jest.spyOn(executionContexts, 'ExecuteSingleContext').mockReturnValue(context); + currentItemIndex = context.getItemIndex(); } const expectedItemIndex = inputData.main[0]!.length - 1; diff --git a/packages/core/test/TriggersAndPollers.test.ts b/packages/core/test/TriggersAndPollers.test.ts new file mode 100644 index 0000000000..c30a0693a6 --- /dev/null +++ b/packages/core/test/TriggersAndPollers.test.ts @@ -0,0 +1,157 @@ +import { mock } from 'jest-mock-extended'; +import { ApplicationError } from 'n8n-workflow'; +import type { + Workflow, + INode, + INodeExecutionData, + IPollFunctions, + IWorkflowExecuteAdditionalData, + INodeType, + INodeTypes, + ITriggerFunctions, +} from 'n8n-workflow'; + +import { TriggersAndPollers } from '@/TriggersAndPollers'; + +describe('TriggersAndPollers', () => { + const node = mock(); + const nodeType = mock({ + trigger: undefined, + poll: undefined, + }); + const nodeTypes = mock(); + const workflow = mock({ nodeTypes }); + const additionalData = mock({ + hooks: { + hookFunctions: { + sendResponse: [], + }, + }, + }); + const triggersAndPollers = new TriggersAndPollers(); + + beforeEach(() => { + jest.clearAllMocks(); + nodeTypes.getByNameAndVersion.mockReturnValue(nodeType); + }); + + describe('runTrigger()', () => { + const triggerFunctions = mock(); + const getTriggerFunctions = jest.fn().mockReturnValue(triggerFunctions); + const triggerFn = jest.fn(); + + it('should throw error if node type does not have trigger function', async () => { + await expect( + triggersAndPollers.runTrigger( + workflow, + node, + getTriggerFunctions, + additionalData, + 'trigger', + 'init', + ), + ).rejects.toThrow(ApplicationError); + }); + + it('should call trigger function in regular mode', async () => { + nodeType.trigger = triggerFn; + triggerFn.mockResolvedValue({ test: true }); + + const result = await triggersAndPollers.runTrigger( + workflow, + node, + getTriggerFunctions, + additionalData, + 'trigger', + 'init', + ); + + expect(triggerFn).toHaveBeenCalled(); + expect(result).toEqual({ test: true }); + }); + + it('should handle manual mode with promise resolution', async () => { + const mockEmitData: INodeExecutionData[][] = [[{ json: { data: 'test' } }]]; + const mockTriggerResponse = { workflowId: '123' }; + + nodeType.trigger = triggerFn; + triggerFn.mockResolvedValue(mockTriggerResponse); + + const result = await triggersAndPollers.runTrigger( + workflow, + node, + getTriggerFunctions, + additionalData, + 'manual', + 
'init', + ); + + expect(result).toBeDefined(); + expect(result?.manualTriggerResponse).toBeInstanceOf(Promise); + + // Simulate emit + const mockTriggerFunctions = getTriggerFunctions.mock.results[0]?.value; + if (mockTriggerFunctions?.emit) { + mockTriggerFunctions.emit(mockEmitData); + } + }); + + it('should handle error emission in manual mode', async () => { + const testError = new Error('Test error'); + + nodeType.trigger = triggerFn; + triggerFn.mockResolvedValue({}); + + const result = await triggersAndPollers.runTrigger( + workflow, + node, + getTriggerFunctions, + additionalData, + 'manual', + 'init', + ); + + expect(result?.manualTriggerResponse).toBeInstanceOf(Promise); + + // Simulate error + const mockTriggerFunctions = getTriggerFunctions.mock.results[0]?.value; + if (mockTriggerFunctions?.emitError) { + mockTriggerFunctions.emitError(testError); + } + + await expect(result?.manualTriggerResponse).rejects.toThrow(testError); + }); + }); + + describe('runPoll()', () => { + const pollFunctions = mock(); + const pollFn = jest.fn(); + + it('should throw error if node type does not have poll function', async () => { + await expect(triggersAndPollers.runPoll(workflow, node, pollFunctions)).rejects.toThrow( + ApplicationError, + ); + }); + + it('should call poll function and return result', async () => { + const mockPollResult: INodeExecutionData[][] = [[{ json: { data: 'test' } }]]; + nodeType.poll = pollFn; + pollFn.mockResolvedValue(mockPollResult); + + const result = await triggersAndPollers.runPoll(workflow, node, pollFunctions); + + expect(pollFn).toHaveBeenCalled(); + expect(result).toBe(mockPollResult); + }); + + it('should return null if poll function returns no data', async () => { + nodeType.poll = pollFn; + pollFn.mockResolvedValue(null); + + const result = await triggersAndPollers.runPoll(workflow, node, pollFunctions); + + expect(pollFn).toHaveBeenCalled(); + expect(result).toBeNull(); + }); + }); +}); diff --git a/packages/core/test/WorkflowExecute.test.ts b/packages/core/test/WorkflowExecute.test.ts index 71be9a4891..6a826a8118 100644 --- a/packages/core/test/WorkflowExecute.test.ts +++ b/packages/core/test/WorkflowExecute.test.ts @@ -9,15 +9,31 @@ // XX denotes that the node is disabled // PD denotes that the node has pinned data -import type { IPinData, IRun, IRunData, WorkflowTestData } from 'n8n-workflow'; +import { mock } from 'jest-mock-extended'; +import { pick } from 'lodash'; +import type { + IExecuteData, + INode, + INodeType, + INodeTypes, + IPinData, + IRun, + IRunData, + IRunExecutionData, + ITriggerResponse, + IWorkflowExecuteAdditionalData, + WorkflowTestData, +} from 'n8n-workflow'; import { ApplicationError, createDeferredPromise, NodeExecutionOutput, + NodeHelpers, Workflow, } from 'n8n-workflow'; import { DirectedGraph } from '@/PartialExecutionUtils'; +import * as partialExecutionUtils from '@/PartialExecutionUtils'; import { createNodeData, toITaskData } from '@/PartialExecutionUtils/__tests__/helpers'; import { WorkflowExecute } from '@/WorkflowExecute'; @@ -324,5 +340,268 @@ describe('WorkflowExecute', () => { expect(nodes).toContain(node2.name); expect(nodes).not.toContain(node1.name); }); + + // ►► + // ┌────┐0 ┌─────────┐ + // ┌───────┐1 │ ├──────►afterLoop│ + // │trigger├───┬──►loop│1 └─────────┘ + // └───────┘ │ │ ├─┐ + // │ └────┘ │ + // │ │ ┌──────┐1 + // │ └─►inLoop├─┐ + // │ └──────┘ │ + // └────────────────────┘ + test('passes filtered run data to `recreateNodeExecutionStack`', async () => { + // ARRANGE + const waitPromise = 
createDeferredPromise(); + const nodeExecutionOrder: string[] = []; + const additionalData = Helpers.WorkflowExecuteAdditionalData(waitPromise, nodeExecutionOrder); + const workflowExecute = new WorkflowExecute(additionalData, 'manual'); + + const trigger = createNodeData({ name: 'trigger', type: 'n8n-nodes-base.manualTrigger' }); + const loop = createNodeData({ name: 'loop', type: 'n8n-nodes-base.splitInBatches' }); + const inLoop = createNodeData({ name: 'inLoop' }); + const afterLoop = createNodeData({ name: 'afterLoop' }); + const workflow = new DirectedGraph() + .addNodes(trigger, loop, inLoop, afterLoop) + .addConnections( + { from: trigger, to: loop }, + { from: loop, to: afterLoop }, + { from: loop, to: inLoop, outputIndex: 1 }, + { from: inLoop, to: loop }, + ) + .toWorkflow({ name: '', active: false, nodeTypes }); + + const pinData: IPinData = {}; + const runData: IRunData = { + [trigger.name]: [toITaskData([{ data: { value: 1 } }])], + [loop.name]: [toITaskData([{ data: { nodeName: loop.name }, outputIndex: 1 }])], + [inLoop.name]: [toITaskData([{ data: { nodeName: inLoop.name } }])], + }; + const dirtyNodeNames: string[] = []; + + jest.spyOn(workflowExecute, 'processRunExecutionData').mockImplementationOnce(jest.fn()); + const recreateNodeExecutionStackSpy = jest.spyOn( + partialExecutionUtils, + 'recreateNodeExecutionStack', + ); + + // ACT + await workflowExecute.runPartialWorkflow2( + workflow, + runData, + pinData, + dirtyNodeNames, + afterLoop.name, + ); + + // ASSERT + expect(recreateNodeExecutionStackSpy).toHaveBeenNthCalledWith( + 1, + expect.any(DirectedGraph), + expect.any(Set), + // The run data should only contain the trigger node because the loop + // node has no data on the done branch. That means we have to rerun the + // whole loop, because we don't know how many iterations would be left. 
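+ // (lodash's pick() keeps only runData[trigger.name], so this assertion fails if
+ // run data for the loop or inLoop nodes leaks through to recreateNodeExecutionStack.)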
+ pick(runData, trigger.name), + expect.any(Object), + ); + }); + + // ┌───────┐ ┌─────┐ + // │trigger├┬──►│node1│ + // └───────┘│ └─────┘ + // │ ┌─────┐ + // └──►│node2│ + // └─────┘ + test('passes subgraph to `cleanRunData`', async () => { + // ARRANGE + const waitPromise = createDeferredPromise(); + const nodeExecutionOrder: string[] = []; + const additionalData = Helpers.WorkflowExecuteAdditionalData(waitPromise, nodeExecutionOrder); + const workflowExecute = new WorkflowExecute(additionalData, 'manual'); + + const trigger = createNodeData({ name: 'trigger', type: 'n8n-nodes-base.manualTrigger' }); + const node1 = createNodeData({ name: 'node1' }); + const node2 = createNodeData({ name: 'node2' }); + const workflow = new DirectedGraph() + .addNodes(trigger, node1, node2) + .addConnections({ from: trigger, to: node1 }, { from: trigger, to: node2 }) + .toWorkflow({ name: '', active: false, nodeTypes }); + + const pinData: IPinData = {}; + const runData: IRunData = { + [trigger.name]: [toITaskData([{ data: { value: 1 } }])], + [node1.name]: [toITaskData([{ data: { nodeName: node1.name } }])], + [node2.name]: [toITaskData([{ data: { nodeName: node2.name } }])], + }; + const dirtyNodeNames: string[] = []; + + jest.spyOn(workflowExecute, 'processRunExecutionData').mockImplementationOnce(jest.fn()); + const cleanRunDataSpy = jest.spyOn(partialExecutionUtils, 'cleanRunData'); + + // ACT + await workflowExecute.runPartialWorkflow2( + workflow, + runData, + pinData, + dirtyNodeNames, + node1.name, + ); + + // ASSERT + expect(cleanRunDataSpy).toHaveBeenNthCalledWith( + 1, + runData, + new DirectedGraph().addNodes(trigger, node1).addConnections({ from: trigger, to: node1 }), + new Set([node1]), + ); + }); + }); + + describe('checkReadyForExecution', () => { + const disabledNode = mock({ name: 'Disabled Node', disabled: true }); + const startNode = mock({ name: 'Start Node' }); + const unknownNode = mock({ name: 'Unknown Node', type: 'unknownNode' }); + + const nodeParamIssuesSpy = jest.spyOn(NodeHelpers, 'getNodeParametersIssues'); + + const nodeTypes = mock(); + nodeTypes.getByNameAndVersion.mockImplementation((type) => { + // TODO: getByNameAndVersion signature needs to be updated to allow returning undefined + if (type === 'unknownNode') return undefined as unknown as INodeType; + return mock({ + description: { + properties: [], + }, + }); + }); + const workflowExecute = new WorkflowExecute(mock(), 'manual'); + + beforeEach(() => jest.clearAllMocks()); + + it('should return null if there are no nodes', () => { + const workflow = new Workflow({ + nodes: [], + connections: {}, + active: false, + nodeTypes, + }); + + const issues = workflowExecute.checkReadyForExecution(workflow); + expect(issues).toBe(null); + expect(nodeTypes.getByNameAndVersion).not.toHaveBeenCalled(); + expect(nodeParamIssuesSpy).not.toHaveBeenCalled(); + }); + + it('should return null if there are no enabled nodes', () => { + const workflow = new Workflow({ + nodes: [disabledNode], + connections: {}, + active: false, + nodeTypes, + }); + + const issues = workflowExecute.checkReadyForExecution(workflow, { + startNode: disabledNode.name, + }); + expect(issues).toBe(null); + expect(nodeTypes.getByNameAndVersion).toHaveBeenCalledTimes(1); + expect(nodeParamIssuesSpy).not.toHaveBeenCalled(); + }); + + it('should return typeUnknown for unknown nodes', () => { + const workflow = new Workflow({ + nodes: [unknownNode], + connections: {}, + active: false, + nodeTypes, + }); + + const issues = 
workflowExecute.checkReadyForExecution(workflow, { + startNode: unknownNode.name, + }); + expect(issues).toEqual({ [unknownNode.name]: { typeUnknown: true } }); + expect(nodeTypes.getByNameAndVersion).toHaveBeenCalledTimes(2); + expect(nodeParamIssuesSpy).not.toHaveBeenCalled(); + }); + + it('should return issues for regular nodes', () => { + const workflow = new Workflow({ + nodes: [startNode], + connections: {}, + active: false, + nodeTypes, + }); + nodeParamIssuesSpy.mockReturnValue({ execution: false }); + + const issues = workflowExecute.checkReadyForExecution(workflow, { + startNode: startNode.name, + }); + expect(issues).toEqual({ [startNode.name]: { execution: false } }); + expect(nodeTypes.getByNameAndVersion).toHaveBeenCalledTimes(2); + expect(nodeParamIssuesSpy).toHaveBeenCalled(); + }); + }); + + describe('runNode', () => { + const nodeTypes = mock(); + const triggerNode = mock(); + const triggerResponse = mock({ + closeFunction: jest.fn(), + // This node should never trigger, or return + manualTriggerFunction: async () => await new Promise(() => {}), + }); + const triggerNodeType = mock({ + description: { + properties: [], + }, + execute: undefined, + poll: undefined, + webhook: undefined, + async trigger() { + return triggerResponse; + }, + }); + + nodeTypes.getByNameAndVersion.mockReturnValue(triggerNodeType); + + const workflow = new Workflow({ + nodeTypes, + nodes: [triggerNode], + connections: {}, + active: false, + }); + + const executionData = mock(); + const runExecutionData = mock(); + const additionalData = mock(); + const abortController = new AbortController(); + const workflowExecute = new WorkflowExecute(additionalData, 'manual'); + + test('should call closeFunction when manual trigger is aborted', async () => { + const runPromise = workflowExecute.runNode( + workflow, + executionData, + runExecutionData, + 0, + additionalData, + 'manual', + abortController.signal, + ); + // Yield back to the event-loop to let async parts of `runNode` execute + await new Promise((resolve) => setImmediate(resolve)); + + let isSettled = false; + void runPromise.then(() => { + isSettled = true; + }); + expect(isSettled).toBe(false); + expect(abortController.signal.aborted).toBe(false); + expect(triggerResponse.closeFunction).not.toHaveBeenCalled(); + + abortController.abort(); + expect(triggerResponse.closeFunction).toHaveBeenCalled(); + }); }); }); diff --git a/packages/core/test/error-reporter.test.ts b/packages/core/test/error-reporter.test.ts new file mode 100644 index 0000000000..1f507ab5c0 --- /dev/null +++ b/packages/core/test/error-reporter.test.ts @@ -0,0 +1,103 @@ +import { QueryFailedError } from '@n8n/typeorm'; +import type { ErrorEvent } from '@sentry/types'; +import { AxiosError } from 'axios'; +import { ApplicationError } from 'n8n-workflow'; + +import { ErrorReporter } from '@/error-reporter'; + +jest.mock('@sentry/node', () => ({ + init: jest.fn(), + setTag: jest.fn(), + captureException: jest.fn(), + Integrations: {}, +})); + +jest.spyOn(process, 'on'); + +describe('ErrorReporter', () => { + const errorReporter = new ErrorReporter(); + const event = {} as ErrorEvent; + + describe('beforeSend', () => { + it('should ignore errors with level warning', async () => { + const originalException = new ApplicationError('test'); + originalException.level = 'warning'; + + expect(await errorReporter.beforeSend(event, { originalException })).toEqual(null); + }); + + it('should keep events with a cause with error level', async () => { + const cause = new Error('cause-error'); + 
const originalException = new ApplicationError('test', cause); + + expect(await errorReporter.beforeSend(event, { originalException })).toEqual(event); + }); + + it('should ignore events with error cause with warning level', async () => { + const cause: Error & { level?: 'warning' } = new Error('cause-error'); + cause.level = 'warning'; + const originalException = new ApplicationError('test', cause); + + expect(await errorReporter.beforeSend(event, { originalException })).toEqual(null); + }); + + it('should set level, extra, and tags from ApplicationError', async () => { + const originalException = new ApplicationError('Test error', { + level: 'error', + extra: { foo: 'bar' }, + tags: { tag1: 'value1' }, + }); + + const testEvent = {} as ErrorEvent; + + const result = await errorReporter.beforeSend(testEvent, { originalException }); + + expect(result).toEqual({ + level: 'error', + extra: { foo: 'bar' }, + tags: { tag1: 'value1' }, + }); + }); + + it('should deduplicate errors with same stack trace', async () => { + const originalException = new Error(); + + const firstResult = await errorReporter.beforeSend(event, { originalException }); + expect(firstResult).toEqual(event); + + const secondResult = await errorReporter.beforeSend(event, { originalException }); + expect(secondResult).toBeNull(); + }); + + it('should handle Promise rejections', async () => { + const originalException = Promise.reject(new Error()); + + const result = await errorReporter.beforeSend(event, { originalException }); + + expect(result).toEqual(event); + }); + + test.each([ + ['undefined', undefined], + ['null', null], + ['an AxiosError', new AxiosError()], + ['a rejected Promise with AxiosError', Promise.reject(new AxiosError())], + [ + 'a QueryFailedError with SQLITE_FULL', + new QueryFailedError('', [], new Error('SQLITE_FULL')), + ], + [ + 'a QueryFailedError with SQLITE_IOERR', + new QueryFailedError('', [], new Error('SQLITE_IOERR')), + ], + ['an ApplicationError with "warning" level', new ApplicationError('', { level: 'warning' })], + [ + 'an Error with ApplicationError as cause with "warning" level', + new Error('', { cause: new ApplicationError('', { level: 'warning' }) }), + ], + ])('should ignore if originalException is %s', async (_, originalException) => { + const result = await errorReporter.beforeSend(event, { originalException }); + expect(result).toBeNull(); + }); + }); +}); diff --git a/packages/core/test/helpers/constants.ts b/packages/core/test/helpers/constants.ts index 3043f455e9..e67caadd1e 100644 --- a/packages/core/test/helpers/constants.ts +++ b/packages/core/test/helpers/constants.ts @@ -11,6 +11,7 @@ import { ManualTrigger } from '../../../nodes-base/dist/nodes/ManualTrigger/Manu import { Merge } from '../../../nodes-base/dist/nodes/Merge/Merge.node'; import { NoOp } from '../../../nodes-base/dist/nodes/NoOp/NoOp.node'; import { Set } from '../../../nodes-base/dist/nodes/Set/Set.node'; +import { SplitInBatches } from '../../../nodes-base/dist/nodes/SplitInBatches/SplitInBatches.node'; import { Start } from '../../../nodes-base/dist/nodes/Start/Start.node'; export const predefinedNodesTypes: INodeTypeData = { @@ -38,6 +39,10 @@ export const predefinedNodesTypes: INodeTypeData = { type: new ManualTrigger(), sourcePath: '', }, + 'n8n-nodes-base.splitInBatches': { + type: new SplitInBatches(), + sourcePath: '', + }, 'n8n-nodes-base.versionTest': { sourcePath: '', type: { @@ -97,6 +102,89 @@ export const predefinedNodesTypes: INodeTypeData = { }, }, }, + 'test.set': { + sourcePath: '', + type: 
{ + description: { + displayName: 'Set', + name: 'set', + group: ['input'], + version: 1, + description: 'Sets a value', + defaults: { + name: 'Set', + color: '#0000FF', + }, + inputs: [NodeConnectionType.Main], + outputs: [NodeConnectionType.Main], + properties: [ + { + displayName: 'Value1', + name: 'value1', + type: 'string', + default: 'default-value1', + }, + { + displayName: 'Value2', + name: 'value2', + type: 'string', + default: 'default-value2', + }, + ], + }, + }, + }, + 'test.setMulti': { + sourcePath: '', + type: { + description: { + displayName: 'Set Multi', + name: 'setMulti', + group: ['input'], + version: 1, + description: 'Sets multiple values', + defaults: { + name: 'Set Multi', + color: '#0000FF', + }, + inputs: [NodeConnectionType.Main], + outputs: [NodeConnectionType.Main], + properties: [ + { + displayName: 'Values', + name: 'values', + type: 'fixedCollection', + typeOptions: { + multipleValues: true, + }, + default: {}, + options: [ + { + name: 'string', + displayName: 'String', + values: [ + { + displayName: 'Name', + name: 'name', + type: 'string', + default: 'propertyName', + placeholder: 'Name of the property to write data to.', + }, + { + displayName: 'Value', + name: 'value', + type: 'string', + default: '', + placeholder: 'The string value to write in the property.', + }, + ], + }, + ], + }, + ], + }, + }, + }, }; export const legacyWorkflowExecuteTests: WorkflowTestData[] = [ diff --git a/packages/core/test/helpers/index.ts b/packages/core/test/helpers/index.ts index 5f0858ea41..14f19789b0 100644 --- a/packages/core/test/helpers/index.ts +++ b/packages/core/test/helpers/index.ts @@ -17,12 +17,14 @@ import type { import { ApplicationError, NodeHelpers, WorkflowHooks } from 'n8n-workflow'; import path from 'path'; +import { UnrecognizedNodeTypeError } from '@/errors'; + import { predefinedNodesTypes } from './constants'; const BASE_DIR = path.resolve(__dirname, '../../..'); class NodeTypesClass implements INodeTypes { - constructor(private nodeTypes: INodeTypeData = predefinedNodesTypes) {} + constructor(private nodeTypes: INodeTypeData) {} getByName(nodeType: string): INodeType | IVersionedNodeType { return this.nodeTypes[nodeType].type; @@ -39,7 +41,7 @@ class NodeTypesClass implements INodeTypes { let nodeTypesInstance: NodeTypesClass | undefined; -export function NodeTypes(nodeTypes?: INodeTypeData): INodeTypes { +export function NodeTypes(nodeTypes: INodeTypeData = predefinedNodesTypes): INodeTypes { if (nodeTypesInstance === undefined || nodeTypes !== undefined) { nodeTypesInstance = new NodeTypesClass(nodeTypes); } @@ -102,12 +104,9 @@ export function getNodeTypes(testData: WorkflowTestData[] | WorkflowTestData) { ); for (const nodeName of nodeNames) { - if (!nodeName.startsWith('n8n-nodes-base.')) { - throw new ApplicationError('Unknown node type', { tags: { nodeType: nodeName } }); - } const loadInfo = knownNodes[nodeName.replace('n8n-nodes-base.', '')]; if (!loadInfo) { - throw new ApplicationError('Unknown node type', { tags: { nodeType: nodeName } }); + throw new UnrecognizedNodeTypeError('n8n-nodes-base', nodeName); } const sourcePath = loadInfo.sourcePath.replace(/^dist\//, './').replace(/\.js$/, '.ts'); const nodeSourcePath = path.join(BASE_DIR, 'nodes-base', sourcePath); diff --git a/packages/design-system/package.json b/packages/design-system/package.json index 9af2fa8204..47e5a2d13e 100644 --- a/packages/design-system/package.json +++ b/packages/design-system/package.json @@ -1,6 +1,6 @@ { "name": "n8n-design-system", - "version": "1.61.0", + 
"version": "1.62.0", "main": "src/main.ts", "import": "src/main.ts", "scripts": { diff --git a/packages/design-system/src/components/N8nMarkdown/Markdown.vue b/packages/design-system/src/components/N8nMarkdown/Markdown.vue index 1074d584bf..fa65a8cdc6 100644 --- a/packages/design-system/src/components/N8nMarkdown/Markdown.vue +++ b/packages/design-system/src/components/N8nMarkdown/Markdown.vue @@ -251,17 +251,9 @@ const onCheckboxChange = (index: number) => { } } - pre { - margin-bottom: var(--spacing-s); - display: grid; - } - pre > code { - display: block; - padding: var(--spacing-s); - color: var(--color-text-dark); background-color: var(--color-background-base); - overflow-x: auto; + color: var(--color-text-dark); } li > code, @@ -356,9 +348,8 @@ input[type='checkbox'] + label { } } - code { + pre > code { background-color: var(--color-sticky-code-background); - padding: 0 var(--spacing-4xs); color: var(--color-sticky-code-font); } @@ -385,6 +376,20 @@ input[type='checkbox'] + label { } } +.sticky, +.markdown { + pre { + margin-bottom: var(--spacing-s); + display: grid; + } + + pre > code { + display: block; + padding: var(--spacing-s); + overflow-x: auto; + } +} + .spacer { margin: var(--spacing-2xl); } diff --git a/packages/design-system/src/css/_tokens.scss b/packages/design-system/src/css/_tokens.scss index 87951534ee..b8f3049cf2 100644 --- a/packages/design-system/src/css/_tokens.scss +++ b/packages/design-system/src/css/_tokens.scss @@ -588,6 +588,7 @@ --border-width-base: 1px; --border-base: var(--border-width-base) var(--border-style-base) var(--color-foreground-base); + --font-size-4xs: 0.5rem; --font-size-3xs: 0.625rem; --font-size-2xs: 0.75rem; --font-size-xs: 0.8125rem; diff --git a/packages/editor-ui/.browserslistrc b/packages/editor-ui/.browserslistrc index 9dee646463..525ded22c2 100644 --- a/packages/editor-ui/.browserslistrc +++ b/packages/editor-ui/.browserslistrc @@ -1,3 +1,3 @@ -> 1% -last 2 versions -not ie <= 8 +defaults +cover 95% in US +cover 95% in alt-EU and last 2 years and not dead and fully supports es6-module and fully supports es6-module-dynamic-import diff --git a/packages/editor-ui/package.json b/packages/editor-ui/package.json index bccb5506b8..3cf113d98f 100644 --- a/packages/editor-ui/package.json +++ b/packages/editor-ui/package.json @@ -1,6 +1,6 @@ { "name": "n8n-editor-ui", - "version": "1.71.0", + "version": "1.72.0", "description": "Workflow Editor UI for n8n", "main": "index.js", "scripts": { @@ -40,9 +40,9 @@ "@n8n/codemirror-lang-sql": "^1.0.2", "@n8n/permissions": "workspace:*", "@sentry/vue": "catalog:frontend", - "@vue-flow/background": "^1.3.1", + "@vue-flow/background": "^1.3.2", "@vue-flow/controls": "^1.1.2", - "@vue-flow/core": "^1.41.4", + "@vue-flow/core": "^1.41.6", "@vue-flow/minimap": "^1.5.0", "@vue-flow/node-resizer": "^1.4.0", "@vueuse/components": "^10.11.0", @@ -95,12 +95,15 @@ "@types/lodash-es": "^4.17.6", "@types/luxon": "^3.2.0", "@types/uuid": "catalog:", + "@vitejs/plugin-legacy": "^6.0.0", "@vitejs/plugin-vue": "catalog:frontend", "@vitest/coverage-v8": "catalog:frontend", + "browserslist-to-esbuild": "^2.1.1", "miragejs": "^0.1.48", "unplugin-icons": "^0.19.0", "unplugin-vue-components": "^0.27.2", "vite": "catalog:frontend", + "vite-svg-loader": "5.1.0", "vitest": "catalog:frontend", "vitest-mock-extended": "catalog:frontend", "vue-tsc": "catalog:frontend" diff --git a/packages/editor-ui/public/static/logo/channel/beta-dark.svg b/packages/editor-ui/public/static/logo/channel/beta-dark.svg deleted file mode 100644 
index 7253fbe02b..0000000000 --- a/packages/editor-ui/public/static/logo/channel/beta-dark.svg +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/packages/editor-ui/public/static/logo/channel/beta.svg b/packages/editor-ui/public/static/logo/channel/beta.svg deleted file mode 100644 index d4fb970c62..0000000000 --- a/packages/editor-ui/public/static/logo/channel/beta.svg +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/packages/editor-ui/public/static/logo/channel/dev-dark.svg b/packages/editor-ui/public/static/logo/channel/dev-dark.svg deleted file mode 100644 index 23c8b05711..0000000000 --- a/packages/editor-ui/public/static/logo/channel/dev-dark.svg +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/packages/editor-ui/public/static/logo/channel/dev.svg b/packages/editor-ui/public/static/logo/channel/dev.svg deleted file mode 100644 index f8f2b2d437..0000000000 --- a/packages/editor-ui/public/static/logo/channel/dev.svg +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/packages/editor-ui/public/static/logo/channel/nightly-dark.svg b/packages/editor-ui/public/static/logo/channel/nightly-dark.svg deleted file mode 100644 index a1b5661dcb..0000000000 --- a/packages/editor-ui/public/static/logo/channel/nightly-dark.svg +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/packages/editor-ui/public/static/logo/channel/nightly.svg b/packages/editor-ui/public/static/logo/channel/nightly.svg deleted file mode 100644 index bb4746df56..0000000000 --- a/packages/editor-ui/public/static/logo/channel/nightly.svg +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/packages/editor-ui/public/static/logo/collapsed.svg b/packages/editor-ui/public/static/logo/collapsed.svg deleted file mode 100644 index f65407694a..0000000000 --- a/packages/editor-ui/public/static/logo/collapsed.svg +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/packages/editor-ui/public/static/logo/expanded-dark.svg b/packages/editor-ui/public/static/logo/expanded-dark.svg deleted file mode 100644 index d9a71faffe..0000000000 --- a/packages/editor-ui/public/static/logo/expanded-dark.svg +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/packages/editor-ui/public/static/logo/expanded.svg b/packages/editor-ui/public/static/logo/expanded.svg deleted file mode 100644 index 337b52be1c..0000000000 --- a/packages/editor-ui/public/static/logo/expanded.svg +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/packages/editor-ui/src/Interface.ts b/packages/editor-ui/src/Interface.ts index ea4e0122e6..a2a1e8e065 100644 --- a/packages/editor-ui/src/Interface.ts +++ b/packages/editor-ui/src/Interface.ts @@ -59,7 +59,7 @@ import type { ROLE, } from '@/constants'; import type { BulkCommand, Undoable } from '@/models/history'; -import type { PartialBy, TupleToUnion } from '@/utils/typeHelpers'; +import type { PartialBy } from '@/utils/typeHelpers'; import type { ProjectSharingData } from '@/types/projects.types'; @@ -249,6 +249,16 @@ export interface IWorkflowDataCreate extends IWorkflowDataUpdate { projectId?: string; } +/** + * Workflow data with mandatory `templateId` + * This is used to identify sample workflows that we create for onboarding + */ +export interface WorkflowDataWithTemplateId extends Omit { + meta: WorkflowMetadata & { + templateId: Required['templateId']; + }; +} + export interface IWorkflowToShare extends IWorkflowDataUpdate { meta: WorkflowMetadata; } @@ 
-1361,51 +1371,6 @@ export type SamlPreferencesExtractedData = { returnUrl: string; }; -export type SshKeyTypes = ['ed25519', 'rsa']; - -export type SourceControlPreferences = { - connected: boolean; - repositoryUrl: string; - branchName: string; - branches: string[]; - branchReadOnly: boolean; - branchColor: string; - publicKey?: string; - keyGeneratorType?: TupleToUnion; - currentBranch?: string; -}; - -export interface SourceControlStatus { - ahead: number; - behind: number; - conflicted: string[]; - created: string[]; - current: string; - deleted: string[]; - detached: boolean; - files: Array<{ - path: string; - index: string; - working_dir: string; - }>; - modified: string[]; - not_added: string[]; - renamed: string[]; - staged: string[]; - tracking: null; -} - -export interface SourceControlAggregatedFile { - conflict: boolean; - file: string; - id: string; - location: string; - name: string; - status: string; - type: string; - updatedAt?: string; -} - export declare namespace Cloud { export interface PlanData { planId: number; diff --git a/packages/editor-ui/src/__tests__/defaults.ts b/packages/editor-ui/src/__tests__/defaults.ts index dff54f420f..3771840d6a 100644 --- a/packages/editor-ui/src/__tests__/defaults.ts +++ b/packages/editor-ui/src/__tests__/defaults.ts @@ -126,5 +126,5 @@ export const defaultSettings: FrontendSettings = { enabled: false, }, betaFeatures: [], - virtualSchemaView: false, + easyAIWorkflowOnboarded: false, }; diff --git a/packages/editor-ui/src/__tests__/server/endpoints/sourceControl.ts b/packages/editor-ui/src/__tests__/server/endpoints/sourceControl.ts index 46f86fa693..653fbc3b51 100644 --- a/packages/editor-ui/src/__tests__/server/endpoints/sourceControl.ts +++ b/packages/editor-ui/src/__tests__/server/endpoints/sourceControl.ts @@ -2,7 +2,7 @@ import type { Server, Request } from 'miragejs'; import { Response } from 'miragejs'; import { jsonParse } from 'n8n-workflow'; import type { AppSchema } from '@/__tests__/server/types'; -import type { SourceControlPreferences } from '@/Interface'; +import type { SourceControlPreferences } from '@/types/sourceControl.types'; export function routesForSourceControl(server: Server) { const sourceControlApiRoot = '/rest/source-control'; diff --git a/packages/editor-ui/src/api/sourceControl.ts b/packages/editor-ui/src/api/sourceControl.ts index 615c7d6660..c8e4eebcbc 100644 --- a/packages/editor-ui/src/api/sourceControl.ts +++ b/packages/editor-ui/src/api/sourceControl.ts @@ -1,11 +1,12 @@ import type { IDataObject } from 'n8n-workflow'; +import type { IRestApiContext } from '@/Interface'; import type { - IRestApiContext, SourceControlAggregatedFile, SourceControlPreferences, SourceControlStatus, SshKeyTypes, -} from '@/Interface'; +} from '@/types/sourceControl.types'; + import { makeRestApiRequest } from '@/utils/apiUtils'; import type { TupleToUnion } from '@/utils/typeHelpers'; diff --git a/packages/editor-ui/src/components/ButtonParameter/utils.test.ts b/packages/editor-ui/src/components/ButtonParameter/utils.test.ts index df7e13d477..7453d41fb5 100644 --- a/packages/editor-ui/src/components/ButtonParameter/utils.test.ts +++ b/packages/editor-ui/src/components/ButtonParameter/utils.test.ts @@ -1,8 +1,10 @@ /* eslint-disable @typescript-eslint/no-explicit-any */ import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { generateCodeForAiTransform } from './utils'; +import { generateCodeForAiTransform, reducePayloadSizeOrThrow } from './utils'; import { createPinia, setActivePinia } from 'pinia'; 
import { generateCodeForPrompt } from '@/api/ai'; +import type { AskAiRequest } from '@/types/assistant.types'; +import type { Schema } from '@/Interface'; vi.mock('./utils', async () => { const actual = await vi.importActual('./utils'); @@ -86,3 +88,69 @@ describe('generateCodeForAiTransform - Retry Tests', () => { expect(generateCodeForPrompt).toHaveBeenCalledTimes(1); }); }); + +const mockPayload = () => + ({ + context: { + schema: [ + { nodeName: 'node1', data: 'some data' }, + { nodeName: 'node2', data: 'other data' }, + ], + inputSchema: { + schema: { + value: [ + { key: 'prop1', value: 'value1' }, + { key: 'prop2', value: 'value2' }, + ], + }, + }, + }, + question: 'What is node1 and prop1?', + }) as unknown as AskAiRequest.RequestPayload; + +describe('reducePayloadSizeOrThrow', () => { + it('reduces schema size when tokens exceed the limit', () => { + const payload = mockPayload(); + const error = new Error('Limit is 100 tokens, but 104 were provided'); + + reducePayloadSizeOrThrow(payload, error); + + expect(payload.context.schema.length).toBe(1); + expect(payload.context.schema[0]).toEqual({ nodeName: 'node1', data: 'some data' }); + }); + + it('removes unreferenced properties in input schema', () => { + const payload = mockPayload(); + const error = new Error('Limit is 100 tokens, but 150 were provided'); + + reducePayloadSizeOrThrow(payload, error); + + expect(payload.context.inputSchema.schema.value.length).toBe(1); + expect((payload.context.inputSchema.schema.value as Schema[])[0].key).toBe('prop1'); + }); + + it('removes all parent nodes if needed', () => { + const payload = mockPayload(); + const error = new Error('Limit is 100 tokens, but 150 were provided'); + + payload.question = ''; + + reducePayloadSizeOrThrow(payload, error); + + expect(payload.context.schema.length).toBe(0); + }); + + it('throws error if tokens still exceed after reductions', () => { + const payload = mockPayload(); + const error = new Error('Limit is 100 tokens, but 200 were provided'); + + expect(() => reducePayloadSizeOrThrow(payload, error)).toThrowError(error); + }); + + it('throws error if message format is invalid', () => { + const payload = mockPayload(); + const error = new Error('Invalid token message format'); + + expect(() => reducePayloadSizeOrThrow(payload, error)).toThrowError(error); + }); +}); diff --git a/packages/editor-ui/src/components/ButtonParameter/utils.ts b/packages/editor-ui/src/components/ButtonParameter/utils.ts index b95846975f..1044477bbb 100644 --- a/packages/editor-ui/src/components/ButtonParameter/utils.ts +++ b/packages/editor-ui/src/components/ButtonParameter/utils.ts @@ -57,6 +57,134 @@ export function getSchemas() { }; } +//------ Reduce payload ------ + +const estimateNumberOfTokens = (item: unknown, averageTokenLength: number): number => { + if (typeof item === 'object') { + return Math.ceil(JSON.stringify(item).length / averageTokenLength); + } + + return 0; +}; + +const calculateRemainingTokens = (error: Error) => { + // Expected message format: + //'This model's maximum context length is 8192 tokens. However, your messages resulted in 10514 tokens.' 
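+ // match(/\d+/g) on a message like the one above yields ['8192', '10514']; the first
+ // number is the model's limit and the second is the size of the current payload, so
+ // the difference computed below is how many tokens still need to be trimmed.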
+ const tokens = error.message.match(/\d+/g); + + if (!tokens || tokens.length < 2) throw error; + + const maxTokens = parseInt(tokens[0], 10); + const currentTokens = parseInt(tokens[1], 10); + + return currentTokens - maxTokens; +}; + +const trimParentNodesSchema = ( + payload: AskAiRequest.RequestPayload, + remainingTokensToReduce: number, + averageTokenLength: number, +) => { + //check if parent nodes schema takes more tokens than available + let parentNodesTokenCount = estimateNumberOfTokens(payload.context.schema, averageTokenLength); + + if (remainingTokensToReduce > parentNodesTokenCount) { + remainingTokensToReduce -= parentNodesTokenCount; + payload.context.schema = []; + } + + //remove parent nodes not referenced in the prompt + if (payload.context.schema.length) { + const nodes = [...payload.context.schema]; + + for (let nodeIndex = 0; nodeIndex < nodes.length; nodeIndex++) { + if (payload.question.includes(nodes[nodeIndex].nodeName)) continue; + + const nodeTokens = estimateNumberOfTokens(nodes[nodeIndex], averageTokenLength); + remainingTokensToReduce -= nodeTokens; + parentNodesTokenCount -= nodeTokens; + payload.context.schema.splice(nodeIndex, 1); + + if (remainingTokensToReduce <= 0) break; + } + } + + return [remainingTokensToReduce, parentNodesTokenCount]; +}; + +const trimInputSchemaProperties = ( + payload: AskAiRequest.RequestPayload, + remainingTokensToReduce: number, + averageTokenLength: number, + parentNodesTokenCount: number, +) => { + if (remainingTokensToReduce <= 0) return remainingTokensToReduce; + + //remove properties not referenced in the prompt from the input schema + if (Array.isArray(payload.context.inputSchema.schema.value)) { + const props = [...payload.context.inputSchema.schema.value]; + + for (let index = 0; index < props.length; index++) { + const key = props[index].key; + + if (key && payload.question.includes(key)) continue; + + const propTokens = estimateNumberOfTokens(props[index], averageTokenLength); + remainingTokensToReduce -= propTokens; + payload.context.inputSchema.schema.value.splice(index, 1); + + if (remainingTokensToReduce <= 0) break; + } + } + + //if tokensToReduce is still remaining, remove all parent nodes + if (remainingTokensToReduce > 0) { + payload.context.schema = []; + remainingTokensToReduce -= parentNodesTokenCount; + } + + return remainingTokensToReduce; +}; + +/** + * Attempts to reduce the size of the payload to fit within token limits or throws an error if unsuccessful, + * payload would be modified in place + * + * @param {AskAiRequest.RequestPayload} payload - The request payload to be trimmed, + * 'schema' and 'inputSchema.schema' will be modified. + * @param {Error} error - The error to throw if the token reduction fails. + * @param {number} [averageTokenLength=4] - The average token length used for estimation. + * @throws {Error} - Throws the provided error if the payload cannot be reduced sufficiently. 
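+ * @example
+ * // hypothetical usage; the error message format mirrors the tests above
+ * reducePayloadSizeOrThrow(payload, new Error('Limit is 100 tokens, but 150 were provided'));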
+ */ +export function reducePayloadSizeOrThrow( + payload: AskAiRequest.RequestPayload, + error: Error, + averageTokenLength = 4, +) { + try { + let remainingTokensToReduce = calculateRemainingTokens(error); + + const [remaining, parentNodesTokenCount] = trimParentNodesSchema( + payload, + remainingTokensToReduce, + averageTokenLength, + ); + + remainingTokensToReduce = remaining; + + remainingTokensToReduce = trimInputSchemaProperties( + payload, + remainingTokensToReduce, + averageTokenLength, + parentNodesTokenCount, + ); + + if (remainingTokensToReduce > 0) throw error; + } catch (e) { + throw e; + } +} + export async function generateCodeForAiTransform(prompt: string, path: string, retries = 1) { const schemas = getSchemas(); @@ -83,6 +211,11 @@ export async function generateCodeForAiTransform(prompt: string, path: string, r code = generatedCode; break; } catch (e) { + if (e.message.includes('maximum context length')) { + reducePayloadSizeOrThrow(payload, e); + continue; + } + retries--; if (!retries) throw e; } diff --git a/packages/editor-ui/src/components/CredentialEdit/CredentialEdit.vue b/packages/editor-ui/src/components/CredentialEdit/CredentialEdit.vue index f1b3620a97..e85184a11c 100644 --- a/packages/editor-ui/src/components/CredentialEdit/CredentialEdit.vue +++ b/packages/editor-ui/src/components/CredentialEdit/CredentialEdit.vue @@ -759,7 +759,10 @@ const createToastMessagingForNewCredentials = ( toastText = i18n.baseText('credentials.create.personal.toast.text'); } - if (projectsStore.currentProject) { + if ( + projectsStore.currentProject && + projectsStore.currentProject.id !== projectsStore.personalProject?.id + ) { toastTitle = i18n.baseText('credentials.create.project.toast.title', { interpolate: { projectName: project?.name ?? 
'' }, }); diff --git a/packages/editor-ui/src/components/ExpressionEditModal.vue b/packages/editor-ui/src/components/ExpressionEditModal.vue index e1f7309a71..07dcee3c3a 100644 --- a/packages/editor-ui/src/components/ExpressionEditModal.vue +++ b/packages/editor-ui/src/components/ExpressionEditModal.vue @@ -14,7 +14,7 @@ import type { INodeProperties } from 'n8n-workflow'; import { NodeConnectionType } from 'n8n-workflow'; import { outputTheme } from './ExpressionEditorModal/theme'; import ExpressionOutput from './InlineExpressionEditor/ExpressionOutput.vue'; -import RunDataSchema from './RunDataSchema.vue'; +import VirtualSchema from '@/components/VirtualSchema.vue'; import OutputItemSelect from './InlineExpressionEditor/OutputItemSelect.vue'; import { useI18n } from '@/composables/useI18n'; import { useDebounce } from '@/composables/useDebounce'; @@ -167,14 +167,13 @@ const onResizeThrottle = useThrottleFn(onResize, 10); - diff --git a/packages/editor-ui/src/components/Logo.vue b/packages/editor-ui/src/components/Logo.vue deleted file mode 100644 index 4909c77de7..0000000000 --- a/packages/editor-ui/src/components/Logo.vue +++ /dev/null @@ -1,22 +0,0 @@ - - - - - diff --git a/packages/editor-ui/src/components/Logo/Logo.vue b/packages/editor-ui/src/components/Logo/Logo.vue new file mode 100644 index 0000000000..f10c168e28 --- /dev/null +++ b/packages/editor-ui/src/components/Logo/Logo.vue @@ -0,0 +1,119 @@ + + + + + diff --git a/packages/editor-ui/src/components/Logo/__tests__/Logo.test.ts b/packages/editor-ui/src/components/Logo/__tests__/Logo.test.ts new file mode 100644 index 0000000000..49b8d1a22f --- /dev/null +++ b/packages/editor-ui/src/components/Logo/__tests__/Logo.test.ts @@ -0,0 +1,38 @@ +import { createComponentRenderer } from '@/__tests__/render'; +import Logo from '../Logo.vue'; + +vi.stubGlobal('URL', { + createObjectURL: vi.fn(), +}); + +describe('Logo', () => { + const renderComponent = createComponentRenderer(Logo); + + it('renders the logo for authView location', () => { + const wrapper = renderComponent({ + props: { location: 'authView', releaseChannel: 'stable' }, + }); + expect(wrapper.html()).toMatchSnapshot(); + }); + + it('renders the logo for sidebar location when sidebar is expanded', () => { + const wrapper = renderComponent({ + props: { location: 'sidebar', collapsed: false, releaseChannel: 'stable' }, + }); + expect(wrapper.html()).toMatchSnapshot(); + }); + + it('renders the logo for sidebar location when sidebar is collapsed', () => { + const wrapper = renderComponent({ + props: { location: 'sidebar', collapsed: true, releaseChannel: 'stable' }, + }); + expect(wrapper.html()).toMatchSnapshot(); + }); + + it('renders the releaseChannelTag for non-stable releaseChannel', async () => { + const wrapper = renderComponent({ + props: { location: 'authView', releaseChannel: 'dev' }, + }); + expect(wrapper.html()).toMatchSnapshot(); + }); +}); diff --git a/packages/editor-ui/src/components/Logo/__tests__/__snapshots__/Logo.test.ts.snap b/packages/editor-ui/src/components/Logo/__tests__/__snapshots__/Logo.test.ts.snap new file mode 100644 index 0000000000..5947e5d0a5 --- /dev/null +++ b/packages/editor-ui/src/components/Logo/__tests__/__snapshots__/Logo.test.ts.snap @@ -0,0 +1,49 @@ +// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html + +exports[`Logo > renders the logo for authView location 1`] = ` +"
diff --git a/packages/editor-ui/src/components/Logo/__tests__/__snapshots__/Logo.test.ts.snap b/packages/editor-ui/src/components/Logo/__tests__/__snapshots__/Logo.test.ts.snap
new file mode 100644
index 0000000000..5947e5d0a5
--- /dev/null
+++ b/packages/editor-ui/src/components/Logo/__tests__/__snapshots__/Logo.test.ts.snap
@@ -0,0 +1,49 @@
+// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html
+
+exports[`Logo > renders the logo for authView location 1`] = `
+"
+
+
+
+
+
+
+
"
+`;
+
+exports[`Logo > renders the logo for sidebar location when sidebar is collapsed 1`] = `
+""
+`;
+
+exports[`Logo > renders the logo for sidebar location when sidebar is expanded 1`] = `
+""
+`;
+
+exports[`Logo > renders the releaseChannelTag for non-stable releaseChannel 1`] = `
+"
+
+
+
+
+
+
dev
+
"
+`;
diff --git a/packages/editor-ui/src/components/Logo/logo-icon.svg b/packages/editor-ui/src/components/Logo/logo-icon.svg
new file mode 100644
index 0000000000..90a0081f12
--- /dev/null
+++ b/packages/editor-ui/src/components/Logo/logo-icon.svg
@@ -0,0 +1,3 @@
+
+
+
diff --git a/packages/editor-ui/src/components/Logo/logo-text.svg b/packages/editor-ui/src/components/Logo/logo-text.svg
new file mode 100644
index 0000000000..6b8c963894
--- /dev/null
+++ b/packages/editor-ui/src/components/Logo/logo-text.svg
@@ -0,0 +1,6 @@
+
+
+
+
+
+
diff --git a/packages/editor-ui/src/components/MainHeader/WorkflowDetails.vue b/packages/editor-ui/src/components/MainHeader/WorkflowDetails.vue
index 6725bbf92d..89ff66a32e 100644
--- a/packages/editor-ui/src/components/MainHeader/WorkflowDetails.vue
+++ b/packages/editor-ui/src/components/MainHeader/WorkflowDetails.vue
@@ -605,7 +605,11 @@ function showCreateWorkflowSuccessToast(id?: string) {
 if (!id || ['new', PLACEHOLDER_EMPTY_WORKFLOW_ID].includes(id)) {
 let toastTitle = locale.baseText('workflows.create.personal.toast.title');
 let toastText = locale.baseText('workflows.create.personal.toast.text');
-  if (projectsStore.currentProject) {
+
+  if (
+    projectsStore.currentProject &&
+    projectsStore.currentProject.id !== projectsStore.personalProject?.id
+  ) {
 toastTitle = locale.baseText('workflows.create.project.toast.title', {
 interpolate: { projectName: projectsStore.currentProject.name ?? '' },
 });
diff --git a/packages/editor-ui/src/components/MainSidebar.vue b/packages/editor-ui/src/components/MainSidebar.vue
index 9b294a85f1..40ace711e1 100644
--- a/packages/editor-ui/src/components/MainSidebar.vue
+++ b/packages/editor-ui/src/components/MainSidebar.vue
@@ -25,6 +25,7 @@ import { usePageRedirectionHelper } from '@/composables/usePageRedirectionHelper
 import { useGlobalEntityCreation } from '@/composables/useGlobalEntityCreation';
 import { N8nNavigationDropdown } from 'n8n-design-system';
 import { onClickOutside, type VueInstance } from '@vueuse/core';
+import Logo from './Logo/Logo.vue';
 const becomeTemplateCreatorStore = useBecomeTemplateCreatorStore();
 const cloudPlanStore = useCloudPlanStore();
@@ -165,10 +166,6 @@ const createBtn = ref>();
 const isCollapsed = computed(() => uiStore.sidebarMenuCollapsed);
-const logoPath = computed(
-  () => basePath.value + (isCollapsed.value ? 'static/logo/collapsed.svg' : uiStore.logo),
-);
-
 const hasVersionUpdates = computed(
   () => settingsStore.settings.releaseChannel === 'stable' && versionsStore.hasVersionUpdates,
 );
@@ -296,6 +293,7 @@ const {
 handleSelect: handleMenuSelect,
 createProjectAppendSlotName,
 projectsLimitReachedMessage,
+ upgradeLabel,
 } = useGlobalEntityCreation();
 onClickOutside(createBtn as Ref, () => {
 createBtn.value?.close();
 });
@@ -320,7 +318,11 @@ onClickOutside(createBtn as Ref, () => {
- n8n
+ , () => { type="tertiary" @click="handleMenuSelect(item.id)" >
- {{ i18n.baseText('generic.upgrade') }}
+ {{ upgradeLabel }}
@@ -450,15 +452,11 @@ onClickOutside(createBtn as Ref, () => {
 &.sideMenuCollapsed {
 width: $sidebar-width;
- padding-top: 90px;
+ padding-top: 100px;
 .logo {
 flex-direction: column;
- gap: 16px;
- }
-
- .logo img {
- left: 0;
+ gap: 12px;
 }
 }
 }
diff --git a/packages/editor-ui/src/components/MainSidebarSourceControl.vue b/packages/editor-ui/src/components/MainSidebarSourceControl.vue
index 1b792b4cd5..d00f5364fa 100644
--- a/packages/editor-ui/src/components/MainSidebarSourceControl.vue
+++ b/packages/editor-ui/src/components/MainSidebarSourceControl.vue
@@ -8,7 +8,7 @@ import { useLoadingService } from '@/composables/useLoadingService';
 import { useUIStore } from '@/stores/ui.store';
 import { useSourceControlStore } from '@/stores/sourceControl.store';
 import { SOURCE_CONTROL_PULL_MODAL_KEY, SOURCE_CONTROL_PUSH_MODAL_KEY } from '@/constants';
-import type { SourceControlAggregatedFile } from '../Interface';
+import type { SourceControlAggregatedFile } from '@/types/sourceControl.types';
 import { sourceControlEventBus } from '@/event-bus/source-control';
 defineProps<{
diff --git a/packages/editor-ui/src/components/Node.vue b/packages/editor-ui/src/components/Node.vue
index 1f1ea90ab5..f20724bd5f 100644
--- a/packages/editor-ui/src/components/Node.vue
+++ b/packages/editor-ui/src/components/Node.vue
@@ -9,7 +9,6 @@ import {
 SIMULATE_NODE_TYPE,
 SIMULATE_TRIGGER_NODE_TYPE,
 WAIT_NODE_TYPE,
- WAIT_TIME_UNLIMITED,
 } from '@/constants';
 import type {
 ExecutionSummary,
@@ -18,7 +17,12 @@ import type {
 NodeOperationError,
 Workflow,
 } from 'n8n-workflow';
-import { NodeConnectionType, NodeHelpers, SEND_AND_WAIT_OPERATION } from 'n8n-workflow';
+import {
+  NodeConnectionType,
+  NodeHelpers,
+  SEND_AND_WAIT_OPERATION,
+  WAIT_INDEFINITELY,
+} from 'n8n-workflow';
 import type { StyleValue } from 'vue';
 import { computed, onMounted, ref, watch } from 'vue';
 import xss from 'xss';
@@ -345,7 +349,7 @@ const waiting = computed(() => {
 return i18n.baseText('node.theNodeIsWaitingFormCall');
 }
 const waitDate = new Date(workflowExecution.waitTill);
- if (waitDate.toISOString() === WAIT_TIME_UNLIMITED) {
+ if (waitDate.getTime() === WAIT_INDEFINITELY.getTime()) {
 return i18n.baseText('node.theNodeIsWaitingIndefinitelyForAnIncomingWebhookCall');
 }
 return i18n.baseText('node.nodeIsWaitingTill', {
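In the `Node.vue` hunk above, the waiting state is now detected by comparing timestamps against the `WAIT_INDEFINITELY` date exported from `n8n-workflow`, instead of matching the ISO string constant `WAIT_TIME_UNLIMITED`. A small sketch of that pattern — the sentinel value shown is illustrative, not necessarily the real constant:

```ts
// Two Date objects are never equal under ===, so value equality is expressed
// through epoch milliseconds rather than through an exact ISO string match.
const WAIT_INDEFINITELY = new Date('3000-01-01T00:00:00.000Z'); // illustrative sentinel

function isWaitingIndefinitely(waitTill: string | Date): boolean {
  const waitDate = new Date(waitTill);
  return waitDate.getTime() === WAIT_INDEFINITELY.getTime();
}

// Example: a node parked "forever" until an incoming webhook resumes it.
isWaitingIndefinitely('3000-01-01T00:00:00.000Z'); // true
isWaitingIndefinitely(new Date());                 // false
```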
diff --git a/packages/editor-ui/src/components/Node/NodeCreator/ItemTypes/NodeItem.vue b/packages/editor-ui/src/components/Node/NodeCreator/ItemTypes/NodeItem.vue
index 9b4a8614d2..b982e4ecee 100644
--- a/packages/editor-ui/src/components/Node/NodeCreator/ItemTypes/NodeItem.vue
+++ b/packages/editor-ui/src/components/Node/NodeCreator/ItemTypes/NodeItem.vue
@@ -9,7 +9,6 @@ import {
 } from '@/constants';
 import { isCommunityPackageName } from '@/utils/nodeTypesUtils';
-import { getNewNodePosition, NODE_SIZE } from '@/utils/nodeViewUtils';
 import { useNodeCreatorStore } from '@/stores/nodeCreator.store';
 import NodeIcon from '@/components/NodeIcon.vue';
@@ -93,15 +92,6 @@ const isTrigger = computed(() => {
 });
 function onDragStart(event: DragEvent): void {
-  /**
-   * Workaround for firefox, that doesn't attach the pageX and pageY coordinates to "ondrag" event.
-   * All browsers attach the correct page coordinates to the "dragover" event.
-   * @bug https://bugzilla.mozilla.org/show_bug.cgi?id=505521
-   */
-  document.body.addEventListener('dragover', onDragOver);
-
-  const { pageX: x, pageY: y } = event;
-
 if (event.dataTransfer) {
 event.dataTransfer.effectAllowed = 'copy';
 event.dataTransfer.dropEffect = 'copy';
@@ -113,22 +103,9 @@ function onDragStart(event: DragEvent): void {
 }
 dragging.value = true;
-  draggablePosition.value = { x, y };
-}
-
-function onDragOver(event: DragEvent): void {
-  if (!dragging.value || (event.pageX === 0 && event.pageY === 0)) {
-    return;
-  }
-
-  const [x, y] = getNewNodePosition([], [event.pageX - NODE_SIZE / 2, event.pageY - NODE_SIZE / 2]);
-
-  draggablePosition.value = { x, y };
 }
 function onDragEnd(): void {
-  document.body.removeEventListener('dragover', onDragOver);
-
 dragging.value = false;
 setTimeout(() => {
 draggablePosition.value = { x: -100, y: -100 };
@@ -144,7 +121,7 @@ function onCommunityNodeTooltipClick(event: MouseEvent) {
-
+
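With the document-level `dragover` workaround removed above, the drag handler in `NodeItem.vue` only has to configure the native `DataTransfer` and flip a flag; drop coordinates come from the drag events themselves. A generic sketch of that shape — the payload key and ref name are illustrative, not n8n's actual implementation:

```ts
import { ref } from 'vue';

const dragging = ref(false);

function onDragStart(event: DragEvent, nodeType: string): void {
  if (event.dataTransfer) {
    event.dataTransfer.effectAllowed = 'copy';
    event.dataTransfer.dropEffect = 'copy';
    // The node type travels with the drag; no manual coordinate tracking is needed.
    event.dataTransfer.setData('text/plain', nodeType);
  }
  dragging.value = true;
}

function onDrop(event: DragEvent): void {
  // Drop targets receive clientX/clientY in all browsers, which is why the
  // Firefox-specific document.body "dragover" listener could be removed.
  const nodeType = event.dataTransfer?.getData('text/plain');
  const position = { x: event.clientX, y: event.clientY };
  dragging.value = false;
  console.log(nodeType, position);
}
```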
diff --git a/packages/editor-ui/src/components/Projects/ProjectHeader.test.ts b/packages/editor-ui/src/components/Projects/ProjectHeader.test.ts
index 21a4c8c52f..09b652dab9 100644
--- a/packages/editor-ui/src/components/Projects/ProjectHeader.test.ts
+++ b/packages/editor-ui/src/components/Projects/ProjectHeader.test.ts
@@ -1,5 +1,4 @@
 import { createTestingPinia } from '@pinia/testing';
-import { within } from '@testing-library/dom';
 import { createComponentRenderer } from '@/__tests__/render';
 import { mockedStore } from '@/__tests__/utils';
 import { createTestProject } from '@/__tests__/data/projects';
@@ -10,13 +9,19 @@ import { useProjectsStore } from '@/stores/projects.store';
 import type { Project } from '@/types/projects.types';
 import { ProjectTypes } from '@/types/projects.types';
 import { VIEWS } from '@/constants';
+import userEvent from '@testing-library/user-event';
+import { waitFor, within } from '@testing-library/vue';
+const mockPush = vi.fn();
 vi.mock('vue-router', async () => {
 const actual = await vi.importActual('vue-router');
 const params = {};
 const location = {};
 return {
 ...actual,
+  useRouter: () => ({
+    push: mockPush,
+  }),
 useRoute: () => ({
 params,
 location,
@@ -32,7 +37,6 @@ const renderComponent = createComponentRenderer(ProjectHeader, {
 global: {
 stubs: {
 ProjectTabs: projectTabsSpy,
- N8nNavigationDropdown: true,
 },
 },
 });
@@ -143,23 +147,45 @@ describe('ProjectHeader', () => {
 );
 });
- test.each([
-   [null, 'Create'],
-   [createTestProject({ type: ProjectTypes.Personal }), 'Create in personal'],
-   [createTestProject({ type: ProjectTypes.Team }), 'Create in project'],
- ])('in project %s should render correct create button label %s', (project, label) => {
-   projectsStore.currentProject = project;
-   const { getByTestId } = renderComponent({
-     global: {
-       stubs: {
-         N8nNavigationDropdown: {
-           template: '',
- },
- },
- },
 });
+ it('should create a workflow', async () => {
+   const project = createTestProject({
+     scopes: ['workflow:create'],
 });
+   projectsStore.currentProject = project;
-   expect(within(getByTestId('resource-add')).getByRole('button', { name: label })).toBeVisible();
+   const { getByTestId } = renderComponent();
+
+   await userEvent.click(getByTestId('add-resource-workflow'));
+
+   expect(mockPush).toHaveBeenCalledWith({
+     name: VIEWS.NEW_WORKFLOW,
+     query: { projectId: project.id },
+   });
+ });
+
+ describe('dropdown', () => {
+   it('should create a credential', async () => {
+     const project = createTestProject({
+       scopes: ['credential:create'],
+     });
+     projectsStore.currentProject = project;
+
+     const { getByTestId } = renderComponent();
+
+     await userEvent.click(within(getByTestId('add-resource')).getByRole('button'));
+
+     await waitFor(() => expect(getByTestId('action-credential')).toBeVisible());
+
+     await userEvent.click(getByTestId('action-credential'));
+
+     expect(mockPush).toHaveBeenCalledWith({
+       name: VIEWS.PROJECTS_CREDENTIALS,
+       params: {
+         projectId: project.id,
+         credentialId: 'create',
+       },
+     });
+   });
 });
 it('should not render creation button in setting page', async () => {
@@ -167,15 +193,7 @@
 vi.spyOn(router, 'useRoute').mockReturnValueOnce({
 name: VIEWS.PROJECT_SETTINGS,
 } as RouteLocationNormalizedLoadedGeneric);
- const { queryByTestId } = renderComponent({
-   global: {
-     stubs: {
-       N8nNavigationDropdown: {
-         template: '',
- },
- },
- },
- });
- expect(queryByTestId('resource-add')).not.toBeInTheDocument();
+ const { queryByTestId } = renderComponent();
+ expect(queryByTestId('add-resource-buttons')).not.toBeInTheDocument();
 });
 });
diff --git a/packages/editor-ui/src/components/Projects/ProjectHeader.vue b/packages/editor-ui/src/components/Projects/ProjectHeader.vue
index 977abe7393..8ef45239fc 100644
--- a/packages/editor-ui/src/components/Projects/ProjectHeader.vue
+++ b/packages/editor-ui/src/components/Projects/ProjectHeader.vue
@@ -1,21 +1,21 @@
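The `ProjectHeader.test.ts` changes above route navigation assertions through a mocked `useRouter` whose `push` is a spy. A condensed sketch of that mocking pattern — the route name used here is a placeholder, not one of n8n's `VIEWS` constants:

```ts
import { describe, expect, it, vi } from 'vitest';
import { useRouter } from 'vue-router';

const mockPush = vi.fn();

// Keep the real module, but swap the composables the component under test relies on.
vi.mock('vue-router', async () => {
  const actual = await vi.importActual<typeof import('vue-router')>('vue-router');
  return {
    ...actual,
    useRouter: () => ({ push: mockPush }),
    useRoute: () => ({ params: {}, query: {} }),
  };
});

describe('router mocking pattern', () => {
  it('records navigation through the push spy', () => {
    // In a component test, useRouter().push(...) is called from inside the component;
    // here it is exercised directly to keep the sketch self-contained.
    useRouter().push({ name: 'new-workflow', query: { projectId: '1' } });

    expect(mockPush).toHaveBeenCalledWith({
      name: 'new-workflow',
      query: { projectId: '1' },
    });
  });
});
```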