Merge branch 'master' of github.com:n8n-io/n8n into feature-sub-workflow-inputs

Ivan Atanasov 2024-12-05 11:32:50 +01:00
commit 8e374b4394
219 changed files with 7017 additions and 1794 deletions


@ -4,18 +4,70 @@ on:
schedule:
- cron: '0 2 * * *'
workflow_dispatch:
pull_request:
paths:
- packages/core/package.json
- packages/nodes-base/package.json
- packages/@n8n/nodes-langchain/package.json
- .github/workflows/test-workflows.yml
pull_request_review:
types: [submitted]
jobs:
run-test-workflows:
build:
name: Install & Build
runs-on: ubuntu-latest
timeout-minutes: 30
if: github.event_name != 'pull_request_review' || startsWith(github.event.pull_request.base.ref, 'release/')
steps:
- name: Checkout
uses: actions/checkout@v4.1.1
- uses: actions/checkout@v4.1.1
- run: corepack enable
- uses: actions/setup-node@v4.0.2
with:
path: n8n
node-version: 20.x
cache: 'pnpm'
- run: pnpm install --frozen-lockfile
- name: Setup build cache
uses: rharkor/caching-for-turbo@v1.5
- name: Build Backend
run: pnpm build:backend
- name: Cache build artifacts
uses: actions/cache/save@v4.0.0
with:
path: ./packages/**/dist
key: ${{ github.sha }}:workflow-tests
run-test-workflows:
name: Workflow Tests
runs-on: ubuntu-latest
needs: build
timeout-minutes: 10
steps:
- uses: actions/checkout@v4.1.1
- run: corepack enable
- uses: actions/setup-node@v4.0.2
with:
node-version: 20.x
cache: 'pnpm'
- run: pnpm install --frozen-lockfile
- name: Setup build cache
uses: rharkor/caching-for-turbo@v1.5
- name: Restore cached build artifacts
uses: actions/cache/restore@v4.0.0
with:
path: ./packages/**/dist
key: ${{ github.sha }}:workflow-tests
- name: Install OS dependencies
run: |
sudo apt update -y
echo 'tzdata tzdata/Areas select Europe' | sudo debconf-set-selections
echo 'tzdata tzdata/Zones/Europe select Paris' | sudo debconf-set-selections
DEBIAN_FRONTEND="noninteractive" sudo apt-get install -y graphicsmagick
- name: Checkout workflows repo
uses: actions/checkout@v4.1.1
@ -23,52 +75,24 @@ jobs:
repository: n8n-io/test-workflows
path: test-workflows
- run: corepack enable
working-directory: n8n
- uses: actions/setup-node@v4.0.2
with:
node-version: 20.x
cache: 'pnpm'
cache-dependency-path: 'n8n/pnpm-lock.yaml'
- name: Install dependencies
run: |
sudo apt update -y
echo 'tzdata tzdata/Areas select Europe' | sudo debconf-set-selections
echo 'tzdata tzdata/Zones/Europe select Paris' | sudo debconf-set-selections
DEBIAN_FRONTEND="noninteractive" sudo apt-get install -y graphicsmagick
shell: bash
- name: pnpm install and build
working-directory: n8n
run: |
pnpm install --frozen-lockfile
pnpm build:backend
shell: bash
- name: Import credentials
run: n8n/packages/cli/bin/n8n import:credentials --input=test-workflows/credentials.json
shell: bash
run: packages/cli/bin/n8n import:credentials --input=test-workflows/credentials.json
env:
N8N_ENCRYPTION_KEY: ${{secrets.ENCRYPTION_KEY}}
- name: Import workflows
run: n8n/packages/cli/bin/n8n import:workflow --separate --input=test-workflows/workflows
shell: bash
run: packages/cli/bin/n8n import:workflow --separate --input=test-workflows/workflows
env:
N8N_ENCRYPTION_KEY: ${{secrets.ENCRYPTION_KEY}}
- name: Copy static assets
run: |
cp n8n/assets/n8n-logo.png /tmp/n8n-logo.png
cp n8n/assets/n8n-screenshot.png /tmp/n8n-screenshot.png
cp assets/n8n-logo.png /tmp/n8n-logo.png
cp assets/n8n-screenshot.png /tmp/n8n-screenshot.png
cp test-workflows/testData/pdfs/*.pdf /tmp/
shell: bash
- name: Run tests
run: n8n/packages/cli/bin/n8n executeBatch --shallow --skipList=test-workflows/skipList.txt --githubWorkflow --shortOutput --concurrency=16 --compare=test-workflows/snapshots
shell: bash
run: packages/cli/bin/n8n executeBatch --shallow --skipList=test-workflows/skipList.txt --githubWorkflow --shortOutput --concurrency=16 --compare=test-workflows/snapshots
id: tests
env:
N8N_ENCRYPTION_KEY: ${{secrets.ENCRYPTION_KEY}}
@ -76,23 +100,6 @@ jobs:
DB_SQLITE_POOL_SIZE: 4
N8N_SENTRY_DSN: ${{secrets.CI_SENTRY_DSN}}
# -
# name: Export credentials
# if: always()
# run: n8n/packages/cli/bin/n8n export:credentials --output=test-workflows/credentials.json --all --pretty
# shell: bash
# env:
# N8N_ENCRYPTION_KEY: ${{secrets.ENCRYPTION_KEY}}
# -
# name: Commit and push credential changes
# if: always()
# run: |
# cd test-workflows
# git config --global user.name 'n8n test bot'
# git config --global user.email 'n8n-test-bot@users.noreply.github.com'
# git commit -am "Automated credential update"
# git push --force --quiet "https://janober:${{ secrets.TOKEN }}@github.com/n8n-io/test-workflows.git" main:main
- name: Notify Slack on failure
uses: act10ns/slack@v2.0.0
if: failure() && github.ref == 'refs/heads/master'


@ -1,3 +1,48 @@
# [1.71.0](https://github.com/n8n-io/n8n/compare/n8n@1.70.0...n8n@1.71.0) (2024-12-04)
### Bug Fixes
* **core:** Fix push for waiting executions ([#11984](https://github.com/n8n-io/n8n/issues/11984)) ([8d71307](https://github.com/n8n-io/n8n/commit/8d71307da0398e7e39bf53e8e1cfa21ac1ceaf69))
* **core:** Improve header parameter parsing on http client responses ([#11953](https://github.com/n8n-io/n8n/issues/11953)) ([41e9e39](https://github.com/n8n-io/n8n/commit/41e9e39b5b53ecd9d8d1b385df65a26ecb9bccd8))
* **core:** Opt-out from optimizations if `$item` is used ([#12036](https://github.com/n8n-io/n8n/issues/12036)) ([872535a](https://github.com/n8n-io/n8n/commit/872535a40c85dcfad3a4b27c57c026ae003f562f))
* **core:** Use the configured timezone in task runner ([#12032](https://github.com/n8n-io/n8n/issues/12032)) ([2e6845a](https://github.com/n8n-io/n8n/commit/2e6845afcbc30dff73c3f3f15f21278cab397387))
* **core:** Validate node name when creating `NodeOperationError` ([#11999](https://github.com/n8n-io/n8n/issues/11999)) ([e68c9da](https://github.com/n8n-io/n8n/commit/e68c9da30c31cd5f994cb01ce759192562bfbd40))
* **editor:** Add execution concurrency info and paywall ([#11847](https://github.com/n8n-io/n8n/issues/11847)) ([57d3269](https://github.com/n8n-io/n8n/commit/57d3269e400ee4e7e3636614870ebdfdb0aa8c1d))
* **editor:** Fix bug causing connection lines to disappear when hovering stickies ([#11950](https://github.com/n8n-io/n8n/issues/11950)) ([439a1cc](https://github.com/n8n-io/n8n/commit/439a1cc4f39243e91715b21a84b8e7266ce872cd))
* **editor:** Fix canvas keybindings using splitter keys such as zooming using `+` key ([#12022](https://github.com/n8n-io/n8n/issues/12022)) ([6af9c82](https://github.com/n8n-io/n8n/commit/6af9c82af6020e99d61e442ee9c2d40761baf027))
* **editor:** Fix community check ([#11979](https://github.com/n8n-io/n8n/issues/11979)) ([af0398a](https://github.com/n8n-io/n8n/commit/af0398a5e3a8987c01c7112e6f689b35e1ef92fe))
* **editor:** Fix copy/paste keyboard events in canvas chat ([#12004](https://github.com/n8n-io/n8n/issues/12004)) ([967340a](https://github.com/n8n-io/n8n/commit/967340a2938a79c89319121bf57a8d654f88e06c))
* **editor:** Fix node showing as successful if errors exists on subsequent runs ([#12019](https://github.com/n8n-io/n8n/issues/12019)) ([8616b17](https://github.com/n8n-io/n8n/commit/8616b17cc6c305da69bbb54fd56ab7cb34213f7c))
* **editor:** Fix pin data showing up in production executions on new canvas ([#11951](https://github.com/n8n-io/n8n/issues/11951)) ([5f6f8a1](https://github.com/n8n-io/n8n/commit/5f6f8a1bddfd76b586c08da821e8b59070f449fc))
* **editor:** Handle source control initialization to prevent UI from crashing ([#11776](https://github.com/n8n-io/n8n/issues/11776)) ([6be8e86](https://github.com/n8n-io/n8n/commit/6be8e86c45bd64d000bc95d2ef2d68220e930c02))
* **editor:** Implement dirty nodes for partial executions ([#11739](https://github.com/n8n-io/n8n/issues/11739)) ([b8da4ff](https://github.com/n8n-io/n8n/commit/b8da4ff9edb0fbb0093c4c41fe11f8e67b696ca3))
* **editor:** Resolve going back from Settings ([#11958](https://github.com/n8n-io/n8n/issues/11958)) ([d74423c](https://github.com/n8n-io/n8n/commit/d74423c75198d38d0d99a1879051b5e964ecae74))
* **editor:** Unify executions card label color ([#11949](https://github.com/n8n-io/n8n/issues/11949)) ([fc79718](https://github.com/n8n-io/n8n/commit/fc797188d63e87df34b3a153eb4a0d0b7361b3f5))
* **editor:** Use optional chaining for all members in execution data when using the debug feature ([#12024](https://github.com/n8n-io/n8n/issues/12024)) ([67aa0c9](https://github.com/n8n-io/n8n/commit/67aa0c9107bda16b1cb6d273e17c3cde77035f51))
* **GraphQL Node:** Throw error if GraphQL variables are not objects or strings ([#11904](https://github.com/n8n-io/n8n/issues/11904)) ([85f30b2](https://github.com/n8n-io/n8n/commit/85f30b27ae282da58a25186d13ff17196dcd7d9c))
* **HTTP Request Node:** Use iconv-lite to decode http responses, to support more encoding types ([#11930](https://github.com/n8n-io/n8n/issues/11930)) ([461b39c](https://github.com/n8n-io/n8n/commit/461b39c5df5dd446cb8ceef469b204c7c5111229))
* Load workflows with unconnected Switch outputs ([#12020](https://github.com/n8n-io/n8n/issues/12020)) ([abc851c](https://github.com/n8n-io/n8n/commit/abc851c0cff298607a0dc2f2882aa17136898f45))
* **n8n Form Node:** Use https to load google fonts ([#11948](https://github.com/n8n-io/n8n/issues/11948)) ([eccd924](https://github.com/n8n-io/n8n/commit/eccd924f5e8dbe59e37099d1a6fbe8866fef55bf))
* **Telegram Trigger Node:** Fix header secret check ([#12018](https://github.com/n8n-io/n8n/issues/12018)) ([f16de4d](https://github.com/n8n-io/n8n/commit/f16de4db01c0496205635a3203a44098e7908453))
* **Webflow Node:** Fix issue with pagination in v2 node ([#11934](https://github.com/n8n-io/n8n/issues/11934)) ([1eb94bc](https://github.com/n8n-io/n8n/commit/1eb94bcaf54d9e581856ce0b87253e1c28fa68e2))
* **Webflow Node:** Fix issue with publishing items ([#11982](https://github.com/n8n-io/n8n/issues/11982)) ([0a8a57e](https://github.com/n8n-io/n8n/commit/0a8a57e4ec8081ab1a53f36d686b3d5dcaae2476))
### Features
* **AI Transform Node:** Node Prompt improvements ([#11611](https://github.com/n8n-io/n8n/issues/11611)) ([40a7445](https://github.com/n8n-io/n8n/commit/40a7445f0873af2cdbd10b12bd691c07a43e27cc))
* **Code Node:** Warning if pairedItem absent or could not be auto mapped ([#11737](https://github.com/n8n-io/n8n/issues/11737)) ([3a5bd12](https://github.com/n8n-io/n8n/commit/3a5bd129459272cbac960ae2754db3028943f87e))
* **editor:** Canvas chat UI & UX improvements ([#11924](https://github.com/n8n-io/n8n/issues/11924)) ([1e25774](https://github.com/n8n-io/n8n/commit/1e25774541461c86da5c4af8efec792e2814eeb1))
* **editor:** Persist user's preferred display modes on localStorage ([#11929](https://github.com/n8n-io/n8n/issues/11929)) ([bd69316](https://github.com/n8n-io/n8n/commit/bd693162b86a21c90880bab2c2e67aab733095ff))
### Performance Improvements
* **editor:** Virtualize SchemaView ([#11694](https://github.com/n8n-io/n8n/issues/11694)) ([9c6def9](https://github.com/n8n-io/n8n/commit/9c6def91975764522fa52cdf21e9cb5bdb4d721d))
# [1.70.0](https://github.com/n8n-io/n8n/compare/n8n@1.69.0...n8n@1.70.0) (2024-11-27)


@ -40,6 +40,7 @@ export function saveCredential() {
.within(() => {
cy.get('button').should('not.exist');
});
getCredentialSaveButton().should('have.text', 'Saved');
}
export function closeCredentialModal() {


@ -76,6 +76,10 @@ export function getCanvasNodes() {
);
}
export function getCanvasNodeByName(nodeName: string) {
return getCanvasNodes().filter(`:contains(${nodeName})`);
}
export function getSaveButton() {
return cy.getByTestId('workflow-save-button');
}
@ -194,3 +198,8 @@ export function pasteWorkflow(workflow: object) {
export function clickZoomToFit() {
getZoomToFitButton().click();
}
export function deleteNode(name: string) {
getCanvasNodeByName(name).first().click();
cy.get('body').type('{del}');
}


@ -1,3 +1,4 @@
import { saveCredential } from '../composables/modals/credential-modal';
import * as projects from '../composables/projects';
import { INSTANCE_MEMBERS, INSTANCE_OWNER, INSTANCE_ADMIN, NOTION_NODE_NAME } from '../constants';
import {
@ -225,8 +226,7 @@ describe('Sharing', { disableAutoLogin: true }, () => {
.filter(':contains("Development")')
.should('have.length', 1)
.click();
credentialsModal.getters.saveButton().click();
credentialsModal.getters.saveButton().should('have.text', 'Saved');
saveCredential();
credentialsModal.actions.close();
projects.getProjectTabWorkflows().click();
@ -252,8 +252,7 @@ describe('Sharing', { disableAutoLogin: true }, () => {
credentialsModal.actions.changeTab('Sharing');
credentialsModal.getters.usersSelect().click();
getVisibleSelect().find('li').should('have.length', 4).first().click();
credentialsModal.getters.saveButton().click();
credentialsModal.getters.saveButton().should('have.text', 'Saved');
saveCredential();
credentialsModal.actions.close();
credentialsPage.getters


@ -1,5 +1,6 @@
import { type ICredentialType } from 'n8n-workflow';
import { getCredentialSaveButton, saveCredential } from '../composables/modals/credential-modal';
import {
AGENT_NODE_NAME,
AI_TOOL_HTTP_NODE_NAME,
@ -194,7 +195,7 @@ describe('Credentials', () => {
credentialsModal.getters.credentialsEditModal().should('be.visible');
credentialsModal.getters.name().click();
credentialsModal.actions.renameCredential(NEW_CREDENTIAL_NAME);
credentialsModal.getters.saveButton().click();
saveCredential();
credentialsModal.getters.closeButton().click();
workflowPage.getters
.nodeCredentialsSelect()
@ -212,7 +213,7 @@ describe('Credentials', () => {
credentialsModal.getters.credentialsEditModal().should('be.visible');
credentialsModal.getters.name().click();
credentialsModal.actions.renameCredential(NEW_CREDENTIAL_NAME2);
credentialsModal.getters.saveButton().click();
saveCredential();
credentialsModal.getters.closeButton().click();
workflowPage.getters
.nodeCredentialsSelect()
@ -237,7 +238,7 @@ describe('Credentials', () => {
credentialsModal.getters.credentialsEditModal().should('be.visible');
credentialsModal.getters.name().click();
credentialsModal.actions.renameCredential(NEW_CREDENTIAL_NAME);
credentialsModal.getters.saveButton().click();
saveCredential();
credentialsModal.getters.closeButton().click();
workflowPage.getters
.nodeCredentialsSelect()
@ -342,7 +343,8 @@ describe('Credentials', () => {
credentialsModal.getters.connectionParameter('Internal Integration Secret').type('1234567890');
credentialsModal.actions.setName('My awesome Notion account');
credentialsModal.getters.saveButton().click({ force: true });
getCredentialSaveButton().click();
errorToast().should('have.length', 1);
errorToast().should('be.visible');


@ -49,33 +49,47 @@ describe('Two-factor authentication', { disableAutoLogin: true }, () => {
cy.intercept('GET', '/rest/mfa/qr').as('getMfaQrCode');
});
it('Should be able to login with MFA token', () => {
it('Should be able to login with MFA code', () => {
const { email, password } = user;
signinPage.actions.loginWithEmailAndPassword(email, password);
personalSettingsPage.actions.enableMfa();
mainSidebar.actions.signout();
const token = generateOTPToken(user.mfaSecret);
mfaLoginPage.actions.loginWithMfaToken(email, password, token);
const mfaCode = generateOTPToken(user.mfaSecret);
mfaLoginPage.actions.loginWithMfaCode(email, password, mfaCode);
mainSidebar.actions.signout();
});
it('Should be able to login with recovery code', () => {
it('Should be able to login with MFA recovery code', () => {
const { email, password } = user;
signinPage.actions.loginWithEmailAndPassword(email, password);
personalSettingsPage.actions.enableMfa();
mainSidebar.actions.signout();
mfaLoginPage.actions.loginWithRecoveryCode(email, password, user.mfaRecoveryCodes[0]);
mfaLoginPage.actions.loginWithMfaRecoveryCode(email, password, user.mfaRecoveryCodes[0]);
mainSidebar.actions.signout();
});
it('Should be able to disable MFA in account', () => {
it('Should be able to disable MFA in account with MFA code', () => {
const { email, password } = user;
signinPage.actions.loginWithEmailAndPassword(email, password);
personalSettingsPage.actions.enableMfa();
mainSidebar.actions.signout();
const token = generateOTPToken(user.mfaSecret);
mfaLoginPage.actions.loginWithMfaToken(email, password, token);
personalSettingsPage.actions.disableMfa();
const mfaCode = generateOTPToken(user.mfaSecret);
mfaLoginPage.actions.loginWithMfaCode(email, password, mfaCode);
const disableToken = generateOTPToken(user.mfaSecret);
personalSettingsPage.actions.disableMfa(disableToken);
personalSettingsPage.getters.enableMfaButton().should('exist');
mainSidebar.actions.signout();
});
it('Should be able to disable MFA in account with recovery code', () => {
const { email, password } = user;
signinPage.actions.loginWithEmailAndPassword(email, password);
personalSettingsPage.actions.enableMfa();
mainSidebar.actions.signout();
const mfaCode = generateOTPToken(user.mfaSecret);
mfaLoginPage.actions.loginWithMfaCode(email, password, mfaCode);
personalSettingsPage.actions.disableMfa(user.mfaRecoveryCodes[0]);
personalSettingsPage.getters.enableMfaButton().should('exist');
mainSidebar.actions.signout();
});
});


@ -0,0 +1,17 @@
import {
deleteNode,
getCanvasNodes,
navigateToNewWorkflowPage,
pasteWorkflow,
} from '../composables/workflow';
import Workflow from '../fixtures/Switch_node_with_null_connection.json';
describe('ADO-2929 can load Switch nodes', () => {
it('can load workflows with Switch nodes with null at connection index', () => {
navigateToNewWorkflowPage();
pasteWorkflow(Workflow);
getCanvasNodes().should('have.length', 3);
deleteNode('Switch');
getCanvasNodes().should('have.length', 2);
});
});


@ -1,3 +1,4 @@
import { getCredentialSaveButton } from '../composables/modals/credential-modal';
import { CredentialsPage, CredentialsModal } from '../pages';
const credentialsPage = new CredentialsPage();
@ -40,7 +41,7 @@ describe('Credentials', () => {
});
// Check that the credential was saved and connected successfully
credentialsModal.getters.saveButton().should('contain.text', 'Saved');
getCredentialSaveButton().should('contain.text', 'Saved');
credentialsModal.getters.oauthConnectSuccessBanner().should('be.visible');
});
});


@ -0,0 +1,85 @@
{
"nodes": [
{
"parameters": {},
"id": "418350b8-b402-4d3b-93ba-3794d36c1ad5",
"name": "When clicking \"Test workflow\"",
"type": "n8n-nodes-base.manualTrigger",
"typeVersion": 1,
"position": [440, 380]
},
{
"parameters": {
"rules": {
"values": [
{
"conditions": {
"options": {
"caseSensitive": true,
"leftValue": "",
"typeValidation": "strict"
},
"conditions": [
{
"leftValue": "",
"rightValue": "",
"operator": {
"type": "string",
"operation": "equals"
}
}
],
"combinator": "and"
}
},
{},
{}
]
},
"options": {}
},
"id": "b67ad46f-6b0d-4ff4-b2d2-dfbde44e287c",
"name": "Switch",
"type": "n8n-nodes-base.switch",
"typeVersion": 3,
"position": [660, 380]
},
{
"parameters": {
"options": {}
},
"id": "24731c11-e2a4-4854-81a6-277ce72e8a93",
"name": "Edit Fields",
"type": "n8n-nodes-base.set",
"typeVersion": 3.3,
"position": [840, 480]
}
],
"connections": {
"When clicking \"Test workflow\"": {
"main": [
[
{
"node": "Switch",
"type": "main",
"index": 0
}
]
]
},
"Switch": {
"main": [
null,
null,
[
{
"node": "Edit Fields",
"type": "main",
"index": 0
}
]
]
}
},
"pinData": {}
}


@ -8,18 +8,18 @@ export class MfaLoginPage extends BasePage {
getters = {
form: () => cy.getByTestId('mfa-login-form'),
token: () => cy.getByTestId('token'),
recoveryCode: () => cy.getByTestId('recoveryCode'),
mfaCode: () => cy.getByTestId('mfaCode'),
mfaRecoveryCode: () => cy.getByTestId('mfaRecoveryCode'),
enterRecoveryCodeButton: () => cy.getByTestId('mfa-enter-recovery-code-button'),
};
actions = {
loginWithMfaToken: (email: string, password: string, mfaToken: string) => {
loginWithMfaCode: (email: string, password: string, mfaCode: string) => {
const signinPage = new SigninPage();
const workflowsPage = new WorkflowsPage();
cy.session(
[mfaToken],
[mfaCode],
() => {
cy.visit(signinPage.url);
@ -30,7 +30,7 @@ export class MfaLoginPage extends BasePage {
});
this.getters.form().within(() => {
this.getters.token().type(mfaToken);
this.getters.mfaCode().type(mfaCode);
});
// we should be redirected to /workflows
@ -43,12 +43,12 @@ export class MfaLoginPage extends BasePage {
},
);
},
loginWithRecoveryCode: (email: string, password: string, recoveryCode: string) => {
loginWithMfaRecoveryCode: (email: string, password: string, mfaRecoveryCode: string) => {
const signinPage = new SigninPage();
const workflowsPage = new WorkflowsPage();
cy.session(
[recoveryCode],
[mfaRecoveryCode],
() => {
cy.visit(signinPage.url);
@ -61,7 +61,7 @@ export class MfaLoginPage extends BasePage {
this.getters.enterRecoveryCodeButton().click();
this.getters.form().within(() => {
this.getters.recoveryCode().type(recoveryCode);
this.getters.mfaRecoveryCode().type(mfaRecoveryCode);
});
// we should be redirected to /workflows


@ -1,3 +1,4 @@
import { getCredentialSaveButton, saveCredential } from '../../composables/modals/credential-modal';
import { getVisibleSelect } from '../../utils';
import { BasePage } from '../base';
@ -13,8 +14,6 @@ export class CredentialsModal extends BasePage {
this.getters.credentialInputs().find(`:contains('${fieldName}') .n8n-input input`),
name: () => cy.getByTestId('credential-name'),
nameInput: () => cy.getByTestId('credential-name').find('input'),
// Saving of the credentials takes a while on the CI so we need to increase the timeout
saveButton: () => cy.getByTestId('credential-save-button', { timeout: 5000 }),
deleteButton: () => cy.getByTestId('credential-delete-button'),
closeButton: () => this.getters.editCredentialModal().find('.el-dialog__close').first(),
oauthConnectButton: () => cy.getByTestId('oauth-connect-button'),
@ -41,17 +40,17 @@ export class CredentialsModal extends BasePage {
},
save: (test = false) => {
cy.intercept('POST', '/rest/credentials').as('saveCredential');
this.getters.saveButton().click({ force: true });
saveCredential();
cy.wait('@saveCredential');
if (test) cy.wait('@testCredential');
this.getters.saveButton().should('contain.text', 'Saved');
getCredentialSaveButton().should('contain.text', 'Saved');
},
saveSharing: () => {
cy.intercept('PUT', '/rest/credentials/*/share').as('shareCredential');
this.getters.saveButton().click({ force: true });
saveCredential();
cy.wait('@shareCredential');
this.getters.saveButton().should('contain.text', 'Saved');
getCredentialSaveButton().should('contain.text', 'Saved');
},
close: () => {
this.getters.closeButton().click();
@ -65,7 +64,7 @@ export class CredentialsModal extends BasePage {
.each(($el) => {
cy.wrap($el).type('test');
});
this.getters.saveButton().click();
saveCredential();
if (closeModal) {
this.getters.closeButton().click();
}


@ -22,6 +22,8 @@ export class PersonalSettingsPage extends BasePage {
saveSettingsButton: () => cy.getByTestId('save-settings-button'),
enableMfaButton: () => cy.getByTestId('enable-mfa-button'),
disableMfaButton: () => cy.getByTestId('disable-mfa-button'),
mfaCodeOrMfaRecoveryCodeInput: () => cy.getByTestId('mfa-code-or-recovery-code-input'),
mfaSaveButton: () => cy.getByTestId('mfa-save-button'),
themeSelector: () => cy.getByTestId('theme-select'),
selectOptionsVisible: () => cy.get('.el-select-dropdown:visible .el-select-dropdown__item'),
};
@ -83,9 +85,11 @@ export class PersonalSettingsPage extends BasePage {
mfaSetupModal.getters.saveButton().click();
});
},
disableMfa: () => {
disableMfa: (mfaCodeOrRecoveryCode: string) => {
cy.visit(this.url);
this.getters.disableMfaButton().click();
this.getters.mfaCodeOrMfaRecoveryCodeInput().type(mfaCodeOrRecoveryCode);
this.getters.mfaSaveButton().click();
},
};
}


@ -33,7 +33,7 @@ COPY docker/images/n8n/docker-entrypoint.sh /
# Setup the Task Runner Launcher
ARG TARGETPLATFORM
ARG LAUNCHER_VERSION=0.6.0-rc
ARG LAUNCHER_VERSION=0.7.0-rc
COPY docker/images/n8n/n8n-task-runners.json /etc/n8n-task-runners.json
# Download, verify, then extract the launcher binary
RUN \


@ -24,7 +24,7 @@ RUN set -eux; \
# Setup the Task Runner Launcher
ARG TARGETPLATFORM
ARG LAUNCHER_VERSION=0.6.0-rc
ARG LAUNCHER_VERSION=0.7.0-rc
COPY n8n-task-runners.json /etc/n8n-task-runners.json
# Download, verify, then extract the launcher binary
RUN \


@ -7,6 +7,7 @@
"args": ["/usr/local/lib/node_modules/n8n/node_modules/@n8n/task-runner/dist/start.js"],
"allowed-env": [
"PATH",
"GENERIC_TIMEZONE",
"N8N_RUNNERS_GRANT_TOKEN",
"N8N_RUNNERS_N8N_URI",
"N8N_RUNNERS_MAX_PAYLOAD",


@ -1,6 +1,6 @@
{
"name": "n8n-monorepo",
"version": "1.70.0",
"version": "1.71.0",
"private": true,
"engines": {
"node": ">=20.15",
@ -80,7 +80,7 @@
"tslib": "^2.6.2",
"tsconfig-paths": "^4.2.0",
"typescript": "^5.7.2",
"vue-tsc": "^2.1.6",
"vue-tsc": "^2.1.10",
"ws": ">=8.17.1"
},
"patchedDependencies": {
@ -90,7 +90,7 @@
"@types/express-serve-static-core@4.17.43": "patches/@types__express-serve-static-core@4.17.43.patch",
"@types/ws@8.5.4": "patches/@types__ws@8.5.4.patch",
"@types/uuencode@0.0.3": "patches/@types__uuencode@0.0.3.patch",
"vue-tsc@2.1.6": "patches/vue-tsc@2.1.6.patch"
"vue-tsc@2.1.10": "patches/vue-tsc@2.1.10.patch"
}
}
}


@ -1,6 +1,6 @@
{
"name": "@n8n/api-types",
"version": "0.8.0",
"version": "0.9.0",
"scripts": {
"clean": "rimraf dist .turbo",
"dev": "pnpm watch",


@ -1,6 +1,6 @@
{
"name": "@n8n/chat",
"version": "0.31.0",
"version": "0.32.0",
"scripts": {
"dev": "pnpm run storybook",
"build": "pnpm build:vite && pnpm build:bundle",
@ -46,11 +46,12 @@
"devDependencies": {
"@iconify-json/mdi": "^1.1.54",
"@n8n/storybook": "workspace:*",
"@vitejs/plugin-vue": "catalog:frontend",
"@vitest/coverage-v8": "catalog:frontend",
"unplugin-icons": "^0.19.0",
"vite": "catalog:frontend",
"vitest": "catalog:frontend",
"vite-plugin-dts": "^4.2.3",
"vite-plugin-dts": "^4.3.0",
"vue-tsc": "catalog:frontend"
},
"files": [


@ -1,6 +1,6 @@
{
"name": "@n8n/config",
"version": "1.20.0",
"version": "1.21.0",
"scripts": {
"clean": "rimraf dist .turbo",
"dev": "pnpm watch",


@ -7,5 +7,5 @@ export type FrontendBetaFeatures = 'canvas_v2';
export class FrontendConfig {
/** Which UI experiments to enable. Separate multiple values with a comma `,` */
@Env('N8N_UI_BETA_FEATURES')
betaFeatures: StringArray<FrontendBetaFeatures> = [];
betaFeatures: StringArray<FrontendBetaFeatures> = ['canvas_v2'];
}


@ -6,10 +6,6 @@ export class WorkflowsConfig {
@Env('WORKFLOWS_DEFAULT_NAME')
defaultName: string = 'My workflow';
/** Show onboarding flow in new workflow */
@Env('N8N_ONBOARDING_FLOW_DISABLED')
onboardingFlowDisabled: boolean = false;
/** Default option for which workflows may call the current workflow */
@Env('N8N_WORKFLOW_CALLER_POLICY_DEFAULT_OPTION')
callerPolicyDefaultOption: 'any' | 'none' | 'workflowsFromAList' | 'workflowsFromSameOwner' =


@ -150,7 +150,6 @@ describe('GlobalConfig', () => {
},
workflows: {
defaultName: 'My workflow',
onboardingFlowDisabled: false,
callerPolicyDefaultOption: 'workflowsFromSameOwner',
},
endpoints: {


@ -1,6 +1,6 @@
{
"name": "@n8n/imap",
"version": "0.7.0",
"version": "0.8.0",
"scripts": {
"clean": "rimraf dist .turbo",
"dev": "pnpm watch",


@ -1,6 +1,6 @@
{
"name": "@n8n/json-schema-to-zod",
"version": "1.1.0",
"version": "1.2.0",
"description": "Converts JSON schema objects into Zod schemas",
"types": "./dist/types/index.d.ts",
"main": "./dist/cjs/index.js",


@ -1,2 +1,2 @@
export type * from './types';
export { jsonSchemaToZod } from './json-schema-to-zod.js';
export { jsonSchemaToZod } from './json-schema-to-zod';


@ -1,8 +1,10 @@
import type { ZodObjectAny } from '@langchain/core/dist/types/zod';
import type { z } from 'zod';
import type { BaseOutputParser } from '@langchain/core/output_parsers';
import type { DynamicStructuredTool, Tool } from 'langchain/tools';
import { NodeOperationError, type IExecuteFunctions, type INode } from 'n8n-workflow';
type ZodObjectAny = z.ZodObject<any, any, any, any>;
export async function extractParsedOutput(
ctx: IExecuteFunctions,
outputParser: BaseOutputParser<unknown>,


@ -1,6 +1,6 @@
{
"name": "@n8n/n8n-nodes-langchain",
"version": "1.70.0",
"version": "1.71.0",
"description": "",
"main": "index.js",
"scripts": {
@ -135,47 +135,47 @@
"@getzep/zep-js": "0.9.0",
"@google-ai/generativelanguage": "2.6.0",
"@google-cloud/resource-manager": "5.3.0",
"@google/generative-ai": "0.19.0",
"@google/generative-ai": "0.21.0",
"@huggingface/inference": "2.8.0",
"@langchain/anthropic": "0.3.7",
"@langchain/aws": "0.1.1",
"@langchain/anthropic": "0.3.8",
"@langchain/aws": "0.1.2",
"@langchain/cohere": "0.3.1",
"@langchain/community": "0.3.11",
"@langchain/community": "0.3.15",
"@langchain/core": "catalog:",
"@langchain/google-genai": "0.1.2",
"@langchain/google-vertexai": "0.1.0",
"@langchain/google-genai": "0.1.4",
"@langchain/google-vertexai": "0.1.3",
"@langchain/groq": "0.1.2",
"@langchain/mistralai": "0.1.1",
"@langchain/ollama": "0.1.1",
"@langchain/openai": "0.3.11",
"@langchain/pinecone": "0.1.1",
"@langchain/qdrant": "0.1.0",
"@langchain/mistralai": "0.2.0",
"@langchain/ollama": "0.1.2",
"@langchain/openai": "0.3.14",
"@langchain/pinecone": "0.1.3",
"@langchain/qdrant": "0.1.1",
"@langchain/redis": "0.1.0",
"@langchain/textsplitters": "0.1.0",
"@mozilla/readability": "0.5.0",
"@n8n/json-schema-to-zod": "workspace:*",
"@n8n/typeorm": "0.3.20-12",
"@n8n/vm2": "3.9.25",
"@pinecone-database/pinecone": "3.0.3",
"@pinecone-database/pinecone": "4.0.0",
"@qdrant/js-client-rest": "1.11.0",
"@supabase/supabase-js": "2.45.4",
"@xata.io/client": "0.28.4",
"basic-auth": "catalog:",
"cheerio": "1.0.0",
"cohere-ai": "7.13.2",
"cohere-ai": "7.14.0",
"d3-dsv": "2.0.0",
"epub2": "3.0.2",
"form-data": "catalog:",
"generate-schema": "2.6.0",
"html-to-text": "9.0.5",
"jsdom": "23.0.1",
"langchain": "0.3.5",
"langchain": "0.3.6",
"lodash": "catalog:",
"mammoth": "1.7.2",
"mime-types": "2.1.35",
"n8n-nodes-base": "workspace:*",
"n8n-workflow": "workspace:*",
"openai": "4.69.0",
"openai": "4.73.1",
"pdf-parse": "1.1.1",
"pg": "8.12.0",
"redis": "4.6.12",


@ -32,7 +32,9 @@ export class N8nStructuredOutputParser extends StructuredOutputParser<
[{ json: { action: 'parse', text } }],
]);
try {
const parsed = await super.parse(text);
const jsonString = text.includes('```') ? text.split(/```(?:json)?/)[1] : text;
const json = JSON.parse(jsonString.trim());
const parsed = await this.schema.parseAsync(json);
const result = (get(parsed, [STRUCTURED_OUTPUT_KEY, STRUCTURED_OUTPUT_OBJECT_KEY]) ??
get(parsed, [STRUCTURED_OUTPUT_KEY, STRUCTURED_OUTPUT_ARRAY_KEY]) ??


@ -3,19 +3,19 @@
"private": true,
"version": "0.0.1",
"devDependencies": {
"@chromatic-com/storybook": "^2.0.2",
"@storybook/addon-a11y": "^8.3.5",
"@storybook/addon-actions": "^8.3.5",
"@storybook/addon-docs": "^8.3.5",
"@storybook/addon-essentials": "^8.3.5",
"@storybook/addon-interactions": "^8.3.5",
"@storybook/addon-links": "^8.3.5",
"@storybook/addon-themes": "^8.3.5",
"@storybook/blocks": "^8.3.5",
"@storybook/test": "^8.3.5",
"@storybook/vue3": "^8.3.5",
"@storybook/vue3-vite": "^8.3.5",
"chromatic": "^11.10.2",
"storybook": "^8.3.5"
"@chromatic-com/storybook": "^3.2.2",
"@storybook/addon-a11y": "^8.4.6",
"@storybook/addon-actions": "^8.4.6",
"@storybook/addon-docs": "^8.4.6",
"@storybook/addon-essentials": "^8.4.6",
"@storybook/addon-interactions": "^8.4.6",
"@storybook/addon-links": "^8.4.6",
"@storybook/addon-themes": "^8.4.6",
"@storybook/blocks": "^8.4.6",
"@storybook/test": "^8.4.6",
"@storybook/vue3": "^8.4.6",
"@storybook/vue3-vite": "^8.4.6",
"chromatic": "^11.20.0",
"storybook": "^8.4.6"
}
}


@ -1,6 +1,6 @@
{
"name": "@n8n/task-runner",
"version": "1.8.0",
"version": "1.9.0",
"scripts": {
"clean": "rimraf dist .turbo",
"start": "node dist/start.js",


@ -34,6 +34,9 @@ export class BaseRunnerConfig {
@Env('N8N_RUNNERS_AUTO_SHUTDOWN_TIMEOUT')
idleTimeout: number = 0;
@Env('GENERIC_TIMEZONE')
timezone: string = 'America/New_York';
@Nested
healthcheckServer!: HealthcheckServerConfig;
}


@ -1,5 +1,5 @@
import { DateTime } from 'luxon';
import type { CodeExecutionMode, IDataObject } from 'n8n-workflow';
import { setGlobalState, type CodeExecutionMode, type IDataObject } from 'n8n-workflow';
import fs from 'node:fs';
import { builtinModules } from 'node:module';
@ -326,6 +326,43 @@ describe('JsTaskRunner', () => {
});
});
describe('timezone', () => {
it('should use the specified timezone in the workflow', async () => {
const taskData = newDataRequestResponse(inputItems.map(wrapIntoJson), {});
taskData.workflow.settings = {
timezone: 'Europe/Helsinki',
};
const outcome = await execTaskWithParams({
task: newTaskWithSettings({
code: 'return { val: $now.toSeconds() }',
nodeMode: 'runOnceForAllItems',
}),
taskData,
});
const helsinkiTimeNow = DateTime.now().setZone('Europe/Helsinki').toSeconds();
expect(outcome.result[0].json.val).toBeCloseTo(helsinkiTimeNow, 1);
});
it('should use the default timezone', async () => {
setGlobalState({
defaultTimezone: 'Europe/Helsinki',
});
const outcome = await execTaskWithParams({
task: newTaskWithSettings({
code: 'return { val: $now.toSeconds() }',
nodeMode: 'runOnceForAllItems',
}),
taskData: newDataRequestResponse(inputItems.map(wrapIntoJson), {}),
});
const helsinkiTimeNow = DateTime.now().setZone('Europe/Helsinki').toSeconds();
expect(outcome.result[0].json.val).toBeCloseTo(helsinkiTimeNow, 1);
});
});
it('should allow access to Node.js Buffers', async () => {
const outcomeAll = await execTaskWithParams({
task: newTaskWithSettings({


@ -17,7 +17,7 @@ describe('BuiltInsParser', () => {
const parseAndExpectOk = (code: string) => {
const result = parser.parseUsedBuiltIns(code);
if (!result.ok) {
fail(result.error);
throw result.error;
}
return result.result;
@ -151,6 +151,13 @@ describe('BuiltInsParser', () => {
});
});
describe('$item', () => {
it('should require all nodes and input when $item is used', () => {
const state = parseAndExpectOk('$item("0").$node["my node"].json["title"]');
expect(state).toEqual(new BuiltInsParserState({ needsAllNodes: true, needs$input: true }));
});
});
describe('ECMAScript syntax', () => {
describe('ES2020', () => {
it('should parse optional chaining', () => {


@ -125,6 +125,11 @@ export class BuiltInsParser {
private visitIdentifier = (node: Identifier, state: BuiltInsParserState) => {
if (node.name === '$env') {
state.markEnvAsNeeded();
} else if (node.name === '$item') {
// $item is legacy syntax that is basically an alias for WorkflowDataProxy
// and allows accessing any data. We need to support it for backwards
// compatibility, but we're not gonna implement any optimizations
state.markNeedsAllNodes();
} else if (
node.name === '$input' ||
node.name === '$json' ||


@ -1,4 +1,4 @@
import { ensureError } from 'n8n-workflow';
import { ensureError, setGlobalState } from 'n8n-workflow';
import Container from 'typedi';
import { MainConfig } from './config/main-config';
@ -44,6 +44,10 @@ function createSignalHandler(signal: string) {
void (async function start() {
const config = Container.get(MainConfig);
setGlobalState({
defaultTimezone: config.baseRunnerConfig.timezone,
});
if (config.sentryConfig.sentryDsn) {
const { ErrorReporter } = await import('@/error-reporter');
errorReporter = new ErrorReporter(config.sentryConfig);


@ -1,6 +1,6 @@
{
"name": "n8n",
"version": "1.70.0",
"version": "1.71.0",
"description": "n8n Workflow Automation Tool",
"main": "dist/index",
"types": "dist/index.d.ts",
@ -94,7 +94,7 @@
"@n8n/permissions": "workspace:*",
"@n8n/task-runner": "workspace:*",
"@n8n/typeorm": "0.3.20-12",
"@n8n_io/ai-assistant-sdk": "1.10.3",
"@n8n_io/ai-assistant-sdk": "1.12.0",
"@n8n_io/license-sdk": "2.13.1",
"@oclif/core": "4.0.7",
"@rudderstack/rudder-sdk-node": "2.0.9",


@ -0,0 +1,61 @@
import { GlobalConfig } from '@n8n/config';
import type { ClientOptions, ErrorEvent } from '@sentry/types';
import { strict as assert } from 'node:assert';
import { Container } from 'typedi';
import { InternalServerError } from '@/errors/response-errors/internal-server.error';
const init = jest.fn();
jest.mock('@sentry/integrations');
jest.mock('@sentry/node', () => ({
init,
setTag: jest.fn(),
captureException: jest.fn(),
Integrations: {},
}));
jest.spyOn(process, 'on');
describe('initErrorHandling', () => {
let beforeSend: ClientOptions['beforeSend'];
beforeAll(async () => {
Container.get(GlobalConfig).sentry.backendDsn = 'backend-dsn';
const errorReporting = require('@/error-reporting');
await errorReporting.initErrorHandling();
const options = (init.mock.calls[0] as [ClientOptions])[0];
beforeSend = options.beforeSend;
});
it('ignores errors with level warning', async () => {
const originalException = new InternalServerError('test');
originalException.level = 'warning';
const event = {} as ErrorEvent;
assert(beforeSend);
expect(await beforeSend(event, { originalException })).toEqual(null);
});
it('keeps events with a cause with error level', async () => {
const cause = new Error('cause-error');
const originalException = new InternalServerError('test', cause);
const event = {} as ErrorEvent;
assert(beforeSend);
expect(await beforeSend(event, { originalException })).toEqual(event);
});
it('ignores events with error cause with warning level', async () => {
const cause: Error & { level?: 'warning' } = new Error('cause-error');
cause.level = 'warning';
const originalException = new InternalServerError('test', cause);
const event = {} as ErrorEvent;
assert(beforeSend);
expect(await beforeSend(event, { originalException })).toEqual(null);
});
});


@ -0,0 +1,43 @@
import { mock } from 'jest-mock-extended';
import type { INode } from 'n8n-workflow';
import { NodeOperationError, type Workflow } from 'n8n-workflow';
import { objectToError } from '../workflow-execute-additional-data';
describe('objectToError', () => {
describe('node error handling', () => {
it('should create `NodeOperationError` when node is found', () => {
const errorObject = {
message: 'Test error',
node: {
name: 'testNode',
},
};
const workflow = mock<Workflow>();
const node = mock<INode>();
workflow.getNode.mockReturnValue(node);
const result = objectToError(errorObject, workflow);
expect(workflow.getNode).toHaveBeenCalledWith('testNode');
expect(result).toBeInstanceOf(NodeOperationError);
});
it('should create `Error` when node is not found', () => {
const errorObject = {
message: 'Test error',
node: {
// missing `name`
},
};
const workflow = mock<Workflow>();
const result = objectToError(errorObject, workflow);
expect(workflow.getNode).not.toHaveBeenCalled();
expect(result).toBeInstanceOf(Error);
expect(result).not.toBeInstanceOf(NodeOperationError);
expect(result.message).toBe('Test error');
});
});
});


@ -1,4 +1,23 @@
import { WorkflowHooks, type ExecutionError, type IWorkflowExecuteHooks } from 'n8n-workflow';
import { mock } from 'jest-mock-extended';
import { DirectedGraph, WorkflowExecute } from 'n8n-core';
import * as core from 'n8n-core';
import type {
IExecuteData,
INode,
IRun,
ITaskData,
IWaitingForExecution,
IWaitingForExecutionSource,
IWorkflowExecutionDataProcess,
StartNodeData,
} from 'n8n-workflow';
import {
Workflow,
WorkflowHooks,
type ExecutionError,
type IWorkflowExecuteHooks,
} from 'n8n-workflow';
import PCancelable from 'p-cancelable';
import Container from 'typedi';
import { ActiveExecutions } from '@/active-executions';
@ -6,6 +25,7 @@ import config from '@/config';
import type { User } from '@/databases/entities/user';
import { ExecutionNotFoundError } from '@/errors/execution-not-found-error';
import { Telemetry } from '@/telemetry';
import { PermissionChecker } from '@/user-management/permission-checker';
import { WorkflowRunner } from '@/workflow-runner';
import { mockInstance } from '@test/mocking';
import { createExecution } from '@test-integration/db/executions';
@ -43,61 +63,138 @@ afterAll(() => {
beforeEach(async () => {
await testDb.truncate(['Workflow', 'SharedWorkflow']);
jest.clearAllMocks();
});
test('processError should return early in Bull stalled edge case', async () => {
const workflow = await createWorkflow({}, owner);
const execution = await createExecution(
{
status: 'success',
finished: true,
},
workflow,
);
config.set('executions.mode', 'queue');
await runner.processError(
new Error('test') as ExecutionError,
new Date(),
'webhook',
execution.id,
new WorkflowHooks(hookFunctions, 'webhook', execution.id, workflow),
);
expect(watchedWorkflowExecuteAfter).toHaveBeenCalledTimes(0);
describe('processError', () => {
test('processError should return early in Bull stalled edge case', async () => {
const workflow = await createWorkflow({}, owner);
const execution = await createExecution(
{
status: 'success',
finished: true,
},
workflow,
);
config.set('executions.mode', 'queue');
await runner.processError(
new Error('test') as ExecutionError,
new Date(),
'webhook',
execution.id,
new WorkflowHooks(hookFunctions, 'webhook', execution.id, workflow),
);
expect(watchedWorkflowExecuteAfter).toHaveBeenCalledTimes(0);
});
test('processError should return early if the error is `ExecutionNotFoundError`', async () => {
const workflow = await createWorkflow({}, owner);
const execution = await createExecution({ status: 'success', finished: true }, workflow);
await runner.processError(
new ExecutionNotFoundError(execution.id),
new Date(),
'webhook',
execution.id,
new WorkflowHooks(hookFunctions, 'webhook', execution.id, workflow),
);
expect(watchedWorkflowExecuteAfter).toHaveBeenCalledTimes(0);
});
test('processError should process error', async () => {
const workflow = await createWorkflow({}, owner);
const execution = await createExecution(
{
status: 'success',
finished: true,
},
workflow,
);
await Container.get(ActiveExecutions).add(
{ executionMode: 'webhook', workflowData: workflow },
execution.id,
);
config.set('executions.mode', 'regular');
await runner.processError(
new Error('test') as ExecutionError,
new Date(),
'webhook',
execution.id,
new WorkflowHooks(hookFunctions, 'webhook', execution.id, workflow),
);
expect(watchedWorkflowExecuteAfter).toHaveBeenCalledTimes(1);
});
});
test('processError should return early if the error is `ExecutionNotFoundError`', async () => {
const workflow = await createWorkflow({}, owner);
const execution = await createExecution({ status: 'success', finished: true }, workflow);
await runner.processError(
new ExecutionNotFoundError(execution.id),
new Date(),
'webhook',
execution.id,
new WorkflowHooks(hookFunctions, 'webhook', execution.id, workflow),
);
expect(watchedWorkflowExecuteAfter).toHaveBeenCalledTimes(0);
});
describe('run', () => {
it('uses recreateNodeExecutionStack to create a partial execution if a triggerToStartFrom with data is sent', async () => {
// ARRANGE
const activeExecutions = Container.get(ActiveExecutions);
jest.spyOn(activeExecutions, 'add').mockResolvedValue('1');
jest.spyOn(activeExecutions, 'attachWorkflowExecution').mockReturnValueOnce();
const permissionChecker = Container.get(PermissionChecker);
jest.spyOn(permissionChecker, 'check').mockResolvedValueOnce();
test('processError should process error', async () => {
const workflow = await createWorkflow({}, owner);
const execution = await createExecution(
{
status: 'success',
finished: true,
},
workflow,
);
await Container.get(ActiveExecutions).add(
{ executionMode: 'webhook', workflowData: workflow },
execution.id,
);
config.set('executions.mode', 'regular');
await runner.processError(
new Error('test') as ExecutionError,
new Date(),
'webhook',
execution.id,
new WorkflowHooks(hookFunctions, 'webhook', execution.id, workflow),
);
expect(watchedWorkflowExecuteAfter).toHaveBeenCalledTimes(1);
jest.spyOn(WorkflowExecute.prototype, 'processRunExecutionData').mockReturnValueOnce(
new PCancelable(() => {
return mock<IRun>();
}),
);
jest.spyOn(Workflow.prototype, 'getNode').mockReturnValueOnce(mock<INode>());
jest.spyOn(DirectedGraph, 'fromWorkflow').mockReturnValueOnce(new DirectedGraph());
const recreateNodeExecutionStackSpy = jest
.spyOn(core, 'recreateNodeExecutionStack')
.mockReturnValueOnce({
nodeExecutionStack: mock<IExecuteData[]>(),
waitingExecution: mock<IWaitingForExecution>(),
waitingExecutionSource: mock<IWaitingForExecutionSource>(),
});
const data = mock<IWorkflowExecutionDataProcess>({
triggerToStartFrom: { name: 'trigger', data: mock<ITaskData>() },
workflowData: { nodes: [] },
executionData: undefined,
startNodes: [mock<StartNodeData>()],
destinationNode: undefined,
});
// ACT
await runner.run(data);
// ASSERT
expect(recreateNodeExecutionStackSpy).toHaveBeenCalled();
});
it('does not use recreateNodeExecutionStack to create a partial execution if a triggerToStartFrom without data is sent', async () => {
// ARRANGE
const activeExecutions = Container.get(ActiveExecutions);
jest.spyOn(activeExecutions, 'add').mockResolvedValue('1');
jest.spyOn(activeExecutions, 'attachWorkflowExecution').mockReturnValueOnce();
const permissionChecker = Container.get(PermissionChecker);
jest.spyOn(permissionChecker, 'check').mockResolvedValueOnce();
jest.spyOn(WorkflowExecute.prototype, 'processRunExecutionData').mockReturnValueOnce(
new PCancelable(() => {
return mock<IRun>();
}),
);
const recreateNodeExecutionStackSpy = jest.spyOn(core, 'recreateNodeExecutionStack');
const data = mock<IWorkflowExecutionDataProcess>({
triggerToStartFrom: { name: 'trigger', data: undefined },
workflowData: { nodes: [] },
executionData: undefined,
startNodes: [mock<StartNodeData>()],
destinationNode: undefined,
});
// ACT
await runner.run(data);
// ASSERT
expect(recreateNodeExecutionStackSpy).not.toHaveBeenCalled();
});
});


@ -1,6 +1,5 @@
import type { AiAssistantSDK } from '@n8n_io/ai-assistant-sdk';
import type { Response } from 'express';
import { ErrorReporterProxy } from 'n8n-workflow';
import { strict as assert } from 'node:assert';
import { WritableStream } from 'node:stream/web';
@ -33,8 +32,7 @@ export class AiController {
}
} catch (e) {
assert(e instanceof Error);
ErrorReporterProxy.error(e);
throw new InternalServerError(`Something went wrong: ${e.message}`);
throw new InternalServerError(e.message, e);
}
}
@ -46,8 +44,7 @@ export class AiController {
return await this.aiService.applySuggestion(req.body, req.user);
} catch (e) {
assert(e instanceof Error);
ErrorReporterProxy.error(e);
throw new InternalServerError(`Something went wrong: ${e.message}`);
throw new InternalServerError(e.message, e);
}
}
@ -57,8 +54,7 @@ export class AiController {
return await this.aiService.askAi(req.body, req.user);
} catch (e) {
assert(e instanceof Error);
ErrorReporterProxy.error(e);
throw new InternalServerError(`Something went wrong: ${e.message}`);
throw new InternalServerError(e.message, e);
}
}
}


@ -41,7 +41,7 @@ export class AuthController {
/** Log in a user */
@Post('/login', { skipAuth: true, rateLimit: true })
async login(req: LoginRequest, res: Response): Promise<PublicUser | undefined> {
const { email, password, mfaToken, mfaRecoveryCode } = req.body;
const { email, password, mfaCode, mfaRecoveryCode } = req.body;
if (!email) throw new ApplicationError('Email is required to log in');
if (!password) throw new ApplicationError('Password is required to log in');
@ -75,16 +75,16 @@ export class AuthController {
if (user) {
if (user.mfaEnabled) {
if (!mfaToken && !mfaRecoveryCode) {
if (!mfaCode && !mfaRecoveryCode) {
throw new AuthError('MFA Error', 998);
}
const isMFATokenValid = await this.mfaService.validateMfa(
const isMfaCodeOrMfaRecoveryCodeValid = await this.mfaService.validateMfa(
user.id,
mfaToken,
mfaCode,
mfaRecoveryCode,
);
if (!isMFATokenValid) {
if (!isMfaCodeOrMfaRecoveryCodeValid) {
throw new AuthError('Invalid mfa token or recovery code');
}
}


@ -201,7 +201,7 @@ export class CommunityPackagesController {
error instanceof Error ? error.message : UNKNOWN_FAILURE_REASON,
].join(':');
throw new InternalServerError(message);
throw new InternalServerError(message, error);
}
// broadcast to connected frontends that node list has been updated
@ -283,7 +283,7 @@ export class CommunityPackagesController {
error instanceof Error ? error.message : UNKNOWN_FAILURE_REASON,
].join(':');
throw new InternalServerError(message);
throw new InternalServerError(message, error);
}
}
}


@ -68,8 +68,8 @@ export class MeController {
throw new BadRequestError('Two-factor code is required to change email');
}
const isMfaTokenValid = await this.mfaService.validateMfa(userId, payload.mfaCode, undefined);
if (!isMfaTokenValid) {
const isMfaCodeValid = await this.mfaService.validateMfa(userId, payload.mfaCode, undefined);
if (!isMfaCodeValid) {
throw new InvalidMfaCodeError();
}
}
@ -142,8 +142,8 @@ export class MeController {
throw new BadRequestError('Two-factor code is required to change password.');
}
const isMfaTokenValid = await this.mfaService.validateMfa(user.id, mfaCode, undefined);
if (!isMfaTokenValid) {
const isMfaCodeValid = await this.mfaService.validateMfa(user.id, mfaCode, undefined);
if (!isMfaCodeValid) {
throw new InvalidMfaCodeError();
}
}


@ -59,7 +59,7 @@ export class MFAController {
@Post('/enable', { rateLimit: true })
async activateMFA(req: MFA.Activate) {
const { token = null } = req.body;
const { mfaCode = null } = req.body;
const { id, mfaEnabled } = req.user;
await this.externalHooks.run('mfa.beforeSetup', [req.user]);
@ -67,7 +67,7 @@ export class MFAController {
const { decryptedSecret: secret, decryptedRecoveryCodes: recoveryCodes } =
await this.mfaService.getSecretAndRecoveryCodes(id);
if (!token) throw new BadRequestError('Token is required to enable MFA feature');
if (!mfaCode) throw new BadRequestError('Token is required to enable MFA feature');
if (mfaEnabled) throw new BadRequestError('MFA already enabled');
@ -75,10 +75,10 @@ export class MFAController {
throw new BadRequestError('Cannot enable MFA without generating secret and recovery codes');
}
const verified = this.mfaService.totp.verifySecret({ secret, token, window: 10 });
const verified = this.mfaService.totp.verifySecret({ secret, mfaCode, window: 10 });
if (!verified)
throw new BadRequestError('MFA token expired. Close the modal and enable MFA again', 997);
throw new BadRequestError('MFA code expired. Close the modal and enable MFA again', 997);
await this.mfaService.enableMfa(id);
}
@ -86,27 +86,38 @@ export class MFAController {
@Post('/disable', { rateLimit: true })
async disableMFA(req: MFA.Disable) {
const { id: userId } = req.user;
const { token = null } = req.body;
if (typeof token !== 'string' || !token) {
throw new BadRequestError('Token is required to disable MFA feature');
const { mfaCode, mfaRecoveryCode } = req.body;
const mfaCodeDefined = mfaCode && typeof mfaCode === 'string';
const mfaRecoveryCodeDefined = mfaRecoveryCode && typeof mfaRecoveryCode === 'string';
if (!mfaCodeDefined === !mfaRecoveryCodeDefined) {
throw new BadRequestError(
'Either MFA code or recovery code is required to disable MFA feature',
);
}
await this.mfaService.disableMfa(userId, token);
if (mfaCodeDefined) {
await this.mfaService.disableMfaWithMfaCode(userId, mfaCode);
} else if (mfaRecoveryCodeDefined) {
await this.mfaService.disableMfaWithRecoveryCode(userId, mfaRecoveryCode);
}
}
@Post('/verify', { rateLimit: true })
async verifyMFA(req: MFA.Verify) {
const { id } = req.user;
const { token } = req.body;
const { mfaCode } = req.body;
const { decryptedSecret: secret } = await this.mfaService.getSecretAndRecoveryCodes(id);
if (!token) throw new BadRequestError('Token is required to enable MFA feature');
if (!mfaCode) throw new BadRequestError('MFA code is required to enable MFA feature');
if (!secret) throw new BadRequestError('No MFA secret set for this user');
const verified = this.mfaService.totp.verifySecret({ secret, token });
const verified = this.mfaService.totp.verifySecret({ secret, mfaCode });
if (!verified) throw new BadRequestError('MFA secret could not be verified');
}


@ -120,7 +120,7 @@ export class PasswordResetController {
publicApi: false,
});
if (error instanceof Error) {
throw new InternalServerError(`Please contact your administrator: ${error.message}`);
throw new InternalServerError(`Please contact your administrator: ${error.message}`, error);
}
}
@ -171,7 +171,7 @@ export class PasswordResetController {
*/
@Post('/change-password', { skipAuth: true })
async changePassword(req: PasswordResetRequest.NewPassword, res: Response) {
const { token, password, mfaToken } = req.body;
const { token, password, mfaCode } = req.body;
if (!token || !password) {
this.logger.debug(
@ -189,11 +189,11 @@ export class PasswordResetController {
if (!user) throw new NotFoundError('');
if (user.mfaEnabled) {
if (!mfaToken) throw new BadRequestError('If MFA enabled, mfaToken is required.');
if (!mfaCode) throw new BadRequestError('If MFA enabled, mfaCode is required.');
const { decryptedSecret: secret } = await this.mfaService.getSecretAndRecoveryCodes(user.id);
const validToken = this.mfaService.totp.verifySecret({ secret, token: mfaToken });
const validToken = this.mfaService.totp.verifySecret({ secret, mfaCode });
if (!validToken) throw new BadRequestError('Invalid MFA token.');
}


@ -54,7 +54,7 @@ export class TranslationController {
// eslint-disable-next-line @typescript-eslint/no-unsafe-return
return require(NODE_HEADERS_PATH);
} catch (error) {
throw new InternalServerError('Failed to load headers file');
throw new InternalServerError('Failed to load headers file', error);
}
}
}


@ -38,7 +38,7 @@ export class PurgeInvalidWorkflowConnections1675940580449 implements Irreversibl
// It filters out all connections that are connected to a node that cannot receive input
outputConnection.forEach((outputConnectionItem, outputConnectionItemIdx) => {
outputConnection[outputConnectionItemIdx] = outputConnectionItem.filter(
outputConnection[outputConnectionItemIdx] = (outputConnectionItem ?? []).filter(
(outgoingConnections) =>
!nodesThatCannotReceiveInput.includes(outgoingConnections.node),
);


@ -1,8 +1,10 @@
import type { FindManyOptions } from '@n8n/typeorm';
import { DataSource, Repository } from '@n8n/typeorm';
import { Service } from 'typedi';
import type { AggregatedTestRunMetrics } from '@/databases/entities/test-run.ee';
import { TestRun } from '@/databases/entities/test-run.ee';
import type { ListQuery } from '@/requests';
@Service()
export class TestRunRepository extends Repository<TestRun> {
@ -26,4 +28,18 @@ export class TestRunRepository extends Repository<TestRun> {
public async markAsCompleted(id: string, metrics: AggregatedTestRunMetrics) {
return await this.update(id, { status: 'completed', completedAt: new Date(), metrics });
}
public async getMany(testDefinitionId: string, options: ListQuery.Options) {
const findManyOptions: FindManyOptions<TestRun> = {
where: { testDefinition: { id: testDefinitionId } },
order: { createdAt: 'DESC' },
};
if (options?.take) {
findManyOptions.skip = options.skip;
findManyOptions.take = options.take;
}
return await this.find(findManyOptions);
}
}


@ -90,6 +90,17 @@ export const initErrorHandling = async () => {
if (tags) event.tags = { ...event.tags, ...tags };
}
if (
originalException instanceof Error &&
'cause' in originalException &&
originalException.cause instanceof Error &&
'level' in originalException.cause &&
originalException.cause.level === 'warning'
) {
// handle underlying errors propagating from dependencies like ai-assistant-sdk
return null;
}
if (originalException instanceof Error && originalException.stack) {
const eventHash = createHash('sha1').update(originalException.stack).digest('base64');
if (seenErrors.has(eventHash)) return null;


@ -16,8 +16,9 @@ export abstract class ResponseError extends ApplicationError {
readonly errorCode: number = httpStatusCode,
// The error hint for the response
readonly hint: string | undefined = undefined,
cause?: unknown,
) {
super(message);
super(message, { cause });
this.name = 'ResponseError';
}
}


@ -1,7 +1,7 @@
import { ResponseError } from './abstract/response.error';
export class InternalServerError extends ResponseError {
constructor(message: string, errorCode = 500) {
super(message, 500, errorCode);
constructor(message: string, cause?: unknown) {
super(message, 500, 500, undefined, cause);
}
}


@ -0,0 +1,7 @@
import { ForbiddenError } from './forbidden.error';
export class InvalidMfaRecoveryCodeError extends ForbiddenError {
constructor(hint?: string) {
super('Invalid MFA recovery code', hint);
}
}


@ -112,7 +112,7 @@ export class TestMetricsController {
}
@Delete('/:testDefinitionId/metrics/:id')
async delete(req: TestMetricsRequest.GetOne) {
async delete(req: TestMetricsRequest.Delete) {
const { id: metricId, testDefinitionId } = req.params;
await this.getTestDefinition(req);

View file

@ -13,7 +13,7 @@ export declare namespace TestDefinitionsRequest {
type GetOne = AuthenticatedRequest<RouteParams.TestId>;
type GetMany = AuthenticatedRequest<{}, {}, {}, ListQuery.Params & { includeScopes?: string }> & {
type GetMany = AuthenticatedRequest<{}, {}, {}, ListQuery.Params> & {
listQueryOptions: ListQuery.Options;
};
@ -63,3 +63,27 @@ export declare namespace TestMetricsRequest {
type Delete = AuthenticatedRequest<RouteParams.TestDefinitionId & RouteParams.TestMetricId>;
}
// ----------------------------------
// /test-definitions/:testDefinitionId/runs
// ----------------------------------
export declare namespace TestRunsRequest {
namespace RouteParams {
type TestId = {
testDefinitionId: string;
};
type TestRunId = {
id: string;
};
}
type GetMany = AuthenticatedRequest<RouteParams.TestId, {}, {}, ListQuery.Params> & {
listQueryOptions: ListQuery.Options;
};
type GetOne = AuthenticatedRequest<RouteParams.TestId & RouteParams.TestRunId>;
type Delete = AuthenticatedRequest<RouteParams.TestId & RouteParams.TestRunId>;
}

View file

@ -0,0 +1,24 @@
import { readFileSync } from 'fs';
import path from 'path';
import { createPinData } from '../utils.ee';
const wfUnderTestJson = JSON.parse(
readFileSync(path.join(__dirname, './mock-data/workflow.under-test.json'), { encoding: 'utf-8' }),
);
const executionDataJson = JSON.parse(
readFileSync(path.join(__dirname, './mock-data/execution-data.json'), { encoding: 'utf-8' }),
);
describe('createPinData', () => {
test('should create pin data from past execution data', () => {
const pinData = createPinData(wfUnderTestJson, executionDataJson);
expect(pinData).toEqual(
expect.objectContaining({
'When clicking Test workflow': expect.anything(),
}),
);
});
});

View file

@ -0,0 +1,72 @@
import { EvaluationMetrics } from '../evaluation-metrics.ee';
describe('EvaluationMetrics', () => {
test('should aggregate metrics correctly', () => {
const testMetricNames = new Set(['metric1', 'metric2']);
const metrics = new EvaluationMetrics(testMetricNames);
metrics.addResults({ metric1: 1, metric2: 0 });
metrics.addResults({ metric1: 0.5, metric2: 0.2 });
const aggregatedMetrics = metrics.getAggregatedMetrics();
expect(aggregatedMetrics).toEqual({ metric1: 0.75, metric2: 0.1 });
});
test('should aggregate only numbers', () => {
const testMetricNames = new Set(['metric1', 'metric2']);
const metrics = new EvaluationMetrics(testMetricNames);
metrics.addResults({ metric1: 1, metric2: 0 });
metrics.addResults({ metric1: '0.5', metric2: 0.2 });
metrics.addResults({ metric1: 'not a number', metric2: [1, 2, 3] });
const aggregatedUpMetrics = metrics.getAggregatedMetrics();
expect(aggregatedUpMetrics).toEqual({ metric1: 1, metric2: 0.1 });
});
test('should handle missing values', () => {
const testMetricNames = new Set(['metric1', 'metric2']);
const metrics = new EvaluationMetrics(testMetricNames);
metrics.addResults({ metric1: 1 });
metrics.addResults({ metric2: 0.2 });
const aggregatedMetrics = metrics.getAggregatedMetrics();
expect(aggregatedMetrics).toEqual({ metric1: 1, metric2: 0.2 });
});
test('should handle empty metrics', () => {
const testMetricNames = new Set(['metric1', 'metric2']);
const metrics = new EvaluationMetrics(testMetricNames);
const aggregatedMetrics = metrics.getAggregatedMetrics();
expect(aggregatedMetrics).toEqual({});
});
test('should handle empty testMetrics', () => {
const metrics = new EvaluationMetrics(new Set());
metrics.addResults({ metric1: 1, metric2: 0 });
metrics.addResults({ metric1: 0.5, metric2: 0.2 });
const aggregatedMetrics = metrics.getAggregatedMetrics();
expect(aggregatedMetrics).toEqual({});
});
test('should ignore non-relevant values', () => {
const testMetricNames = new Set(['metric1']);
const metrics = new EvaluationMetrics(testMetricNames);
metrics.addResults({ metric1: 1, notRelevant: 0 });
metrics.addResults({ metric1: 0.5, notRelevant2: { foo: 'bar' } });
const aggregatedMetrics = metrics.getAggregatedMetrics();
expect(aggregatedMetrics).toEqual({ metric1: 0.75 });
});
});

View file

@ -0,0 +1,40 @@
import { readFileSync } from 'fs';
import path from 'path';
import { getPastExecutionStartNode } from '../utils.ee';
const executionDataJson = JSON.parse(
readFileSync(path.join(__dirname, './mock-data/execution-data.json'), { encoding: 'utf-8' }),
);
const executionDataMultipleTriggersJson = JSON.parse(
readFileSync(path.join(__dirname, './mock-data/execution-data.multiple-triggers.json'), {
encoding: 'utf-8',
}),
);
const executionDataMultipleTriggersJson2 = JSON.parse(
readFileSync(path.join(__dirname, './mock-data/execution-data.multiple-triggers-2.json'), {
encoding: 'utf-8',
}),
);
describe('getPastExecutionStartNode', () => {
test('should return the start node of the past execution', () => {
const startNode = getPastExecutionStartNode(executionDataJson);
expect(startNode).toEqual('When clicking Test workflow');
});
test('should return the start node of the past execution with multiple triggers', () => {
const startNode = getPastExecutionStartNode(executionDataMultipleTriggersJson);
expect(startNode).toEqual('When clicking Test workflow');
});
test('should return the start node of the past execution with multiple triggers - chat trigger', () => {
const startNode = getPastExecutionStartNode(executionDataMultipleTriggersJson2);
expect(startNode).toEqual('When chat message received');
});
});

View file

@ -0,0 +1,95 @@
{
"startData": {},
"resultData": {
"runData": {
"When chat message received": [
{
"startTime": 1732882447976,
"executionTime": 0,
"executionStatus": "success",
"data": {
"main": [
[
{
"json": {
"sessionId": "192c5b3c0b0642d68eab1a747a59cb6e",
"action": "sendMessage",
"chatInput": "hey"
}
}
]
]
},
"source": [null]
}
],
"NoOp": [
{
"hints": [],
"startTime": 1732882448034,
"executionTime": 0,
"source": [
{
"previousNode": "When clicking Test workflow"
}
],
"executionStatus": "success",
"data": {
"main": [
[
{
"json": {
"sessionId": "192c5b3c0b0642d68eab1a747a59cb6e",
"action": "sendMessage",
"chatInput": "hey"
},
"pairedItem": {
"item": 0
}
}
]
]
}
}
],
"NoOp2": [
{
"hints": [],
"startTime": 1732882448037,
"executionTime": 0,
"source": [
{
"previousNode": "NoOp"
}
],
"executionStatus": "success",
"data": {
"main": [
[
{
"json": {
"sessionId": "192c5b3c0b0642d68eab1a747a59cb6e",
"action": "sendMessage",
"chatInput": "hey"
},
"pairedItem": {
"item": 0
}
}
]
]
}
}
]
},
"pinData": {},
"lastNodeExecuted": "NoOp2"
},
"executionData": {
"contextData": {},
"nodeExecutionStack": [],
"metadata": {},
"waitingExecution": {},
"waitingExecutionSource": {}
}
}

View file

@ -0,0 +1,87 @@
{
"startData": {},
"resultData": {
"runData": {
"When clicking Test workflow": [
{
"hints": [],
"startTime": 1732882424975,
"executionTime": 0,
"source": [],
"executionStatus": "success",
"data": {
"main": [
[
{
"json": {},
"pairedItem": {
"item": 0
}
}
]
]
}
}
],
"NoOp": [
{
"hints": [],
"startTime": 1732882424977,
"executionTime": 1,
"source": [
{
"previousNode": "When clicking Test workflow"
}
],
"executionStatus": "success",
"data": {
"main": [
[
{
"json": {},
"pairedItem": {
"item": 0
}
}
]
]
}
}
],
"NoOp2": [
{
"hints": [],
"startTime": 1732882424978,
"executionTime": 0,
"source": [
{
"previousNode": "NoOp"
}
],
"executionStatus": "success",
"data": {
"main": [
[
{
"json": {},
"pairedItem": {
"item": 0
}
}
]
]
}
}
]
},
"pinData": {},
"lastNodeExecuted": "NoOp2"
},
"executionData": {
"contextData": {},
"nodeExecutionStack": [],
"metadata": {},
"waitingExecution": {},
"waitingExecutionSource": {}
}
}

View file

@ -57,6 +57,12 @@
"name": "success",
"value": true,
"type": "boolean"
},
{
"id": "877d1bf8-31a7-4571-9293-a6837b51d22b",
"name": "metric1",
"value": 0.1,
"type": "number"
}
]
},

View file

@ -0,0 +1,76 @@
{
"name": "Multiple Triggers Workflow",
"nodes": [
{
"parameters": {},
"type": "n8n-nodes-base.manualTrigger",
"typeVersion": 1,
"position": [-20, -120],
"id": "19562c2d-d2c8-45c8-ae0a-1b1effe29817",
"name": "When clicking Test workflow"
},
{
"parameters": {
"options": {}
},
"type": "@n8n/n8n-nodes-langchain.chatTrigger",
"typeVersion": 1.1,
"position": [-20, 120],
"id": "9b4b833b-56f6-4099-9b7d-5e94b75a735c",
"name": "When chat message received",
"webhookId": "8aeccd03-d45f-48d2-a2c7-1fb8c53d2ad7"
},
{
"parameters": {},
"type": "n8n-nodes-base.noOp",
"typeVersion": 1,
"position": [260, -20],
"id": "d3ab7426-11e7-4f42-9a57-11b8de019783",
"name": "NoOp"
},
{
"parameters": {},
"type": "n8n-nodes-base.noOp",
"typeVersion": 1,
"position": [480, -20],
"id": "fb73bed6-ec2a-4283-b564-c96730b94889",
"name": "NoOp2"
}
],
"connections": {
"When clicking Test workflow": {
"main": [
[
{
"node": "NoOp",
"type": "main",
"index": 0
}
]
]
},
"When chat message received": {
"main": [
[
{
"node": "NoOp",
"type": "main",
"index": 0
}
]
]
},
"NoOp": {
"main": [
[
{
"node": "NoOp2",
"type": "main",
"index": 0
}
]
]
}
},
"pinData": {}
}

View file

@ -2,15 +2,17 @@ import type { SelectQueryBuilder } from '@n8n/typeorm';
import { stringify } from 'flatted';
import { readFileSync } from 'fs';
import { mock, mockDeep } from 'jest-mock-extended';
import type { IRun } from 'n8n-workflow';
import type { GenericValue, IRun } from 'n8n-workflow';
import path from 'path';
import type { ActiveExecutions } from '@/active-executions';
import type { ExecutionEntity } from '@/databases/entities/execution-entity';
import type { TestDefinition } from '@/databases/entities/test-definition.ee';
import type { TestMetric } from '@/databases/entities/test-metric.ee';
import type { TestRun } from '@/databases/entities/test-run.ee';
import type { User } from '@/databases/entities/user';
import type { ExecutionRepository } from '@/databases/repositories/execution.repository';
import type { TestMetricRepository } from '@/databases/repositories/test-metric.repository.ee';
import type { TestRunRepository } from '@/databases/repositories/test-run.repository.ee';
import type { WorkflowRepository } from '@/databases/repositories/workflow.repository';
import type { WorkflowRunner } from '@/workflow-runner';
@ -58,12 +60,38 @@ function mockExecutionData() {
});
}
function mockEvaluationExecutionData(metrics: Record<string, GenericValue>) {
return mock<IRun>({
data: {
resultData: {
lastNodeExecuted: 'lastNode',
runData: {
lastNode: [
{
data: {
main: [
[
{
json: metrics,
},
],
],
},
},
],
},
},
},
});
}
describe('TestRunnerService', () => {
const executionRepository = mock<ExecutionRepository>();
const workflowRepository = mock<WorkflowRepository>();
const workflowRunner = mock<WorkflowRunner>();
const activeExecutions = mock<ActiveExecutions>();
const testRunRepository = mock<TestRunRepository>();
const testMetricRepository = mock<TestMetricRepository>();
beforeEach(() => {
const executionsQbMock = mockDeep<SelectQueryBuilder<ExecutionEntity>>({
@ -80,6 +108,11 @@ describe('TestRunnerService', () => {
.mockResolvedValueOnce(executionMocks[1]);
testRunRepository.createTestRun.mockResolvedValue(mock<TestRun>({ id: 'test-run-id' }));
testMetricRepository.find.mockResolvedValue([
mock<TestMetric>({ name: 'metric1' }),
mock<TestMetric>({ name: 'metric2' }),
]);
});
afterEach(() => {
@ -97,6 +130,7 @@ describe('TestRunnerService', () => {
executionRepository,
activeExecutions,
testRunRepository,
testMetricRepository,
);
expect(testRunnerService).toBeInstanceOf(TestRunnerService);
@ -109,6 +143,7 @@ describe('TestRunnerService', () => {
executionRepository,
activeExecutions,
testRunRepository,
testMetricRepository,
);
workflowRepository.findById.calledWith('workflow-under-test-id').mockResolvedValueOnce({
@ -143,6 +178,7 @@ describe('TestRunnerService', () => {
executionRepository,
activeExecutions,
testRunRepository,
testMetricRepository,
);
workflowRepository.findById.calledWith('workflow-under-test-id').mockResolvedValueOnce({
@ -166,17 +202,17 @@ describe('TestRunnerService', () => {
.mockResolvedValue(mockExecutionData());
activeExecutions.getPostExecutePromise
.calledWith('some-execution-id-2')
.calledWith('some-execution-id-3')
.mockResolvedValue(mockExecutionData());
// Mock executions of evaluation workflow
activeExecutions.getPostExecutePromise
.calledWith('some-execution-id-3')
.mockResolvedValue(mockExecutionData());
.calledWith('some-execution-id-2')
.mockResolvedValue(mockEvaluationExecutionData({ metric1: 1, metric2: 0 }));
activeExecutions.getPostExecutePromise
.calledWith('some-execution-id-4')
.mockResolvedValue(mockExecutionData());
.mockResolvedValue(mockEvaluationExecutionData({ metric1: 0.5 }));
await testRunnerService.runTest(
mock<User>(),
@ -225,7 +261,8 @@ describe('TestRunnerService', () => {
expect(testRunRepository.markAsRunning).toHaveBeenCalledWith('test-run-id');
expect(testRunRepository.markAsCompleted).toHaveBeenCalledTimes(1);
expect(testRunRepository.markAsCompleted).toHaveBeenCalledWith('test-run-id', {
success: false,
metric1: 0.75,
metric2: 0,
});
});
});

View file

@ -0,0 +1,32 @@
import type { IDataObject } from 'n8n-workflow';
export class EvaluationMetrics {
private readonly rawMetricsByName = new Map<string, number[]>();
constructor(private readonly metricNames: Set<string>) {
for (const metricName of metricNames) {
this.rawMetricsByName.set(metricName, []);
}
}
addResults(result: IDataObject) {
for (const [metricName, metricValue] of Object.entries(result)) {
if (typeof metricValue === 'number' && this.metricNames.has(metricName)) {
this.rawMetricsByName.get(metricName)!.push(metricValue);
}
}
}
getAggregatedMetrics() {
const aggregatedMetrics: Record<string, number> = {};
for (const [metricName, metricValues] of this.rawMetricsByName.entries()) {
if (metricValues.length > 0) {
const metricSum = metricValues.reduce((acc, val) => acc + val, 0);
aggregatedMetrics[metricName] = metricSum / metricValues.length;
}
}
return aggregatedMetrics;
}
}
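
A short usage sketch of the class as defined above: only numeric values for the configured metric names are collected, and aggregation is a per-metric mean; the metric name is a placeholder.

import { EvaluationMetrics } from './evaluation-metrics.ee';

const metrics = new EvaluationMetrics(new Set(['accuracy']));
metrics.addResults({ accuracy: 1, ignored: 'not configured' }); // unknown keys and non-numbers are skipped
metrics.addResults({ accuracy: 0.5 });
metrics.getAggregatedMetrics(); // { accuracy: 0.75 }, i.e. (1 + 0.5) / 2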

View file

@ -1,9 +1,9 @@
import { parse } from 'flatted';
import type {
IDataObject,
IPinData,
IRun,
IRunData,
IRunExecutionData,
IWorkflowExecutionDataProcess,
} from 'n8n-workflow';
import assert from 'node:assert';
@ -15,12 +15,15 @@ import type { TestDefinition } from '@/databases/entities/test-definition.ee';
import type { User } from '@/databases/entities/user';
import type { WorkflowEntity } from '@/databases/entities/workflow-entity';
import { ExecutionRepository } from '@/databases/repositories/execution.repository';
import { TestMetricRepository } from '@/databases/repositories/test-metric.repository.ee';
import { TestRunRepository } from '@/databases/repositories/test-run.repository.ee';
import { WorkflowRepository } from '@/databases/repositories/workflow.repository';
import type { IExecutionResponse } from '@/interfaces';
import { getRunData } from '@/workflow-execute-additional-data';
import { WorkflowRunner } from '@/workflow-runner';
import { EvaluationMetrics } from './evaluation-metrics.ee';
import { createPinData, getPastExecutionStartNode } from './utils.ee';
/**
* This service orchestrates the running of test cases.
* It uses the test definitions to find
@ -39,45 +42,33 @@ export class TestRunnerService {
private readonly executionRepository: ExecutionRepository,
private readonly activeExecutions: ActiveExecutions,
private readonly testRunRepository: TestRunRepository,
private readonly testMetricRepository: TestMetricRepository,
) {}
/**
* Extracts the execution data from the past execution.
* Creates a pin data object from the past execution data
* for the given workflow.
* For now, it only pins trigger nodes.
*/
private createTestDataFromExecution(workflow: WorkflowEntity, execution: ExecutionEntity) {
const executionData = parse(execution.executionData.data) as IExecutionResponse['data'];
const triggerNodes = workflow.nodes.filter((node) => /trigger$/i.test(node.type));
const pinData = {} as IPinData;
for (const triggerNode of triggerNodes) {
const triggerData = executionData.resultData.runData[triggerNode.name];
if (triggerData?.[0]?.data?.main?.[0]) {
pinData[triggerNode.name] = triggerData[0]?.data?.main?.[0];
}
}
return { pinData, executionData };
}
/**
* Runs a test case with the given pin data.
* Waits for the workflow under test to finish execution.
*/
private async runTestCase(
workflow: WorkflowEntity,
testCasePinData: IPinData,
pastExecutionData: IRunExecutionData,
userId: string,
): Promise<IRun | undefined> {
// Create pin data from the past execution data
const pinData = createPinData(workflow, pastExecutionData);
// Determine the start node of the past execution
const pastExecutionStartNode = getPastExecutionStartNode(pastExecutionData);
// Prepare the data to run the workflow
const data: IWorkflowExecutionDataProcess = {
destinationNode: pastExecutionData.startData?.destinationNode,
startNodes: pastExecutionStartNode
? [{ name: pastExecutionStartNode, sourceData: null }]
: undefined,
executionMode: 'evaluation',
runData: {},
pinData: testCasePinData,
pinData,
workflowData: workflow,
partialExecutionVersion: '-1',
userId,
@ -125,6 +116,11 @@ export class TestRunnerService {
return await executePromise;
}
/**
* Evaluation result is the first item in the output of the last node
* executed in the evaluation workflow. Defaults to an empty object
* in case the node doesn't produce any output items.
*/
private extractEvaluationResult(execution: IRun): IDataObject {
const lastNodeExecuted = execution.data.resultData.lastNodeExecuted;
assert(lastNodeExecuted, 'Could not find the last node executed in evaluation workflow');
@ -136,6 +132,21 @@ export class TestRunnerService {
return mainConnectionData?.[0]?.json ?? {};
}
/**
* Get the metrics to collect from the evaluation workflow execution results.
*/
private async getTestMetricNames(testDefinitionId: string) {
const metrics = await this.testMetricRepository.find({
where: {
testDefinition: {
id: testDefinitionId,
},
},
});
return new Set(metrics.map((m) => m.name));
}
/**
* Creates a new test run for the given test definition.
*/
@ -164,11 +175,15 @@ export class TestRunnerService {
.andWhere('execution.workflowId = :workflowId', { workflowId: test.workflowId })
.getMany();
// Get the metrics to collect from the evaluation workflow
const testMetricNames = await this.getTestMetricNames(test.id);
// 2. Run over all the test cases
await this.testRunRepository.markAsRunning(testRun.id);
const metrics = [];
// Object to collect the results of the evaluation workflow executions
const metrics = new EvaluationMetrics(testMetricNames);
for (const { id: pastExecutionId } of pastExecutions) {
// Fetch past execution with data
@ -178,11 +193,10 @@ export class TestRunnerService {
});
assert(pastExecution, 'Execution not found');
const testData = this.createTestDataFromExecution(workflow, pastExecution);
const { pinData, executionData } = testData;
const executionData = parse(pastExecution.executionData.data) as IRunExecutionData;
// Run the test case and wait for it to finish
const testCaseExecution = await this.runTestCase(workflow, pinData, user.id);
const testCaseExecution = await this.runTestCase(workflow, executionData, user.id);
// In case of a permission check issue, the test case execution will be undefined.
// Skip them and continue with the next test case
@ -205,12 +219,10 @@ export class TestRunnerService {
assert(evalExecution);
// Extract the output of the last node executed in the evaluation workflow
metrics.push(this.extractEvaluationResult(evalExecution));
metrics.addResults(this.extractEvaluationResult(evalExecution));
}
// TODO: 3. Aggregate the results
// Now we just set success to true if all the test cases passed
const aggregatedMetrics = { success: metrics.every((metric) => metric.success) };
const aggregatedMetrics = metrics.getAggregatedMetrics();
await this.testRunRepository.markAsCompleted(testRun.id, aggregatedMetrics);
}

View file

@ -0,0 +1,34 @@
import type { IRunExecutionData, IPinData } from 'n8n-workflow';
import type { WorkflowEntity } from '@/databases/entities/workflow-entity';
/**
* Extracts the execution data from the past execution
* and creates a pin data object from it for the given workflow.
* For now, it only pins trigger nodes.
*/
export function createPinData(workflow: WorkflowEntity, executionData: IRunExecutionData) {
const triggerNodes = workflow.nodes.filter((node) => /trigger$/i.test(node.type));
const pinData = {} as IPinData;
for (const triggerNode of triggerNodes) {
const triggerData = executionData.resultData.runData[triggerNode.name];
if (triggerData?.[0]?.data?.main?.[0]) {
pinData[triggerNode.name] = triggerData[0]?.data?.main?.[0];
}
}
return pinData;
}
/**
* Returns the start node of the past execution.
* The start node is the node that has no source and has run data.
*/
export function getPastExecutionStartNode(executionData: IRunExecutionData) {
return Object.keys(executionData.resultData.runData).find((nodeName) => {
const data = executionData.resultData.runData[nodeName];
return !data[0].source || data[0].source.length === 0 || data[0].source[0] === null;
});
}
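
A sketch of how the two helpers combine, roughly as runTestCase above uses them; the workflow and execution data are placeholders declared only so the fragment type-checks.

import type { IRunExecutionData } from 'n8n-workflow';
import type { WorkflowEntity } from '@/databases/entities/workflow-entity';
import { createPinData, getPastExecutionStartNode } from './utils.ee';

// Placeholders standing in for values the service already has in scope.
declare const workflow: WorkflowEntity;
declare const pastExecutionData: IRunExecutionData;

const pinData = createPinData(workflow, pastExecutionData); // pins trigger-node output
const startNodeName = getPastExecutionStartNode(pastExecutionData); // first node without a source
const startNodes = startNodeName ? [{ name: startNodeName, sourceData: null }] : undefined;
// pinData and startNodes then feed into IWorkflowExecutionDataProcess, as runTestCase does above.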

View file

@ -0,0 +1,77 @@
import { TestRunRepository } from '@/databases/repositories/test-run.repository.ee';
import { Delete, Get, RestController } from '@/decorators';
import { NotFoundError } from '@/errors/response-errors/not-found.error';
import { TestRunsRequest } from '@/evaluation/test-definitions.types.ee';
import { listQueryMiddleware } from '@/middlewares';
import { getSharedWorkflowIds } from '@/public-api/v1/handlers/workflows/workflows.service';
import { TestDefinitionService } from './test-definition.service.ee';
@RestController('/evaluation/test-definitions')
export class TestRunsController {
constructor(
private readonly testDefinitionService: TestDefinitionService,
private readonly testRunRepository: TestRunRepository,
) {}
/** This method is used in multiple places in the controller to get the test definition
* (or just check that it exists and the user has access to it).
*/
private async getTestDefinition(
req: TestRunsRequest.GetOne | TestRunsRequest.GetMany | TestRunsRequest.Delete,
) {
const { testDefinitionId } = req.params;
const userAccessibleWorkflowIds = await getSharedWorkflowIds(req.user, ['workflow:read']);
const testDefinition = await this.testDefinitionService.findOne(
testDefinitionId,
userAccessibleWorkflowIds,
);
if (!testDefinition) throw new NotFoundError('Test definition not found');
return testDefinition;
}
@Get('/:testDefinitionId/runs', { middlewares: listQueryMiddleware })
async getMany(req: TestRunsRequest.GetMany) {
const { testDefinitionId } = req.params;
await this.getTestDefinition(req);
return await this.testRunRepository.getMany(testDefinitionId, req.listQueryOptions);
}
@Get('/:testDefinitionId/runs/:id')
async getOne(req: TestRunsRequest.GetOne) {
const { id: testRunId, testDefinitionId } = req.params;
await this.getTestDefinition(req);
const testRun = await this.testRunRepository.findOne({
where: { id: testRunId, testDefinition: { id: testDefinitionId } },
});
if (!testRun) throw new NotFoundError('Test run not found');
return testRun;
}
@Delete('/:testDefinitionId/runs/:id')
async delete(req: TestRunsRequest.Delete) {
const { id: testRunId, testDefinitionId } = req.params;
await this.getTestDefinition(req);
const testRun = await this.testRunRepository.findOne({
where: { id: testRunId, testDefinition: { id: testDefinitionId } },
});
if (!testRun) throw new NotFoundError('Test run not found');
await this.testRunRepository.delete({ id: testRunId });
return { success: true };
}
}

View file

@ -251,7 +251,7 @@ export class ExecutionService {
requestFilters = requestFiltersRaw as IGetExecutionsQueryFilter;
}
} catch (error) {
throw new InternalServerError('Parameter "filter" contained invalid JSON string.');
throw new InternalServerError('Parameter "filter" contained invalid JSON string.', error);
}
}

View file

@ -4,6 +4,7 @@ import { v4 as uuid } from 'uuid';
import { AuthUserRepository } from '@/databases/repositories/auth-user.repository';
import { InvalidMfaCodeError } from '@/errors/response-errors/invalid-mfa-code.error';
import { InvalidMfaRecoveryCodeError } from '@/errors/response-errors/invalid-mfa-recovery-code-error';
import { TOTPService } from './totp.service';
@ -56,13 +57,13 @@ export class MfaService {
async validateMfa(
userId: string,
mfaToken: string | undefined,
mfaCode: string | undefined,
mfaRecoveryCode: string | undefined,
) {
const user = await this.authUserRepository.findOneByOrFail({ id: userId });
if (mfaToken) {
if (mfaCode) {
const decryptedSecret = this.cipher.decrypt(user.mfaSecret!);
return this.totp.verifySecret({ secret: decryptedSecret, token: mfaToken });
return this.totp.verifySecret({ secret: decryptedSecret, mfaCode });
}
if (mfaRecoveryCode) {
@ -85,12 +86,27 @@ export class MfaService {
return await this.authUserRepository.save(user);
}
async disableMfa(userId: string, mfaToken: string) {
const isValidToken = await this.validateMfa(userId, mfaToken, undefined);
async disableMfaWithMfaCode(userId: string, mfaCode: string) {
const isValidToken = await this.validateMfa(userId, mfaCode, undefined);
if (!isValidToken) {
throw new InvalidMfaCodeError();
}
await this.disableMfaForUser(userId);
}
async disableMfaWithRecoveryCode(userId: string, recoveryCode: string) {
const isValidToken = await this.validateMfa(userId, undefined, recoveryCode);
if (!isValidToken) {
throw new InvalidMfaRecoveryCodeError();
}
await this.disableMfaForUser(userId);
}
private async disableMfaForUser(userId: string) {
await this.authUserRepository.update(userId, {
mfaEnabled: false,
mfaSecret: null,

View file

@ -23,10 +23,14 @@ export class TOTPService {
}).toString();
}
verifySecret({ secret, token, window = 2 }: { secret: string; token: string; window?: number }) {
verifySecret({
secret,
mfaCode,
window = 2,
}: { secret: string; mfaCode: string; window?: number }) {
return new OTPAuth.TOTP({
secret: OTPAuth.Secret.fromBase32(secret),
}).validate({ token, window }) === null
}).validate({ token: mfaCode, window }) === null
? false
: true;
}
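
Tying the MFA hunks together: a hypothetical dispatch for the disable endpoint, consistent with the MfaService methods above and the MFA.Disable request type below ({ mfaCode?, mfaRecoveryCode? }); the function and its structural parameter are assumptions, not taken from the diff.

async function disableMfa(
	userId: string,
	body: { mfaCode?: string; mfaRecoveryCode?: string },
	// Structural stand-in for MfaService.
	mfaService: {
		disableMfaWithMfaCode(userId: string, mfaCode: string): Promise<void>;
		disableMfaWithRecoveryCode(userId: string, recoveryCode: string): Promise<void>;
	},
) {
	if (body.mfaCode) {
		await mfaService.disableMfaWithMfaCode(userId, body.mfaCode); // 403 via InvalidMfaCodeError on mismatch
	} else if (body.mfaRecoveryCode) {
		await mfaService.disableMfaWithRecoveryCode(userId, body.mfaRecoveryCode); // 403 via InvalidMfaRecoveryCodeError
	} else {
		throw new Error('Either an MFA code or a recovery code is required'); // the real endpoint responds 400, per the tests below
	}
}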

View file

@ -228,7 +228,7 @@ export declare namespace PasswordResetRequest {
export type NewPassword = AuthlessRequest<
{},
{},
Pick<PublicUser, 'password'> & { token?: string; userId?: string; mfaToken?: string }
Pick<PublicUser, 'password'> & { token?: string; userId?: string; mfaCode?: string }
>;
}
@ -306,7 +306,7 @@ export type LoginRequest = AuthlessRequest<
{
email: string;
password: string;
mfaToken?: string;
mfaCode?: string;
mfaRecoveryCode?: string;
}
>;
@ -316,9 +316,9 @@ export type LoginRequest = AuthlessRequest<
// ----------------------------------
export declare namespace MFA {
type Verify = AuthenticatedRequest<{}, {}, { token: string }, {}>;
type Activate = AuthenticatedRequest<{}, {}, { token: string }, {}>;
type Disable = AuthenticatedRequest<{}, {}, { token: string }, {}>;
type Verify = AuthenticatedRequest<{}, {}, { mfaCode: string }, {}>;
type Activate = AuthenticatedRequest<{}, {}, { mfaCode: string }, {}>;
type Disable = AuthenticatedRequest<{}, {}, { mfaCode?: string; mfaRecoveryCode?: string }, {}>;
type Config = AuthenticatedRequest<{}, {}, { login: { enabled: boolean } }, {}>;
type ValidateRecoveryCode = AuthenticatedRequest<
{},

View file

@ -76,6 +76,7 @@ describe('TaskRunnerProcess', () => {
'N8N_VERSION',
'ENVIRONMENT',
'DEPLOYMENT_NAME',
'GENERIC_TIMEZONE',
])('should propagate %s from env as is', async (envVar) => {
jest.spyOn(authService, 'createGrantToken').mockResolvedValue('grantToken');
process.env[envVar] = 'custom value';

View file

@ -54,6 +54,7 @@ export class TaskRunnerProcess extends TypedEmitter<TaskRunnerProcessEventMap> {
private readonly passthroughEnvVars = [
'PATH',
'GENERIC_TIMEZONE',
'NODE_FUNCTION_ALLOW_BUILTIN',
'NODE_FUNCTION_ALLOW_EXTERNAL',
'N8N_SENTRY_DSN',

View file

@ -119,7 +119,7 @@ export class InstanceRiskReporter implements RiskReporter {
node: WorkflowEntity['nodes'][number];
workflow: WorkflowEntity;
}) {
const childNodeNames = workflow.connections[node.name]?.main[0].map((i) => i.node);
const childNodeNames = workflow.connections[node.name]?.main[0]?.map((i) => i.node);
if (!childNodeNames) return false;

View file

@ -65,6 +65,7 @@ import '@/external-secrets/external-secrets.controller.ee';
import '@/license/license.controller';
import '@/evaluation/test-definitions.controller.ee';
import '@/evaluation/metrics.controller';
import '@/evaluation/test-runs.controller.ee';
import '@/workflows/workflow-history/workflow-history.controller.ee';
import '@/workflows/workflows.controller';

View file

@ -1,72 +0,0 @@
// eslint-disable-next-line n8n-local-rules/misplaced-n8n-typeorm-import
import { In } from '@n8n/typeorm';
import { Service } from 'typedi';
import type { User } from '@/databases/entities/user';
import { SharedWorkflowRepository } from '@/databases/repositories/shared-workflow.repository';
import { WorkflowRepository } from '@/databases/repositories/workflow.repository';
import { UserService } from '@/services/user.service';
@Service()
export class UserOnboardingService {
constructor(
private readonly sharedWorkflowRepository: SharedWorkflowRepository,
private readonly workflowRepository: WorkflowRepository,
private readonly userService: UserService,
) {}
/**
* Check if user owns more than 15 workflows or more than 2 workflows with at least 2 nodes.
* If user does, set flag in its settings.
*/
async isBelowThreshold(user: User): Promise<boolean> {
let belowThreshold = true;
const skippedTypes = ['n8n-nodes-base.start', 'n8n-nodes-base.stickyNote'];
const ownedWorkflowsIds = await this.sharedWorkflowRepository
.find({
where: {
project: {
projectRelations: {
role: 'project:personalOwner',
userId: user.id,
},
},
role: 'workflow:owner',
},
select: ['workflowId'],
})
.then((ownedWorkflows) => ownedWorkflows.map(({ workflowId }) => workflowId));
if (ownedWorkflowsIds.length > 15) {
belowThreshold = false;
} else {
// just fetch workflows' nodes to keep memory footprint low
const workflows = await this.workflowRepository.find({
where: { id: In(ownedWorkflowsIds) },
select: ['nodes'],
});
// valid workflow: 2+ nodes without start node
const validWorkflowCount = workflows.reduce((counter, workflow) => {
if (counter <= 2 && workflow.nodes.length > 2) {
const nodes = workflow.nodes.filter((node) => !skippedTypes.includes(node.type));
if (nodes.length >= 2) {
return counter + 1;
}
}
return counter;
}, 0);
// more than 2 valid workflows required
belowThreshold = validWorkflowCount <= 2;
}
// user is above threshold --> set flag in settings
if (!belowThreshold) {
void this.userService.updateSettings(user.id, { isOnboarded: true });
}
return belowThreshold;
}
}

View file

@ -1,5 +1,5 @@
import type { IUserSettings } from 'n8n-workflow';
import { ApplicationError, ErrorReporterProxy as ErrorReporter } from 'n8n-workflow';
import { ApplicationError } from 'n8n-workflow';
import { Service } from 'typedi';
import type { User, AssignableRole } from '@/databases/entities/user';
@ -213,9 +213,8 @@ export class UserService {
),
);
} catch (error) {
ErrorReporter.error(error);
this.logger.error('Failed to create user shells', { userShells: createdUsers });
throw new InternalServerError('An error occurred during user creation');
throw new InternalServerError('An error occurred during user creation', error);
}
pendingUsersToInvite.forEach(({ email, id }) => createdUsers.set(email, id));

View file

@ -125,7 +125,7 @@ export class UserManagementMailer {
const error = toError(e);
throw new InternalServerError(`Please contact your administrator: ${error.message}`);
throw new InternalServerError(`Please contact your administrator: ${error.message}`, e);
}
}
@ -180,7 +180,7 @@ export class UserManagementMailer {
const error = toError(e);
throw new InternalServerError(`Please contact your administrator: ${error.message}`);
throw new InternalServerError(`Please contact your administrator: ${error.message}`, e);
}
}

View file

@ -58,7 +58,7 @@ export function isStringArray(value: unknown): value is string[] {
export const isIntegerString = (value: string) => /^\d+$/.test(value);
export function isObjectLiteral(item: unknown): item is { [key: string]: string } {
export function isObjectLiteral(item: unknown): item is { [key: string]: unknown } {
return typeof item === 'object' && item !== null && !Array.isArray(item);
}
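
A brief sketch of why the widened value type matters: with `unknown` the caller must narrow each property before use, which is exactly what the objectToError change below relies on; the parsed input is an arbitrary example.

import { isObjectLiteral } from '@/utils';

const candidate: unknown = JSON.parse('{"message":"boom","node":{"name":"HTTP Request"}}');
if (isObjectLiteral(candidate) && 'message' in candidate && typeof candidate.message === 'string') {
	console.log(candidate.message); // safe: narrowed to string
	// candidate.node still needs its own isObjectLiteral / typeof check before use.
}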

View file

@ -1,6 +1,11 @@
import type * as express from 'express';
import { mock } from 'jest-mock-extended';
import type { IWebhookData, IWorkflowExecuteAdditionalData, Workflow } from 'n8n-workflow';
import type { ITaskData } from 'n8n-workflow';
import {
type IWebhookData,
type IWorkflowExecuteAdditionalData,
type Workflow,
} from 'n8n-workflow';
import { v4 as uuid } from 'uuid';
import { generateNanoId } from '@/databases/utils/generators';
@ -43,12 +48,16 @@ describe('TestWebhooks', () => {
jest.useFakeTimers();
});
beforeEach(() => {
jest.clearAllMocks();
});
describe('needsWebhook()', () => {
const args: Parameters<typeof testWebhooks.needsWebhook> = [
const args: Parameters<typeof testWebhooks.needsWebhook>[0] = {
userId,
workflowEntity,
mock<IWorkflowExecuteAdditionalData>(),
];
additionalData: mock<IWorkflowExecuteAdditionalData>(),
};
test('if webhook is needed, should register then create webhook and return true', async () => {
const workflow = mock<Workflow>();
@ -56,7 +65,7 @@ describe('TestWebhooks', () => {
jest.spyOn(testWebhooks, 'toWorkflow').mockReturnValueOnce(workflow);
jest.spyOn(WebhookHelpers, 'getWorkflowWebhooks').mockReturnValue([webhook]);
const needsWebhook = await testWebhooks.needsWebhook(...args);
const needsWebhook = await testWebhooks.needsWebhook(args);
const [registerOrder] = registrations.register.mock.invocationCallOrder;
const [createOrder] = workflow.createWebhookIfNotExists.mock.invocationCallOrder;
@ -72,7 +81,7 @@ describe('TestWebhooks', () => {
jest.spyOn(registrations, 'register').mockRejectedValueOnce(new Error(msg));
registrations.getAllRegistrations.mockResolvedValue([]);
const needsWebhook = testWebhooks.needsWebhook(...args);
const needsWebhook = testWebhooks.needsWebhook(args);
await expect(needsWebhook).rejects.toThrowError(msg);
});
@ -81,10 +90,55 @@ describe('TestWebhooks', () => {
webhook.webhookDescription.restartWebhook = true;
jest.spyOn(WebhookHelpers, 'getWorkflowWebhooks').mockReturnValue([webhook]);
const result = await testWebhooks.needsWebhook(...args);
const result = await testWebhooks.needsWebhook(args);
expect(result).toBe(false);
});
test('returns false if a triggerToStartFrom with triggerData is given', async () => {
const workflow = mock<Workflow>();
jest.spyOn(testWebhooks, 'toWorkflow').mockReturnValueOnce(workflow);
jest.spyOn(WebhookHelpers, 'getWorkflowWebhooks').mockReturnValue([webhook]);
const needsWebhook = await testWebhooks.needsWebhook({
...args,
triggerToStartFrom: {
name: 'trigger',
data: mock<ITaskData>(),
},
});
expect(needsWebhook).toBe(false);
});
test('returns true, registers and then creates webhook if triggerToStartFrom is given with no triggerData', async () => {
// ARRANGE
const workflow = mock<Workflow>();
const webhook2 = mock<IWebhookData>({
node: 'trigger',
httpMethod,
path,
workflowId: workflowEntity.id,
userId,
});
jest.spyOn(testWebhooks, 'toWorkflow').mockReturnValueOnce(workflow);
jest.spyOn(WebhookHelpers, 'getWorkflowWebhooks').mockReturnValue([webhook, webhook2]);
// ACT
const needsWebhook = await testWebhooks.needsWebhook({
...args,
triggerToStartFrom: { name: 'trigger' },
});
// ASSERT
const [registerOrder] = registrations.register.mock.invocationCallOrder;
const [createOrder] = workflow.createWebhookIfNotExists.mock.invocationCallOrder;
expect(registerOrder).toBeLessThan(createOrder);
expect(registrations.register.mock.calls[0][0].webhook.node).toBe(webhook2.node);
expect(workflow.createWebhookIfNotExists.mock.calls[0][0].node).toBe(webhook2.node);
expect(needsWebhook).toBe(true);
});
});
describe('executeWebhook()', () => {

View file

@ -23,6 +23,7 @@ import type { TestWebhookRegistration } from '@/webhooks/test-webhook-registrati
import { TestWebhookRegistrationsService } from '@/webhooks/test-webhook-registrations.service';
import * as WebhookHelpers from '@/webhooks/webhook-helpers';
import * as WorkflowExecuteAdditionalData from '@/workflow-execute-additional-data';
import type { WorkflowRequest } from '@/workflows/workflow.request';
import type {
IWebhookResponseCallbackData,
@ -218,25 +219,48 @@ export class TestWebhooks implements IWebhookManager {
* Return whether activating a workflow requires listening for webhook calls.
* For every webhook call to listen for, also activate the webhook.
*/
async needsWebhook(
userId: string,
workflowEntity: IWorkflowDb,
additionalData: IWorkflowExecuteAdditionalData,
runData?: IRunData,
pushRef?: string,
destinationNode?: string,
) {
async needsWebhook(options: {
userId: string;
workflowEntity: IWorkflowDb;
additionalData: IWorkflowExecuteAdditionalData;
runData?: IRunData;
pushRef?: string;
destinationNode?: string;
triggerToStartFrom?: WorkflowRequest.ManualRunPayload['triggerToStartFrom'];
}) {
const {
userId,
workflowEntity,
additionalData,
runData,
pushRef,
destinationNode,
triggerToStartFrom,
} = options;
if (!workflowEntity.id) throw new WorkflowMissingIdError(workflowEntity);
const workflow = this.toWorkflow(workflowEntity);
const webhooks = WebhookHelpers.getWorkflowWebhooks(
let webhooks = WebhookHelpers.getWorkflowWebhooks(
workflow,
additionalData,
destinationNode,
true,
);
// If we have a preferred trigger with data, we don't have to listen for a
// webhook.
if (triggerToStartFrom?.data) {
return false;
}
// If we have a preferred trigger without data we only want to listen for
// that trigger, not the other ones.
if (triggerToStartFrom) {
webhooks = webhooks.filter((w) => w.node === triggerToStartFrom.name);
}
if (!webhooks.some((w) => w.webhookDescription.restartWebhook !== true)) {
return false; // no webhooks found to start a workflow
}
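
A call-site sketch of the two trigger-related outcomes, with structural placeholders for everything the caller already has (compare the WorkflowExecutionService call further down):

import type { ITaskData } from 'n8n-workflow';

// Structural stand-ins; the real types come from TestWebhooks and the options object above.
declare const testWebhooks: { needsWebhook(options: object): Promise<boolean> };
declare const baseOptions: object; // userId, workflowEntity, additionalData, …
declare const taskData: ITaskData;

// Trigger already has data: nothing to listen for, execution can start immediately.
await testWebhooks.needsWebhook({ ...baseOptions, triggerToStartFrom: { name: 'My Trigger', data: taskData } }); // false
// Trigger named but without data: only that trigger's webhooks are registered and listened for.
await testWebhooks.needsWebhook({ ...baseOptions, triggerToStartFrom: { name: 'My Trigger' } });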

View file

@ -762,7 +762,7 @@ export async function executeWebhook(
);
}
const internalServerError = new InternalServerError(e.message);
const internalServerError = new InternalServerError(e.message, e);
if (e instanceof ExecutionCancelledError) internalServerError.level = 'warning';
throw internalServerError;
});

View file

@ -52,7 +52,7 @@ import type { IWorkflowErrorData, UpdateExecutionPayload } from '@/interfaces';
import { NodeTypes } from '@/node-types';
import { Push } from '@/push';
import { WorkflowStatisticsService } from '@/services/workflow-statistics.service';
import { findSubworkflowStart, isWorkflowIdValid } from '@/utils';
import { findSubworkflowStart, isObjectLiteral, isWorkflowIdValid } from '@/utils';
import * as WorkflowHelpers from '@/workflow-helpers';
import { WorkflowRepository } from './databases/repositories/workflow.repository';
@ -80,11 +80,20 @@ export function objectToError(errorObject: unknown, workflow: Workflow): Error {
if (errorObject instanceof Error) {
// If it's already an Error instance, return it as is.
return errorObject;
} else if (errorObject && typeof errorObject === 'object' && 'message' in errorObject) {
} else if (
isObjectLiteral(errorObject) &&
'message' in errorObject &&
typeof errorObject.message === 'string'
) {
// If it's an object with a 'message' property, create a new Error instance.
let error: Error | undefined;
if ('node' in errorObject) {
const node = workflow.getNode((errorObject.node as { name: string }).name);
if (
'node' in errorObject &&
isObjectLiteral(errorObject.node) &&
typeof errorObject.node.name === 'string'
) {
const node = workflow.getNode(errorObject.node.name);
if (node) {
error = new NodeOperationError(
node,
@ -95,7 +104,7 @@ export function objectToError(errorObject: unknown, workflow: Workflow): Error {
}
if (error === undefined) {
error = new Error(errorObject.message as string);
error = new Error(errorObject.message);
}
if ('description' in errorObject) {

View file

@ -2,7 +2,14 @@
/* eslint-disable @typescript-eslint/no-unsafe-member-access */
/* eslint-disable @typescript-eslint/no-shadow */
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
import { InstanceSettings, WorkflowExecute } from 'n8n-core';
import * as a from 'assert/strict';
import {
DirectedGraph,
InstanceSettings,
WorkflowExecute,
filterDisabledNodes,
recreateNodeExecutionStack,
} from 'n8n-core';
import type {
ExecutionError,
IDeferredPromise,
@ -12,6 +19,7 @@ import type {
WorkflowExecuteMode,
WorkflowHooks,
IWorkflowExecutionDataProcess,
IRunExecutionData,
} from 'n8n-workflow';
import {
ErrorReporterProxy as ErrorReporter,
@ -203,6 +211,7 @@ export class WorkflowRunner {
}
/** Run the workflow in current process */
// eslint-disable-next-line complexity
private async runMainProcess(
executionId: string,
data: IWorkflowExecutionDataProcess,
@ -286,12 +295,50 @@ export class WorkflowRunner {
data.executionData,
);
workflowExecution = workflowExecute.processRunExecutionData(workflow);
} else if (data.triggerToStartFrom?.data && data.startNodes && !data.destinationNode) {
this.logger.debug(
`Execution ID ${executionId} had triggerToStartFrom. Starting from that trigger.`,
{ executionId },
);
const startNodes = data.startNodes.map((data) => {
const node = workflow.getNode(data.name);
a.ok(node, `Could not find a node named "${data.name}" in the workflow.`);
return node;
});
const runData = { [data.triggerToStartFrom.name]: [data.triggerToStartFrom.data] };
const { nodeExecutionStack, waitingExecution, waitingExecutionSource } =
recreateNodeExecutionStack(
filterDisabledNodes(DirectedGraph.fromWorkflow(workflow)),
new Set(startNodes),
runData,
data.pinData ?? {},
);
const executionData: IRunExecutionData = {
resultData: { runData, pinData },
executionData: {
contextData: {},
metadata: {},
nodeExecutionStack,
waitingExecution,
waitingExecutionSource,
},
};
const workflowExecute = new WorkflowExecute(additionalData, 'manual', executionData);
workflowExecution = workflowExecute.processRunExecutionData(workflow);
} else if (
data.runData === undefined ||
data.startNodes === undefined ||
data.startNodes.length === 0
) {
// Full Execution
// TODO: When the old partial execution logic is removed this block can
// be removed and the previous one can be merged into
// `workflowExecute.runPartialWorkflow2`.
// Partial executions then require either a destination node from which
// everything else can be derived, or a triggerToStartFrom with
// triggerData.
this.logger.debug(`Execution ID ${executionId} will run executing all nodes.`, {
executionId,
});

View file

@ -95,6 +95,7 @@ export class WorkflowExecutionService {
startNodes,
destinationNode,
dirtyNodeNames,
triggerToStartFrom,
}: WorkflowRequest.ManualRunPayload,
user: User,
pushRef?: string,
@ -117,14 +118,15 @@ export class WorkflowExecutionService {
) {
const additionalData = await WorkflowExecuteAdditionalData.getBase(user.id);
const needsWebhook = await this.testWebhooks.needsWebhook(
user.id,
workflowData,
const needsWebhook = await this.testWebhooks.needsWebhook({
userId: user.id,
workflowEntity: workflowData,
additionalData,
runData,
pushRef,
destinationNode,
);
triggerToStartFrom,
});
if (needsWebhook) return { waitingForWebhook: true };
}
@ -144,6 +146,7 @@ export class WorkflowExecutionService {
userId: user.id,
partialExecutionVersion: partialExecutionVersion ?? '0',
dirtyNodeNames,
triggerToStartFrom,
};
const hasRunData = (node: INode) => runData !== undefined && !!runData[node.name];

View file

@ -1,4 +1,11 @@
import type { INode, IConnections, IWorkflowSettings, IRunData, StartNodeData } from 'n8n-workflow';
import type {
INode,
IConnections,
IWorkflowSettings,
IRunData,
StartNodeData,
ITaskData,
} from 'n8n-workflow';
import type { IWorkflowDb } from '@/interfaces';
import type { AuthenticatedRequest, ListQuery } from '@/requests';
@ -23,6 +30,10 @@ export declare namespace WorkflowRequest {
startNodes?: StartNodeData[];
destinationNode?: string;
dirtyNodeNames?: string[];
triggerToStartFrom?: {
name: string;
data?: ITaskData;
};
};
type Create = AuthenticatedRequest<{}, {}, CreateUpdatePayload>;
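
An example of the added field on the manual-run payload; the node name is a placeholder and the other payload fields are elided since they are unchanged.

// Sketch of a manual-run payload using the new field.
const manualRunPayload = {
	// ...existing fields (workflowData, runData, startNodes, destinationNode, …) as before
	triggerToStartFrom: {
		name: 'When chat message received', // placeholder trigger node name
		// `data` (ITaskData) is optional: when present, needsWebhook() returns false and
		// WorkflowRunner seeds runData as { [name]: [data] } instead of waiting for a webhook call.
	},
};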

View file

@ -33,7 +33,6 @@ import * as ResponseHelper from '@/response-helper';
import { NamingService } from '@/services/naming.service';
import { ProjectService } from '@/services/project.service';
import { TagService } from '@/services/tag.service';
import { UserOnboardingService } from '@/services/user-onboarding.service';
import { UserManagementMailer } from '@/user-management/email';
import * as utils from '@/utils';
import * as WorkflowHelpers from '@/workflow-helpers';
@ -55,7 +54,6 @@ export class WorkflowsController {
private readonly workflowHistoryService: WorkflowHistoryService,
private readonly tagService: TagService,
private readonly namingService: NamingService,
private readonly userOnboardingService: UserOnboardingService,
private readonly workflowRepository: WorkflowRepository,
private readonly workflowService: WorkflowService,
private readonly workflowExecutionService: WorkflowExecutionService,
@ -213,13 +211,7 @@ export class WorkflowsController {
const requestedName = req.query.name ?? this.globalConfig.workflows.defaultName;
const name = await this.namingService.getUniqueWorkflowName(requestedName);
const onboardingFlowEnabled =
!this.globalConfig.workflows.onboardingFlowDisabled &&
!req.user.settings?.isOnboarded &&
(await this.userOnboardingService.isBelowThreshold(req.user));
return { name, onboardingFlowEnabled };
return { name };
}
@Get('/from-url')

View file

@ -89,7 +89,7 @@ describe('POST /login', () => {
const response = await testServer.authlessAgent.post('/login').send({
email: owner.email,
password: ownerPassword,
mfaToken: mfaService.totp.generateTOTP(secret),
mfaCode: mfaService.totp.generateTOTP(secret),
});
expect(response.statusCode).toBe(200);

View file

@ -0,0 +1,239 @@
import { Container } from 'typedi';
import type { TestDefinition } from '@/databases/entities/test-definition.ee';
import type { User } from '@/databases/entities/user';
import type { WorkflowEntity } from '@/databases/entities/workflow-entity';
import { ProjectRepository } from '@/databases/repositories/project.repository';
import { TestDefinitionRepository } from '@/databases/repositories/test-definition.repository.ee';
import { TestRunRepository } from '@/databases/repositories/test-run.repository.ee';
import { createUserShell } from '@test-integration/db/users';
import { createWorkflow } from '@test-integration/db/workflows';
import * as testDb from '@test-integration/test-db';
import type { SuperAgentTest } from '@test-integration/types';
import * as utils from '@test-integration/utils';
let authOwnerAgent: SuperAgentTest;
let workflowUnderTest: WorkflowEntity;
let otherWorkflow: WorkflowEntity;
let testDefinition: TestDefinition;
let otherTestDefinition: TestDefinition;
let ownerShell: User;
const testServer = utils.setupTestServer({
endpointGroups: ['workflows', 'evaluation'],
enabledFeatures: ['feat:sharing'],
});
beforeAll(async () => {
ownerShell = await createUserShell('global:owner');
authOwnerAgent = testServer.authAgentFor(ownerShell);
});
beforeEach(async () => {
await testDb.truncate(['TestDefinition', 'TestRun', 'Workflow', 'SharedWorkflow']);
workflowUnderTest = await createWorkflow({ name: 'workflow-under-test' }, ownerShell);
testDefinition = Container.get(TestDefinitionRepository).create({
name: 'test',
workflow: { id: workflowUnderTest.id },
});
await Container.get(TestDefinitionRepository).save(testDefinition);
otherWorkflow = await createWorkflow({ name: 'other-workflow' });
otherTestDefinition = Container.get(TestDefinitionRepository).create({
name: 'other-test',
workflow: { id: otherWorkflow.id },
});
await Container.get(TestDefinitionRepository).save(otherTestDefinition);
});
describe('GET /evaluation/test-definitions/:testDefinitionId/runs', () => {
test('should retrieve empty list of runs for a test definition', async () => {
const resp = await authOwnerAgent.get(`/evaluation/test-definitions/${testDefinition.id}/runs`);
expect(resp.statusCode).toBe(200);
expect(resp.body.data).toEqual([]);
});
test('should retrieve 404 if test definition does not exist', async () => {
const resp = await authOwnerAgent.get('/evaluation/test-definitions/123/runs');
expect(resp.statusCode).toBe(404);
});
test('should retrieve 404 if user does not have access to test definition', async () => {
const resp = await authOwnerAgent.get(
`/evaluation/test-definitions/${otherTestDefinition.id}/runs`,
);
expect(resp.statusCode).toBe(404);
});
test('should retrieve list of runs for a test definition', async () => {
const testRunRepository = Container.get(TestRunRepository);
const testRun = await testRunRepository.createTestRun(testDefinition.id);
const resp = await authOwnerAgent.get(`/evaluation/test-definitions/${testDefinition.id}/runs`);
expect(resp.statusCode).toBe(200);
expect(resp.body.data).toEqual([
expect.objectContaining({
id: testRun.id,
status: 'new',
testDefinitionId: testDefinition.id,
runAt: null,
completedAt: null,
}),
]);
});
test('should retrieve list of runs for a test definition with pagination', async () => {
const testRunRepository = Container.get(TestRunRepository);
const testRun1 = await testRunRepository.createTestRun(testDefinition.id);
// Mark as running just to make a slight delay between the runs
await testRunRepository.markAsRunning(testRun1.id);
const testRun2 = await testRunRepository.createTestRun(testDefinition.id);
// Fetch the first page
const resp = await authOwnerAgent.get(
`/evaluation/test-definitions/${testDefinition.id}/runs?take=1`,
);
expect(resp.statusCode).toBe(200);
expect(resp.body.data).toEqual([
expect.objectContaining({
id: testRun2.id,
status: 'new',
testDefinitionId: testDefinition.id,
runAt: null,
completedAt: null,
}),
]);
// Fetch the second page
const resp2 = await authOwnerAgent.get(
`/evaluation/test-definitions/${testDefinition.id}/runs?take=1&skip=1`,
);
expect(resp2.statusCode).toBe(200);
expect(resp2.body.data).toEqual([
expect.objectContaining({
id: testRun1.id,
status: 'running',
testDefinitionId: testDefinition.id,
runAt: expect.any(String),
completedAt: null,
}),
]);
});
});
describe('GET /evaluation/test-definitions/:testDefinitionId/runs/:id', () => {
test('should retrieve test run for a test definition', async () => {
const testRunRepository = Container.get(TestRunRepository);
const testRun = await testRunRepository.createTestRun(testDefinition.id);
const resp = await authOwnerAgent.get(
`/evaluation/test-definitions/${testDefinition.id}/runs/${testRun.id}`,
);
expect(resp.statusCode).toBe(200);
expect(resp.body.data).toEqual(
expect.objectContaining({
id: testRun.id,
status: 'new',
testDefinitionId: testDefinition.id,
runAt: null,
completedAt: null,
}),
);
});
test('should retrieve 404 if test run does not exist', async () => {
const resp = await authOwnerAgent.get(
`/evaluation/test-definitions/${testDefinition.id}/runs/123`,
);
expect(resp.statusCode).toBe(404);
});
test('should retrieve 404 if user does not have access to test definition', async () => {
const testRunRepository = Container.get(TestRunRepository);
const testRun = await testRunRepository.createTestRun(otherTestDefinition.id);
const resp = await authOwnerAgent.get(
`/evaluation/test-definitions/${otherTestDefinition.id}/runs/${testRun.id}`,
);
expect(resp.statusCode).toBe(404);
});
test('should retrieve test run for a test definition of a shared workflow', async () => {
const memberShell = await createUserShell('global:member');
const memberAgent = testServer.authAgentFor(memberShell);
const memberPersonalProject = await Container.get(
ProjectRepository,
).getPersonalProjectForUserOrFail(memberShell.id);
// Share workflow with a member
const sharingResponse = await authOwnerAgent
.put(`/workflows/${workflowUnderTest.id}/share`)
.send({ shareWithIds: [memberPersonalProject.id] });
expect(sharingResponse.statusCode).toBe(200);
// Create a test run for the shared workflow
const testRunRepository = Container.get(TestRunRepository);
const testRun = await testRunRepository.createTestRun(testDefinition.id);
// Check if member can retrieve the test run of a shared workflow
const resp = await memberAgent.get(
`/evaluation/test-definitions/${testDefinition.id}/runs/${testRun.id}`,
);
expect(resp.statusCode).toBe(200);
expect(resp.body.data).toEqual(
expect.objectContaining({
id: testRun.id,
}),
);
});
});
describe('DELETE /evaluation/test-definitions/:testDefinitionId/runs/:id', () => {
test('should delete test run for a test definition', async () => {
const testRunRepository = Container.get(TestRunRepository);
const testRun = await testRunRepository.createTestRun(testDefinition.id);
const resp = await authOwnerAgent.delete(
`/evaluation/test-definitions/${testDefinition.id}/runs/${testRun.id}`,
);
expect(resp.statusCode).toBe(200);
expect(resp.body.data).toEqual({ success: true });
const testRunAfterDelete = await testRunRepository.findOne({ where: { id: testRun.id } });
expect(testRunAfterDelete).toBeNull();
});
test('should retrieve 404 if test run does not exist', async () => {
const resp = await authOwnerAgent.delete(
`/evaluation/test-definitions/${testDefinition.id}/runs/123`,
);
expect(resp.statusCode).toBe(404);
});
test('should retrieve 404 if user does not have access to test definition', async () => {
const testRunRepository = Container.get(TestRunRepository);
const testRun = await testRunRepository.createTestRun(otherTestDefinition.id);
const resp = await authOwnerAgent.delete(
`/evaluation/test-definitions/${otherTestDefinition.id}/runs/${testRun.id}`,
);
expect(resp.statusCode).toBe(404);
});
});

View file

@ -55,8 +55,8 @@ describe('Enable MFA setup', () => {
secondCall.body.data.recoveryCodes.join(''),
);
const token = new TOTPService().generateTOTP(firstCall.body.data.secret);
await testServer.authAgentFor(owner).post('/mfa/disable').send({ token }).expect(200);
const mfaCode = new TOTPService().generateTOTP(firstCall.body.data.secret);
await testServer.authAgentFor(owner).post('/mfa/disable').send({ mfaCode }).expect(200);
const thirdCall = await testServer.authAgentFor(owner).get('/mfa/qr').expect(200);
@ -84,22 +84,22 @@ describe('Enable MFA setup', () => {
await testServer.authlessAgent.post('/mfa/verify').expect(401);
});
test('POST /verify should fail due to invalid MFA token', async () => {
await testServer.authAgentFor(owner).post('/mfa/verify').send({ token: '123' }).expect(400);
test('POST /verify should fail due to invalid MFA code', async () => {
await testServer.authAgentFor(owner).post('/mfa/verify').send({ mfaCode: '123' }).expect(400);
});
test('POST /verify should fail due to missing token parameter', async () => {
test('POST /verify should fail due to missing mfaCode parameter', async () => {
await testServer.authAgentFor(owner).get('/mfa/qr').expect(200);
await testServer.authAgentFor(owner).post('/mfa/verify').send({ token: '' }).expect(400);
await testServer.authAgentFor(owner).post('/mfa/verify').send({ mfaCode: '' }).expect(400);
});
test('POST /verify should validate MFA token', async () => {
test('POST /verify should validate MFA code', async () => {
const response = await testServer.authAgentFor(owner).get('/mfa/qr').expect(200);
const { secret } = response.body.data;
const token = new TOTPService().generateTOTP(secret);
const mfaCode = new TOTPService().generateTOTP(secret);
await testServer.authAgentFor(owner).post('/mfa/verify').send({ token }).expect(200);
await testServer.authAgentFor(owner).post('/mfa/verify').send({ mfaCode }).expect(200);
});
});
@ -108,13 +108,13 @@ describe('Enable MFA setup', () => {
await testServer.authlessAgent.post('/mfa/enable').expect(401);
});
test('POST /verify should fail due to missing token parameter', async () => {
await testServer.authAgentFor(owner).post('/mfa/verify').send({ token: '' }).expect(400);
test('POST /verify should fail due to missing mfaCode parameter', async () => {
await testServer.authAgentFor(owner).post('/mfa/verify').send({ mfaCode: '' }).expect(400);
});
test('POST /enable should fail due to invalid MFA token', async () => {
test('POST /enable should fail due to invalid MFA code', async () => {
await testServer.authAgentFor(owner).get('/mfa/qr').expect(200);
await testServer.authAgentFor(owner).post('/mfa/enable').send({ token: '123' }).expect(400);
await testServer.authAgentFor(owner).post('/mfa/enable').send({ mfaCode: '123' }).expect(400);
});
test('POST /enable should fail due to empty secret and recovery codes', async () => {
@ -125,10 +125,10 @@ describe('Enable MFA setup', () => {
const response = await testServer.authAgentFor(owner).get('/mfa/qr').expect(200);
const { secret } = response.body.data;
const token = new TOTPService().generateTOTP(secret);
const mfaCode = new TOTPService().generateTOTP(secret);
await testServer.authAgentFor(owner).post('/mfa/verify').send({ token }).expect(200);
await testServer.authAgentFor(owner).post('/mfa/enable').send({ token }).expect(200);
await testServer.authAgentFor(owner).post('/mfa/verify').send({ mfaCode }).expect(200);
await testServer.authAgentFor(owner).post('/mfa/enable').send({ mfaCode }).expect(200);
const user = await Container.get(AuthUserRepository).findOneOrFail({
where: {},
@ -145,13 +145,13 @@ describe('Enable MFA setup', () => {
const response = await testServer.authAgentFor(owner).get('/mfa/qr').expect(200);
const { secret } = response.body.data;
const token = new TOTPService().generateTOTP(secret);
const mfaCode = new TOTPService().generateTOTP(secret);
await testServer.authAgentFor(owner).post('/mfa/verify').send({ token }).expect(200);
await testServer.authAgentFor(owner).post('/mfa/verify').send({ mfaCode }).expect(200);
externalHooks.run.mockRejectedValue(new BadRequestError('Error message'));
await testServer.authAgentFor(owner).post('/mfa/enable').send({ token }).expect(400);
await testServer.authAgentFor(owner).post('/mfa/enable').send({ mfaCode }).expect(400);
const user = await Container.get(AuthUserRepository).findOneOrFail({
where: {},
@ -165,13 +165,13 @@ describe('Enable MFA setup', () => {
describe('Disable MFA setup', () => {
test('POST /disable should disable login with MFA', async () => {
const { user, rawSecret } = await createUserWithMfaEnabled();
const token = new TOTPService().generateTOTP(rawSecret);
const mfaCode = new TOTPService().generateTOTP(rawSecret);
await testServer
.authAgentFor(user)
.post('/mfa/disable')
.send({
token,
mfaCode,
})
.expect(200);
@ -184,21 +184,39 @@ describe('Disable MFA setup', () => {
expect(dbUser.mfaRecoveryCodes.length).toBe(0);
});
test('POST /disable should fail if invalid token is given', async () => {
test('POST /disable should fail if invalid MFA recovery code is given', async () => {
const { user } = await createUserWithMfaEnabled();
await testServer
.authAgentFor(user)
.post('/mfa/disable')
.send({
token: 'invalid token',
mfaRecoveryCode: 'invalid token',
})
.expect(403);
});
test('POST /disable should fail if invalid MFA code is given', async () => {
const { user } = await createUserWithMfaEnabled();
await testServer
.authAgentFor(user)
.post('/mfa/disable')
.send({
mfaCode: 'invalid token',
})
.expect(403);
});
test('POST /disable should fail if neither MFA code nor recovery code is sent', async () => {
const { user } = await createUserWithMfaEnabled();
await testServer.authAgentFor(user).post('/mfa/disable').send({ anotherParam: '' }).expect(400);
});
});
describe('Change password with MFA enabled', () => {
test('POST /change-password should fail due to missing MFA token', async () => {
test('POST /change-password should fail due to missing MFA code', async () => {
await createUserWithMfaEnabled();
const newPassword = randomValidPassword();
@ -210,7 +228,7 @@ describe('Change password with MFA enabled', () => {
.expect(404);
});
test('POST /change-password should fail due to invalid MFA token', async () => {
test('POST /change-password should fail due to invalid MFA code', async () => {
await createUserWithMfaEnabled();
const newPassword = randomValidPassword();
@ -221,7 +239,7 @@ describe('Change password with MFA enabled', () => {
.send({
password: newPassword,
token: resetPasswordToken,
mfaToken: randomInt(10),
mfaCode: randomInt(10),
})
.expect(404);
});
@ -235,14 +253,14 @@ describe('Change password with MFA enabled', () => {
const resetPasswordToken = Container.get(AuthService).generatePasswordResetToken(user);
const mfaToken = new TOTPService().generateTOTP(rawSecret);
const mfaCode = new TOTPService().generateTOTP(rawSecret);
await testServer.authlessAgent
.post('/change-password')
.send({
password: newPassword,
token: resetPasswordToken,
mfaToken,
mfaCode,
})
.expect(200);
@ -252,7 +270,7 @@ describe('Change password with MFA enabled', () => {
.send({
email: user.email,
password: newPassword,
mfaToken: new TOTPService().generateTOTP(rawSecret),
mfaCode: new TOTPService().generateTOTP(rawSecret),
})
.expect(200);
@ -315,7 +333,7 @@ describe('Login', () => {
await testServer.authlessAgent
.post('/login')
.send({ email: user.email, password: rawPassword, mfaToken: 'wrongvalue' })
.send({ email: user.email, password: rawPassword, mfaCode: 'wrongvalue' })
.expect(401);
});
@ -337,7 +355,7 @@ describe('Login', () => {
const response = await testServer.authlessAgent
.post('/login')
.send({ email: user.email, password: rawPassword, mfaToken: token })
.send({ email: user.email, password: rawPassword, mfaCode: token })
.expect(200);
const data = response.body.data;
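
Taken together, the hunks above rename every MFA-related request field from token / mfaToken to mfaCode, and the disable endpoint now also accepts mfaRecoveryCode as an alternative to a TOTP code. A minimal sketch of the payload shapes the updated tests exercise (the interface names are illustrative, not taken from the n8n source):

// Illustrative payload shapes after the rename; the names below are assumptions for this sketch only.
interface MfaVerifyPayload {
  mfaCode: string; // previously sent as `token`
}

interface MfaDisablePayload {
  mfaCode?: string; // a current TOTP code ...
  mfaRecoveryCode?: string; // ... or one of the stored recovery codes
}

interface LoginPayload {
  email: string;
  password: string;
  mfaCode?: string; // previously sent as `mfaToken`
}

interface ChangePasswordPayload {
  password: string;
  token: string; // the password-reset token keeps its name
  mfaCode: string; // previously sent as `mfaToken`
}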

View file

@ -76,6 +76,7 @@ const repositories = [
'Tag',
'TestDefinition',
'TestMetric',
'TestRun',
'User',
'Variables',
'Webhook',

View file

@ -281,6 +281,7 @@ export const setupTestServer = ({
case 'evaluation':
await import('@/evaluation/metrics.controller');
await import('@/evaluation/test-definitions.controller.ee');
await import('@/evaluation/test-runs.controller.ee');
break;
}
}

View file

@ -1,6 +1,6 @@
{
"name": "n8n-core",
"version": "1.70.0",
"version": "1.71.0",
"description": "Core functionality of n8n",
"main": "dist/index",
"types": "dist/index.d.ts",

View file

@ -448,7 +448,7 @@ export class DirectedGraph {
for (const [outputType, outputs] of Object.entries(iConnection)) {
for (const [outputIndex, conns] of outputs.entries()) {
for (const conn of conns) {
for (const conn of conns ?? []) {
// TODO: What's with the input type?
const { node: toNodeName, type: _inputType, index: inputIndex } = conn;
const to = workflow.getNode(toNodeName);

View file

@ -42,6 +42,28 @@ describe('DirectedGraph', () => {
);
});
// ┌─────┐    ┌─────┐──► null
// │node1├───►│node2│   ┌─────┐
// └─────┘    └─────┘──►│node3│
//                      └─────┘
//
test('linear workflow with null connections', () => {
// ARRANGE
const node1 = createNodeData({ name: 'Node1' });
const node2 = createNodeData({ name: 'Node2' });
const node3 = createNodeData({ name: 'Node3' });
// ACT
const graph = new DirectedGraph()
.addNodes(node1, node2, node3)
.addConnections({ from: node1, to: node2 }, { from: node2, to: node3, outputIndex: 1 });
// ASSERT
expect(DirectedGraph.fromWorkflow(graph.toWorkflow({ ...defaultWorkflowParameter }))).toEqual(
graph,
);
});
describe('getChildren', () => {
// ┌─────┐    ┌─────┐   ┌─────┐
// │node1├───►│node2├──►│node3│

View file

@ -36,6 +36,7 @@ import type {
CloseFunction,
StartNodeData,
NodeExecutionHint,
NodeInputConnections,
} from 'n8n-workflow';
import {
LoggerProxy as Logger,
@ -208,6 +209,9 @@ export class WorkflowExecute {
// Get the data of the incoming connections
incomingSourceData = { main: [] };
for (const connections of incomingNodeConnections.main) {
if (!connections) {
continue;
}
for (let inputIndex = 0; inputIndex < connections.length; inputIndex++) {
connection = connections[inputIndex];
@ -249,6 +253,9 @@ export class WorkflowExecute {
incomingNodeConnections = workflow.connectionsByDestinationNode[destinationNode];
if (incomingNodeConnections !== undefined) {
for (const connections of incomingNodeConnections.main) {
if (!connections) {
continue;
}
for (let inputIndex = 0; inputIndex < connections.length; inputIndex++) {
connection = connections[inputIndex];
@ -642,7 +649,7 @@ export class WorkflowExecute {
}
for (const connectionDataCheck of workflow.connectionsBySourceNode[parentNodeName].main[
outputIndexParent
]) {
] ?? []) {
checkOutputNodes.push(connectionDataCheck.node);
}
}
@ -661,7 +668,7 @@ export class WorkflowExecute {
) {
for (const inputData of workflow.connectionsByDestinationNode[connectionData.node].main[
inputIndex
]) {
] ?? []) {
if (inputData.node === parentNodeName) {
// Is the node we come from so its data will be available for sure
continue;
@ -681,7 +688,7 @@ export class WorkflowExecute {
if (
!this.incomingConnectionIsEmpty(
this.runExecutionData.resultData.runData,
workflow.connectionsByDestinationNode[inputData.node].main[0],
workflow.connectionsByDestinationNode[inputData.node].main[0] ?? [],
runIndex,
)
) {
@ -770,7 +777,7 @@ export class WorkflowExecute {
} else if (
this.incomingConnectionIsEmpty(
this.runExecutionData.resultData.runData,
workflow.connectionsByDestinationNode[nodeToAdd].main[0],
workflow.connectionsByDestinationNode[nodeToAdd].main[0] ?? [],
runIndex,
)
) {
@ -1066,7 +1073,7 @@ export class WorkflowExecute {
if (workflow.connectionsByDestinationNode.hasOwnProperty(executionNode.name)) {
// Check if the node has incoming connections
if (workflow.connectionsByDestinationNode[executionNode.name].hasOwnProperty('main')) {
let inputConnections: IConnection[][];
let inputConnections: NodeInputConnections;
let connectionIndex: number;
// eslint-disable-next-line prefer-const
@ -1194,7 +1201,7 @@ export class WorkflowExecute {
}
if (nodeSuccessData instanceof NodeExecutionOutput) {
const hints: NodeExecutionHint[] = nodeSuccessData.getHints();
const hints = (nodeSuccessData as NodeExecutionOutput).getHints();
executionHints.push(...hints);
}
@ -1586,7 +1593,7 @@ export class WorkflowExecute {
// Iterate over all the different connections of this output
for (connectionData of workflow.connectionsBySourceNode[executionNode.name].main[
outputIndex
]) {
] ?? []) {
if (!workflow.nodes.hasOwnProperty(connectionData.node)) {
throw new ApplicationError('Destination node not found', {
extra: {
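
The WorkflowExecute changes apply the same null-tolerance in two styles: skipping a whole null connection group with an early continue, and defaulting a possibly-null group to [] before handing it to helpers such as incomingConnectionIsEmpty. A rough sketch of both styles on a simplified type (illustrative only, not the actual n8n-workflow definitions):

// A "group" is the list of connections feeding one input index; unused groups may be null.
type SketchGroup = Array<{ node: string; type: string; index: number }> | null;

function firstGroupOrEmpty(main: SketchGroup[]): NonNullable<SketchGroup> {
  // Style 1: default a possibly-null group to an empty array before passing it on,
  // mirroring incomingConnectionIsEmpty(runData, main[0] ?? [], runIndex).
  return main[0] ?? [];
}

function countIncomingConnections(main: SketchGroup[]): number {
  let count = 0;
  for (const connections of main) {
    // Style 2: skip null groups entirely, mirroring the `if (!connections) continue;` guards.
    if (!connections) continue;
    count += connections.length;
  }
  return count;
}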

View file

@ -21,3 +21,4 @@ export { BinaryData } from './BinaryData/types';
export { isStoredMode as isValidNonDefaultMode } from './BinaryData/utils';
export * from './ExecutionMetadata';
export * from './node-execution-context';
export * from './PartialExecutionUtils';

View file

@ -1,6 +1,6 @@
{
"name": "n8n-design-system",
"version": "1.60.0",
"version": "1.61.0",
"main": "src/main.ts",
"import": "src/main.ts",
"scripts": {
@ -27,7 +27,7 @@
"@types/markdown-it-emoji": "^2.0.2",
"@types/markdown-it-link-attributes": "^3.0.5",
"@types/sanitize-html": "^2.11.0",
"@vitejs/plugin-vue": "^5.1.4",
"@vitejs/plugin-vue": "catalog:frontend",
"@vitest/coverage-v8": "catalog:frontend",
"autoprefixer": "^10.4.19",
"postcss": "^8.4.38",

View file

@ -2,6 +2,7 @@
import type { N8nLocale } from 'n8n-design-system/types';
export default {
'generic.retry': 'Retry',
'nds.auth.roles.owner': 'Owner',
'nds.userInfo.you': '(you)',
'nds.userSelect.selectUser': 'Select User',

View file

@ -1,12 +1,13 @@
{
"name": "n8n-editor-ui",
"version": "1.70.0",
"version": "1.71.0",
"description": "Workflow Editor UI for n8n",
"main": "index.js",
"scripts": {
"clean": "rimraf dist .turbo",
"build": "cross-env VUE_APP_PUBLIC_PATH=\"/{{BASE_PATH}}/\" NODE_OPTIONS=\"--max-old-space-size=8192\" vite build",
"typecheck": "vue-tsc --noEmit",
"typecheck:watch": "vue-tsc --watch --noEmit",
"dev": "pnpm serve",
"lint": "eslint src --ext .js,.ts,.vue --quiet",
"lintfix": "eslint src --ext .js,.ts,.vue --fix",
@ -94,6 +95,7 @@
"@types/lodash-es": "^4.17.6",
"@types/luxon": "^3.2.0",
"@types/uuid": "catalog:",
"@vitejs/plugin-vue": "catalog:frontend",
"@vitest/coverage-v8": "catalog:frontend",
"miragejs": "^0.1.48",
"unplugin-icons": "^0.19.0",

Some files were not shown because too many files have changed in this diff.