diff --git a/.dockerignore b/.dockerignore index d92acf83ba..b907ce51f2 100644 --- a/.dockerignore +++ b/.dockerignore @@ -10,3 +10,5 @@ packages/**/.turbo .git .github *.tsbuildinfo +packages/cli/dist/**/e2e.* +packages/cli/dist/ReloadNodesAndCredentials.* diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index fdc96ff873..91b6a5669c 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -23,10 +23,10 @@ A clear and concise description of what you expected to happen. **Environment (please complete the following information):** - OS: [e.g. Ubuntu Linux 22.04] -- n8n Version [e.g. 0.200.1] -- Node.js Version [e.g. 16.17.0] +- n8n Version [e.g. 1.0.1] +- Node.js Version [e.g. 18.16.0] - Database system [e.g. SQLite; n8n uses SQLite as default otherwise changed] -- Operation mode [e.g. own; operation modes are `own`, `main` and `queue`. Default is `own`] +- Operation mode [e.g. own; operation modes are `own`, `main` and `queue`. Default is `main`] **Additional context** Add any other context about the problem here. diff --git a/.github/scripts/check-tests.mjs b/.github/scripts/check-tests.mjs index b1b859c869..1694fad35d 100644 --- a/.github/scripts/check-tests.mjs +++ b/.github/scripts/check-tests.mjs @@ -17,6 +17,18 @@ const filterAsync = async (asyncPredicate, arr) => { return filterResults.filter(({shouldKeep}) => shouldKeep).map(({item}) => item); } +const isAbstractClass = (node) => { + if (ts.isClassDeclaration(node)) { + return node.modifiers?.some((modifier) => modifier.kind === ts.SyntaxKind.AbstractKeyword) || false; + } + return false; +} + +const isAbstractMethod = (node) => { + return ts.isMethodDeclaration(node) && Boolean(node.modifiers?.find((modifier) => modifier.kind === ts.SyntaxKind.AbstractKeyword)); +} + + // Function to check if a file has a function declaration, function expression, object method or class const hasFunctionOrClass = async filePath => { const fileContent = await readFileAsync(filePath, 'utf-8'); @@ -24,7 +36,13 @@ const hasFunctionOrClass = async filePath => { let hasFunctionOrClass = false; const visit = node => { - if (ts.isFunctionDeclaration(node) || ts.isFunctionExpression(node) || ts.isMethodDeclaration(node) || ts.isClassDeclaration(node)) { + if ( + ts.isFunctionDeclaration(node) + || ts.isFunctionExpression(node) + || ts.isArrowFunction(node) + || (ts.isMethodDeclaration(node) && !isAbstractMethod(node)) + || (ts.isClassDeclaration(node) && !isAbstractClass(node)) + ) { hasFunctionOrClass = true; } node.forEachChild(visit); diff --git a/.github/scripts/package.json b/.github/scripts/package.json index 3c5740b2f7..679eeebead 100644 --- a/.github/scripts/package.json +++ b/.github/scripts/package.json @@ -1,8 +1,10 @@ { "dependencies": { - "conventional-changelog-cli": "^2.2.2", - "glob": "^10.2.7", - "semver": "^7.3.8", + "add-stream": "^1.0.0", + "conventional-changelog": "^4.0.0", + "glob": "^10.3.0", + "semver": "^7.5.2", + "tempfile": "^5.0.0", "typescript": "*" } } diff --git a/.github/scripts/update-changelog.mjs b/.github/scripts/update-changelog.mjs new file mode 100644 index 0000000000..1b41a671be --- /dev/null +++ b/.github/scripts/update-changelog.mjs @@ -0,0 +1,42 @@ +import addStream from 'add-stream'; +import createTempFile from 'tempfile'; +import conventionalChangelog from 'conventional-changelog'; +import { resolve } from 'path'; +import { createReadStream, createWriteStream } from 'fs'; +import { dirname } from 'path'; +import { fileURLToPath } from 
'url'; +import stream from 'stream'; +import { promisify } from 'util'; +import packageJson from '../../package.json' assert { type: 'json' }; + +const pipeline = promisify(stream.pipeline); + +const baseDir = resolve(dirname(fileURLToPath(import.meta.url)), '../..'); +const fullChangelogFile = resolve(baseDir, 'CHANGELOG.md'); +const versionChangelogFile = resolve(baseDir, `CHANGELOG-${packageJson.version}.md`); + +const changelogStream = conventionalChangelog({ + preset: 'angular', + releaseCount: 1, + tagPrefix: 'n8n@', + transform: (commit, callback) => { + callback(null, commit.header.includes('(no-changelog)') ? undefined : commit); + }, +}).on('error', (err) => { + console.error(err.stack); + process.exit(1); +}); + +// We need to duplicate the stream here to pipe the changelog into two separate files +const stream1 = new stream.PassThrough(); +const stream2 = new stream.PassThrough(); +changelogStream.pipe(stream1); +changelogStream.pipe(stream2); + +await pipeline(stream1, createWriteStream(versionChangelogFile)); + +// Since we can't read and write from the same file at the same time, +// we use a temporary file to output the updated changelog to. +const tmpFile = createTempFile(); +await pipeline(stream2, addStream(createReadStream(fullChangelogFile)), createWriteStream(tmpFile)), + await pipeline(createReadStream(tmpFile), createWriteStream(fullChangelogFile)); diff --git a/.github/workflows/check-issue-template.yml b/.github/workflows/check-issue-template.yml new file mode 100644 index 0000000000..755307a1dd --- /dev/null +++ b/.github/workflows/check-issue-template.yml @@ -0,0 +1,18 @@ +name: Check Issue Template + +on: + issues: + types: [opened, edited] + +jobs: + check-issue: + name: Check Issue Template + runs-on: ubuntu-latest + steps: + - name: Run Check Issue Template + uses: n8n-io/GH-actions-playground@v1 + with: + repo-token: ${{ secrets.GITHUB_TOKEN }} + + + diff --git a/.github/workflows/checklist.yml b/.github/workflows/checklist.yml new file mode 100644 index 0000000000..dfcfca852f --- /dev/null +++ b/.github/workflows/checklist.yml @@ -0,0 +1,22 @@ +name: PR Checklist + +on: + pull_request_target: + types: + - opened + - synchronize + branches: + - master + +jobs: + checklist_job: + runs-on: ubuntu-latest + name: Checklist job + steps: + - name: Checkout + uses: actions/checkout@v1 + - name: Checklist + uses: wyozi/contextual-qa-checklist-action@master + with: + gh-token: ${{ secrets.GITHUB_TOKEN }} + comment-footer: Make sure to check off this list before asking for review. 
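The new .github/scripts/update-changelog.mjs above hinges on two stream tricks: duplicating a single conventional-changelog stream into two PassThrough streams (one per output file), and prepending the fresh entries to the existing CHANGELOG.md via a temporary file, because a file cannot be read and overwritten in the same pipeline. Below is a minimal sketch of that pattern using only Node's built-in modules; the file names and the in-memory source are placeholders for illustration, not the script's real inputs.

```
// prepend-sketch.mjs — illustrative only; file names below are placeholders.
import { PassThrough, Readable } from 'stream';
import { pipeline } from 'stream/promises';
import { createReadStream, createWriteStream } from 'fs';

// Stand-in for the conventional-changelog stream produced in update-changelog.mjs.
const source = Readable.from(['## 1.0.1\n\n* fix: example entry\n\n']);

// Duplicate the stream: one copy per destination, since a stream can only be consumed once.
const forVersionFile = new PassThrough();
const forFullChangelog = new PassThrough();
source.pipe(forVersionFile);
source.pipe(forFullChangelog);

// Copy 1: the per-version changelog file.
await pipeline(forVersionFile, createWriteStream('CHANGELOG-1.0.1.md'));

// Copy 2: prepend to the existing changelog. Write "new entries + old file" into a
// temporary file first, then copy it back, because reading and writing CHANGELOG.md
// in the same pipeline would truncate it before it is read.
const tmpFile = 'CHANGELOG.tmp.md';
await pipeline(
  async function* () {
    yield* forFullChangelog;                  // new entries first
    yield* createReadStream('CHANGELOG.md');  // then the previous content
  },
  createWriteStream(tmpFile),
);
await pipeline(createReadStream(tmpFile), createWriteStream('CHANGELOG.md'));
```

The actual script reaches the same result with the add-stream and tempfile packages that this diff adds to .github/scripts/package.json.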
diff --git a/.github/workflows/ci-master.yml b/.github/workflows/ci-master.yml index 9d5ca23b6c..15c83efd44 100644 --- a/.github/workflows/ci-master.yml +++ b/.github/workflows/ci-master.yml @@ -13,7 +13,7 @@ jobs: strategy: matrix: - node-version: [16.x, 18.x] + node-version: [18.x, 20.x] steps: - uses: actions/checkout@v3 diff --git a/.github/workflows/ci-pull-requests.yml b/.github/workflows/ci-pull-requests.yml index b58567227d..714e6b4938 100644 --- a/.github/workflows/ci-pull-requests.yml +++ b/.github/workflows/ci-pull-requests.yml @@ -1,4 +1,4 @@ -name: Build, unit/smoke test and lint branch +name: Build, unit test and lint branch on: [pull_request] @@ -107,29 +107,3 @@ jobs: env: ESLINT_PLUGIN_DIFF_COMMIT: ${{ github.event.pull_request.base.ref }} run: pnpm lint - - smoke-test: - name: E2E [Electron/Node 18] - uses: ./.github/workflows/e2e-reusable.yml - with: - branch: ${{ github.event.pull_request.base.ref }} - user: ${{ github.event.inputs.user || 'PR User' }} - spec: ${{ github.event.inputs.spec || 'e2e/0-smoke.cy.ts' }} - record: false - parallel: false - pr_number: ${{ github.event.number }} - containers: '[1]' - secrets: - CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }} - - checklist_job: - runs-on: ubuntu-latest - name: Checklist job - steps: - - name: Checkout - uses: actions/checkout@v1 - - name: Checklist - uses: wyozi/contextual-qa-checklist-action@master - with: - gh-token: ${{ secrets.GITHUB_TOKEN }} - comment-footer: Make sure to check off this list before asking for review. diff --git a/.github/workflows/docker-base-image.yml b/.github/workflows/docker-base-image.yml index 7351e30805..098da1d6b7 100644 --- a/.github/workflows/docker-base-image.yml +++ b/.github/workflows/docker-base-image.yml @@ -7,10 +7,11 @@ on: description: 'Node.js version to build this image with.' type: choice required: true - default: '16' + default: '18' options: - '16' - '18' + - '20' jobs: build: diff --git a/.github/workflows/docker-image-v1-rc.yml b/.github/workflows/docker-image-v1-rc.yml deleted file mode 100644 index 2fcb394a3b..0000000000 --- a/.github/workflows/docker-image-v1-rc.yml +++ /dev/null @@ -1,46 +0,0 @@ -name: Docker Image - V1 RC - -on: - schedule: - - cron: '0 2 * * *' - workflow_dispatch: - -jobs: - build: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - with: - fetch-depth: 0 - ref: release-v1 - - - uses: pnpm/action-setup@v2.2.4 - - uses: actions/setup-node@v3 - with: - node-version: 18.x - - run: npm install --prefix=.github/scripts --no-package-lock - - - name: Bump package versions to 1.0.0 - run: | - RELEASE_TYPE=major node .github/scripts/bump-versions.mjs - pnpm i --lockfile-only - - - uses: docker/setup-qemu-action@v2 - - uses: docker/setup-buildx-action@v2 - - - name: Login to DockerHub - uses: docker/login-action@v2 - with: - username: ${{ secrets.DOCKER_USERNAME }} - password: ${{ secrets.DOCKER_PASSWORD }} - - - name: Build and push - uses: docker/build-push-action@v4 - with: - context: . 
- file: ./docker/images/n8n-custom/Dockerfile - platforms: linux/amd64 - provenance: false - push: true - tags: ${{ secrets.DOCKER_USERNAME }}/n8n:1.0.0-rc - no-cache: true diff --git a/.github/workflows/docker-images.yml b/.github/workflows/docker-images.yml index 0064f7eabc..ce80e69188 100644 --- a/.github/workflows/docker-images.yml +++ b/.github/workflows/docker-images.yml @@ -8,10 +8,6 @@ jobs: build: runs-on: ubuntu-latest - strategy: - matrix: - docker-context: ['', '-debian'] - steps: - uses: actions/checkout@v3 @@ -41,12 +37,12 @@ jobs: - name: Build uses: docker/build-push-action@v4 with: - context: ./docker/images/n8n${{ matrix.docker-context }} + context: ./docker/images/n8n build-args: | N8N_VERSION=${{ steps.vars.outputs.tag }} platforms: linux/amd64,linux/arm64,linux/arm/v7 provenance: false push: true tags: | - ${{ secrets.DOCKER_USERNAME }}/n8n:${{ steps.vars.outputs.tag }}${{ matrix.docker-context }} - ghcr.io/${{ github.repository_owner }}/n8n:${{ steps.vars.outputs.tag }}${{ matrix.docker-context }} + ${{ secrets.DOCKER_USERNAME }}/n8n:${{ steps.vars.outputs.tag }} + ghcr.io/${{ github.repository_owner }}/n8n:${{ steps.vars.outputs.tag }} diff --git a/.github/workflows/release-create-pr.yml b/.github/workflows/release-create-pr.yml index a7942cd6ec..7617a35476 100644 --- a/.github/workflows/release-create-pr.yml +++ b/.github/workflows/release-create-pr.yml @@ -35,37 +35,33 @@ jobs: fetch-depth: 0 ref: ${{ github.event.inputs.base-branch }} - - name: Push the base branch - run: | - git checkout -b "release/${{ github.event.inputs.release-type }}" - git push -f origin "release/${{ github.event.inputs.release-type }}" - - uses: pnpm/action-setup@v2.2.4 - uses: actions/setup-node@v3 with: node-version: 18.x + - run: npm install --prefix=.github/scripts --no-package-lock - name: Bump package versions run: | echo "NEXT_RELEASE=$(node .github/scripts/bump-versions.mjs)" >> $GITHUB_ENV - pnpm i --lockfile-only env: RELEASE_TYPE: ${{ github.event.inputs.release-type }} - - name: Generate Changelog - run: npx conventional-changelog-cli -p angular -i CHANGELOG.md -s -t n8n@ + - name: Update Changelog + run: node .github/scripts/update-changelog.mjs + + - name: Push the base branch + run: | + git push -f origin refs/remotes/origin/${{ github.event.inputs.base-branch }}:refs/heads/release/${{ env.NEXT_RELEASE }} - name: Push the release branch, and Create the PR - uses: peter-evans/create-pull-request@v4 + uses: peter-evans/create-pull-request@v5 with: - base: 'release/${{ github.event.inputs.release-type }}' - branch: 'release/${{ env.NEXT_RELEASE }}' + base: 'release/${{ env.NEXT_RELEASE }}' + branch: '${{ env.NEXT_RELEASE }}-pr' commit-message: ':rocket: Release ${{ env.NEXT_RELEASE }}' delete-branch: true labels: 'release' title: ':rocket: Release ${{ env.NEXT_RELEASE }}' - # 'TODO: add generated changelog to the body. 
create a script to generate custom changelog' - body: '' - - # TODO: post PR link to slack + body-path: 'CHANGELOG-${{ env.NEXT_RELEASE }}.md' diff --git a/.github/workflows/release-publish.yml b/.github/workflows/release-publish.yml index 6a577c6da8..6882b4a9bb 100644 --- a/.github/workflows/release-publish.yml +++ b/.github/workflows/release-publish.yml @@ -5,8 +5,7 @@ on: types: - closed branches: - - 'release/patch' - - 'release/minor' + - 'release/*' jobs: publish-release: @@ -50,6 +49,7 @@ jobs: tag: 'n8n@${{env.RELEASE}}' prerelease: true makeLatest: false + body: ${{github.event.pull_request.body}} - name: Trigger a release note continue-on-error: true diff --git a/.gitignore b/.gitignore index b25164b547..3060b870c1 100644 --- a/.gitignore +++ b/.gitignore @@ -20,3 +20,4 @@ packages/**/.turbo cypress/videos/* cypress/screenshots/* *.swp +CHANGELOG-*.md diff --git a/CHANGELOG.md b/CHANGELOG.md index 012b11285d..4ee57a4b13 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,122 @@ +## [1.0.1](https://github.com/n8n-io/n8n/compare/n8n@1.0.0...n8n@1.0.1) (2023-07-05) + + +### Bug Fixes + +* **core:** Fix credentials test ([#6569](https://github.com/n8n-io/n8n/issues/6569)) ([8f244df](https://github.com/n8n-io/n8n/commit/8f244df0f9efcb087a78dd8d9481489c484c77b7)) +* **core:** Fix migrations for MySQL/MariaDB ([#6591](https://github.com/n8n-io/n8n/issues/6591)) ([b9da67b](https://github.com/n8n-io/n8n/commit/b9da67b653bf19f39d0d1506d3140c71432efaed)) +* **core:** Make node execution order configurable, and backward-compatible ([#6507](https://github.com/n8n-io/n8n/issues/6507)) ([d97edbc](https://github.com/n8n-io/n8n/commit/d97edbcffa966a693548eed033ac41d4a404fc23)) +* **core:** Update pruning related config defaults for v1 ([#6577](https://github.com/n8n-io/n8n/issues/6577)) ([ffb4e47](https://github.com/n8n-io/n8n/commit/ffb4e470b56222ae11891d478e96ea9c31675afe)) +* **editor:** Restore expression completions ([#6566](https://github.com/n8n-io/n8n/issues/6566)) ([516e572](https://github.com/n8n-io/n8n/commit/516e5728f73da6393defe7633533cc142c531c7a)) +* **editor:** Show retry information in execution list only when it exists ([#6587](https://github.com/n8n-io/n8n/issues/6587)) ([2580286](https://github.com/n8n-io/n8n/commit/2580286a198e53c3bf3db6e56faed301b606db07)) +* **Sendy Node:** Fix issue with brand id not being sent ([#6530](https://github.com/n8n-io/n8n/issues/6530)) ([b9e5211](https://github.com/n8n-io/n8n/commit/b9e52117355d939e77a2e3c59a7f67ac21e31b22)) +* **Strapi Node:** Fix issue with pagination ([#4991](https://github.com/n8n-io/n8n/issues/4991)) ([4253b48](https://github.com/n8n-io/n8n/commit/4253b48b26d1625cd2fb7f38159f9528cea45f34)) +* **XML Node:** Fix issue with not returning valid data ([#6565](https://github.com/n8n-io/n8n/issues/6565)) ([c2b9d5a](https://github.com/n8n-io/n8n/commit/c2b9d5ac506375ecc316e8c79a3ce0bf143e9406)) + + +### Features + +* Add missing input panels to some trigger nodes ([#6518](https://github.com/n8n-io/n8n/issues/6518)) ([3b12864](https://github.com/n8n-io/n8n/commit/3b12864460a458f23b57a6f3f4b40d0d364ef6e6)) + + + +# [1.0.0](https://github.com/n8n-io/n8n/compare/n8n@0.234.0...n8n@1.0.0) (2023-06-27) + + +### ⚠️ BREAKING CHANGES +* **core** Docker containers now run as the user `node` instead of `root` ([#6365](https://github.com/n8n-io/n8n/pull/6365)) ([f636616](https://github.com/n8n-io/n8n/commit/f6366160a476f42cb0612d10c5777a154d8665dd)) +* **core** Drop `debian` and `rhel7` images 
([#6365](https://github.com/n8n-io/n8n/pull/6365)) ([f636616](https://github.com/n8n-io/n8n/commit/f6366160a476f42cb0612d10c5777a154d8665dd)) +* **core** Drop support for deprecated `WEBHOOK_TUNNEL_URL` env variable ([#6363](https://github.com/n8n-io/n8n/pull/6363)) +* **core** Execution mode defaults to `main` now, instead of `own` ([#6363](https://github.com/n8n-io/n8n/pull/6363)) +* **core** Default push backend is `websocket` now, instead of `sse` ([#6363](https://github.com/n8n-io/n8n/pull/6363)) +* **core** Stop loading custom/community nodes from n8n's `node_modules` folder ([#6396](https://github.com/n8n-io/n8n/pull/6396)) ([a45a2c8](https://github.com/n8n-io/n8n/commit/a45a2c8c41eb7ffb2d62d5a8877c34eb45799fa9)) +* **core** User management is mandatory now. basic-auth, external-jwt-auth, and no-auth options are removed ([#6362](https://github.com/n8n-io/n8n/pull/6362)) ([8c008f5](https://github.com/n8n-io/n8n/commit/8c008f5d2217030e93d79e2baca0f2965d4d643e)) +* **core** Allow syntax errors and expression errors to fail executions ([#6352](https://github.com/n8n-io/n8n/pull/6352)) ([1197811](https://github.com/n8n-io/n8n/commit/1197811a1e3bc4ad7464d53d7e4860d0e62335a3)) +* **core** Drop support for `request` library and `N8N_USE_DEPRECATED_REQUEST_LIB` env variable ([#6413](https://github.com/n8n-io/n8n/pull/6413)) ([632ea27](https://github.com/n8n-io/n8n/commit/632ea275b7fa352d4af23339208bed66bb948da8)) +* **core** Make date extensions outputs match inputs ([#6435](https://github.com/n8n-io/n8n/pull/6435)) ([85372aa](https://github.com/n8n-io/n8n/commit/85372aabdfc52493504d4723ee1829e2ea15151d)) +* **core** Drop support for `executeSingle` method on nodes ([#4853](https://github.com/n8n-io/n8n/pull/4853)) ([9194d8b](https://github.com/n8n-io/n8n/commit/9194d8bb0ecf81e52d47ddfc4b75dc4e0efd492d)) +* **core** Change data processing for multi-input-nodes ([#4238](https://github.com/n8n-io/n8n/pull/4238)) ([b8458a5](https://github.com/n8n-io/n8n/commit/b8458a53f66b79903f0fdb168f6febdefb36d13a)) + + +### Bug Fixes + +* **core:** All migrations should run in a transaction ([#6519](https://github.com/n8n-io/n8n/issues/6519)) ([e152cfe](https://github.com/n8n-io/n8n/commit/e152cfe27cf3396f4b278614f1d46d9dd723f36e)) +* **Edit Image Node:** Fix transparent operation ([#6513](https://github.com/n8n-io/n8n/issues/6513)) ([4a4bcbc](https://github.com/n8n-io/n8n/commit/4a4bcbca298bf90c54d3597103e6a231855abbd2)) +* **Google Drive Node:** URL parsing ([#6527](https://github.com/n8n-io/n8n/issues/6527)) ([18aa9f3](https://github.com/n8n-io/n8n/commit/18aa9f3c62149cd603c560c2944c3146cd31e9e7)) +* **Google Sheets Node:** Incorrect read of 0 and false ([#6525](https://github.com/n8n-io/n8n/issues/6525)) ([b6202b5](https://github.com/n8n-io/n8n/commit/b6202b5585f864d97dc114e1e49a6a7dae5c674a)) +* **Merge Node:** Enrich input 2 fix ([#6526](https://github.com/n8n-io/n8n/issues/6526)) ([70822ce](https://github.com/n8n-io/n8n/commit/70822ce988543476719089c132e1d10af0d03e78)) +* **Notion Node:** Version fix ([#6531](https://github.com/n8n-io/n8n/issues/6531)) ([d3d8522](https://github.com/n8n-io/n8n/commit/d3d8522e8f0c702f56997667a252892296540450)) +* Show error when referencing node that exist but has not been executed ([#6496](https://github.com/n8n-io/n8n/issues/6496)) ([3db2707](https://github.com/n8n-io/n8n/commit/3db2707b8e47ea539f4f6c40497a928b51b40274)) + + +### Features + +* **core:** Change node execution order (most top-left one first) ([#6246](https://github.com/n8n-io/n8n/issues/6246)) 
([0287d5b](https://github.com/n8n-io/n8n/commit/0287d5becdce30a9c0de2a0d6ad4a0db50e198d7)) +* **core:** Remove conditional defaults in V1 release ([#6363](https://github.com/n8n-io/n8n/issues/6363)) ([f636616](https://github.com/n8n-io/n8n/commit/f6366160a476f42cb0612d10c5777a154d8665dd)) +* **editor:** Add v1 banner ([#6443](https://github.com/n8n-io/n8n/issues/6443)) ([0fe415a](https://github.com/n8n-io/n8n/commit/0fe415add2baa8e70e29087f7a90312bd1ab38af)) +* **editor:** SQL editor overhaul ([#6282](https://github.com/n8n-io/n8n/issues/6282)) ([beedfb6](https://github.com/n8n-io/n8n/commit/beedfb609ccde2ef202e08566580a2e1a6b6eafa)) +* **HTTP Request Node:** Notice about dev console ([#6516](https://github.com/n8n-io/n8n/issues/6516)) ([d431117](https://github.com/n8n-io/n8n/commit/d431117c9e5db9ff0ec6a1e7371bbf58698957c9)) + + + +# [0.236.0](https://github.com/n8n-io/n8n/compare/n8n@0.235.0...n8n@0.236.0) (2023-07-05) + + +### Bug Fixes + +* **Brevo Node:** Rename SendInBlue node to Brevo node ([#6521](https://github.com/n8n-io/n8n/issues/6521)) ([e63b398](https://github.com/n8n-io/n8n/commit/e63b3982d200ade34461b9159eb1e988f494c025)) +* **core:** Fix credentials test ([#6569](https://github.com/n8n-io/n8n/issues/6569)) ([1abd172](https://github.com/n8n-io/n8n/commit/1abd172f73e171e37c4cc3ccfaa395c6a46bdf48)) +* **core:** Fix migrations for MySQL/MariaDB ([#6591](https://github.com/n8n-io/n8n/issues/6591)) ([29882a6](https://github.com/n8n-io/n8n/commit/29882a6f39dddcd1c8c107c20a548ce8dc665cba)) +* **core:** Improve the performance of last 2 sqlite migrations ([#6522](https://github.com/n8n-io/n8n/issues/6522)) ([31cba87](https://github.com/n8n-io/n8n/commit/31cba87d307183d613890c7e6d627636b5280b52)) +* **core:** Remove typeorm patches, but still enforce transactions on every migration ([#6594](https://github.com/n8n-io/n8n/issues/6594)) ([9def7a7](https://github.com/n8n-io/n8n/commit/9def7a729b52cd6b4698c47e190e9e2bd7894da5)), closes [#6519](https://github.com/n8n-io/n8n/issues/6519) +* **core:** Use owners file to export wf owners ([#6547](https://github.com/n8n-io/n8n/issues/6547)) ([4b755fb](https://github.com/n8n-io/n8n/commit/4b755fb0b441a37eb804c9e70d4b071a341f7155)) +* **editor:** Show retry information in execution list only when it exists ([#6587](https://github.com/n8n-io/n8n/issues/6587)) ([3ca66be](https://github.com/n8n-io/n8n/commit/3ca66be38082e7a3866d53d07328be58e913067f)) +* **Salesforce Node:** Fix typo for adding a contact to a campaign ([#6598](https://github.com/n8n-io/n8n/issues/6598)) ([7ffe3cb](https://github.com/n8n-io/n8n/commit/7ffe3cb36adeecaca6cc6ddf067a701ee55c18d1)) +* **Strapi Node:** Fix issue with pagination ([#4991](https://github.com/n8n-io/n8n/issues/4991)) ([54444fa](https://github.com/n8n-io/n8n/commit/54444fa388da12d75553e66e53a8cf6f8a99b6fc)) +* **XML Node:** Fix issue with not returning valid data ([#6565](https://github.com/n8n-io/n8n/issues/6565)) ([cdd215f](https://github.com/n8n-io/n8n/commit/cdd215f642b47413c05f229e641074d0d4048f68)) + + +### Features + +* Add crowd.dev node and trigger node ([#6082](https://github.com/n8n-io/n8n/issues/6082)) ([238a78f](https://github.com/n8n-io/n8n/commit/238a78f0582dbf439a9799de0edcb2e9bef29978)) +* Add various source control improvements ([#6533](https://github.com/n8n-io/n8n/issues/6533)) ([68fdc20](https://github.com/n8n-io/n8n/commit/68fdc2078928be478a286774f2889feba1c3f5fe)) +* **HTTP Request Node:** New http request generic custom auth credential ([#5798](https://github.com/n8n-io/n8n/issues/5798)) 
([b17b458](https://github.com/n8n-io/n8n/commit/b17b4582a059104665888a2369c3e2256db4c1ed)) +* **Microsoft To Do Node:** Add an option to set a reminder when creating a task ([#5757](https://github.com/n8n-io/n8n/issues/5757)) ([b19833d](https://github.com/n8n-io/n8n/commit/b19833d673bd554ba86c0b234e8d13633912563a)) +* **Notion Node:** Add option to update icon when updating a page ([#5670](https://github.com/n8n-io/n8n/issues/5670)) ([225e849](https://github.com/n8n-io/n8n/commit/225e849960ce65d7f85b482f05fb3d7ffb4f9427)) +* **Strava Node:** Add hide_from_home field in Activity Update ([#5883](https://github.com/n8n-io/n8n/issues/5883)) ([7495e31](https://github.com/n8n-io/n8n/commit/7495e31a5b25e97683c7ea38225ba253d8fae8b7)) +* **Twitter Node:** Node overhaul ([#4788](https://github.com/n8n-io/n8n/issues/4788)) ([42721db](https://github.com/n8n-io/n8n/commit/42721dba80077fb796086a2bf0ecce256bf3a50f)) + + + +# [0.235.0](https://github.com/n8n-io/n8n/compare/n8n@0.234.0...n8n@0.235.0) (2023-06-28) + + +### Bug Fixes + +* **core:** Add empty credential value marker to show empty pw field ([#6532](https://github.com/n8n-io/n8n/issues/6532)) ([9294e2d](https://github.com/n8n-io/n8n/commit/9294e2da3c7c99c2099f5865e610fa7217bf06be)) +* **core:** All migrations should run in a transaction ([#6519](https://github.com/n8n-io/n8n/issues/6519)) ([e152cfe](https://github.com/n8n-io/n8n/commit/e152cfe27cf3396f4b278614f1d46d9dd723f36e)) +* **core:** Rename to credential_stubs and variable_stubs.json ([#6528](https://github.com/n8n-io/n8n/issues/6528)) ([b06462f](https://github.com/n8n-io/n8n/commit/b06462f4415bd1143a00b4a66e6e626da8c52196)) +* **Edit Image Node:** Fix transparent operation ([#6513](https://github.com/n8n-io/n8n/issues/6513)) ([4a4bcbc](https://github.com/n8n-io/n8n/commit/4a4bcbca298bf90c54d3597103e6a231855abbd2)) +* **editor:** Add default author name and email to source control settings ([#6543](https://github.com/n8n-io/n8n/issues/6543)) ([e1a02c7](https://github.com/n8n-io/n8n/commit/e1a02c76257de30e08878279dea33d7854d46938)) +* **editor:** Change default branchColor and remove label ([#6541](https://github.com/n8n-io/n8n/issues/6541)) ([186271e](https://github.com/n8n-io/n8n/commit/186271e939bca19ec9c94d9455e9430d8b8cf9d7)) +* **Google Drive Node:** URL parsing ([#6527](https://github.com/n8n-io/n8n/issues/6527)) ([d9ed0b3](https://github.com/n8n-io/n8n/commit/d9ed0b31b538320a67ee4e5c0cae34656c9f4334)) +* **Google Sheets Node:** Incorrect read of 0 and false ([#6525](https://github.com/n8n-io/n8n/issues/6525)) ([806d134](https://github.com/n8n-io/n8n/commit/806d13460240abe94843e569b1820cd8d0d8edd1)) +* **Merge Node:** Enrich input 2 fix ([#6526](https://github.com/n8n-io/n8n/issues/6526)) ([c82c7f1](https://github.com/n8n-io/n8n/commit/c82c7f19128df3a11d6d0f18e8d8dab57e6a3b8f)) +* **Notion Node:** Version fix ([#6531](https://github.com/n8n-io/n8n/issues/6531)) ([38dc784](https://github.com/n8n-io/n8n/commit/38dc784d2eed25aae777c5c3c3fda1a35e20bd24)) +* **Sendy Node:** Fix issue with brand id not being sent ([#6530](https://github.com/n8n-io/n8n/issues/6530)) ([2e8dfb8](https://github.com/n8n-io/n8n/commit/2e8dfb86d4636781b319d6190e8be12e7661ee16)) + + +### Features + +* Add missing input panels to some trigger nodes ([#6518](https://github.com/n8n-io/n8n/issues/6518)) ([fdf8a42](https://github.com/n8n-io/n8n/commit/fdf8a428ed38bb3ceb2bc0e50b002b34843d8fc4)) +* **editor:** Prevent saving of workflow when canvas is loading ([#6497](https://github.com/n8n-io/n8n/issues/6497)) 
([f89ef83](https://github.com/n8n-io/n8n/commit/f89ef83c766fafb1d0497ed91a74b93e8d2af1ec)) +* **editor:** SQL editor overhaul ([#6282](https://github.com/n8n-io/n8n/issues/6282)) ([beedfb6](https://github.com/n8n-io/n8n/commit/beedfb609ccde2ef202e08566580a2e1a6b6eafa)) +* **Google Drive Node:** Overhaul ([#5941](https://github.com/n8n-io/n8n/issues/5941)) ([d70a1cb](https://github.com/n8n-io/n8n/commit/d70a1cb0c82ee0a4b92776684c6c9079020d028f)) +* **HTTP Request Node:** Notice about dev console ([#6516](https://github.com/n8n-io/n8n/issues/6516)) ([d431117](https://github.com/n8n-io/n8n/commit/d431117c9e5db9ff0ec6a1e7371bbf58698957c9)) +* **Matrix Node:** Allow setting filename if the binary data has none ([#6536](https://github.com/n8n-io/n8n/issues/6536)) ([8b76e98](https://github.com/n8n-io/n8n/commit/8b76e980852062b192a95593035697c43d6f808e)) + + + # [0.234.0](https://github.com/n8n-io/n8n/compare/n8n@0.233.0...n8n@0.234.0) (2023-06-22) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 6960d5ce28..529bab60aa 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -54,8 +54,8 @@ The most important directories: ## Development setup -If you want to change or extend n8n you have to make sure that all needed -dependencies are installed and the packages get linked correctly. Here a short guide on how that can be done: +If you want to change or extend n8n you have to make sure that all the needed +dependencies are installed and the packages get linked correctly. Here's a short guide on how that can be done: ### Requirements @@ -69,7 +69,7 @@ dependencies are installed and the packages get linked correctly. Here a short g ##### pnpm workspaces -n8n is split up in different modules which are all in a single mono repository. +n8n is split up into different modules which are all in a single mono repository. To facilitate the module management, [pnpm workspaces](https://pnpm.io/workspaces) are used. This automatically sets up file-links between modules which depend on each other. @@ -113,24 +113,24 @@ No additional packages required. > **IMPORTANT**: All the steps below have to get executed at least once to get the development setup up and running! -Now that everything n8n requires to run is installed the actual n8n code can be +Now that everything n8n requires to run is installed, the actual n8n code can be checked out and set up: -1. [Fork](https://guides.github.com/activities/forking/#fork) the n8n repository +1. [Fork](https://guides.github.com/activities/forking/#fork) the n8n repository. -2. Clone your forked repository +2. Clone your forked repository: ``` git clone https://github.com//n8n.git ``` -3. Go into repository folder +3. Go into repository folder: ``` cd n8n ``` -4. Add the original n8n repository as `upstream` to your forked repository +4. Add the original n8n repository as `upstream` to your forked repository: ``` git remote add upstream https://github.com/n8n-io/n8n.git @@ -172,13 +172,13 @@ automatically build your code, restart the backend and refresh the frontend pnpm dev ``` 1. Hack, hack, hack -1. Check if everything still runs in production mode +1. Check if everything still runs in production mode: ``` pnpm build pnpm start ``` 1. Create tests -1. Run all [tests](#test-suite) +1. Run all [tests](#test-suite): ``` pnpm test ``` @@ -198,7 +198,7 @@ tests of all packages. ## Releasing -To start a release, trigger [this workflow](https://github.com/n8n-io/n8n/actions/workflows/release-create-pr.yml) with the SemVer release type, and select a branch to cut this release from. 
This workflow will then +To start a release, trigger [this workflow](https://github.com/n8n-io/n8n/actions/workflows/release-create-pr.yml) with the SemVer release type, and select a branch to cut this release from. This workflow will then: 1. Bump versions of packages that have changed or have dependencies that have changed 2. Update the Changelog @@ -206,7 +206,7 @@ To start a release, trigger [this workflow](https://github.com/n8n-io/n8n/action 4. Create a new pull-request to track any further changes that need to be included in this release Once ready to release, simply merge the pull-request. -This triggers [another workflow](https://github.com/n8n-io/n8n/actions/workflows/release-publish.yml), that will +This triggers [another workflow](https://github.com/n8n-io/n8n/actions/workflows/release-publish.yml), that will: 1. Build and publish the packages that have a new version in this release 2. Create a new tag, and GitHub release from squashed release commit @@ -226,4 +226,4 @@ That we do not have any potential problems later it is sadly necessary to sign a We used the most simple one that exists. It is from [Indie Open Source](https://indieopensource.com/forms/cla) which uses plain English and is literally only a few lines long. -A bot will automatically comment on the pull request once it got opened asking for the agreement to be signed. Before it did not get signed it is sadly not possible to merge it in. +Once a pull request is opened, an automated bot will promptly leave a comment requesting the agreement to be signed. The pull request can only be merged once the signature is obtained. diff --git a/cypress.config.js b/cypress.config.js index b6cea71083..cdcae02e65 100644 --- a/cypress.config.js +++ b/cypress.config.js @@ -1,10 +1,9 @@ -const fetch = require('node-fetch'); const { defineConfig } = require('cypress'); const BASE_URL = 'http://localhost:5678'; module.exports = defineConfig({ - projectId: "5hbsdn", + projectId: '5hbsdn', retries: { openMode: 0, runMode: 2, @@ -19,31 +18,5 @@ module.exports = defineConfig({ screenshotOnRunFailure: true, experimentalInteractiveRunEvents: true, experimentalSessionAndOrigin: true, - - setupNodeEvents(on, config) { - on('task', { - reset: () => fetch(BASE_URL + '/e2e/db/reset', { method: 'POST' }), - 'setup-owner': (payload) => { - try { - return fetch(BASE_URL + '/e2e/db/setup-owner', { - method: 'POST', - body: JSON.stringify(payload), - headers: { 'Content-Type': 'application/json' }, - }) - } catch (error) { - console.error("setup-owner failed with: ", error) - return null - } - }, - 'set-feature': ({ feature, enabled }) => { - return fetch(BASE_URL + `/e2e/feature/${feature}`, { - method: 'PATCH', - body: JSON.stringify({ enabled }), - headers: { 'Content-Type': 'application/json' } - }) - }, - }); - }, }, }); - diff --git a/cypress/constants.ts b/cypress/constants.ts index a7e2966577..dcb96b5ffd 100644 --- a/cypress/constants.ts +++ b/cypress/constants.ts @@ -1,9 +1,32 @@ -export const BACKEND_BASE_URL = 'http://localhost:5678'; +import { randFirstName, randLastName } from '@ngneat/falso'; +export const BASE_URL = 'http://localhost:5678'; +export const BACKEND_BASE_URL = 'http://localhost:5678'; export const N8N_AUTH_COOKIE = 'n8n-auth'; -export const DEFAULT_USER_EMAIL = 'nathan@n8n.io'; -export const DEFAULT_USER_PASSWORD = 'CypressTest123'; +const DEFAULT_USER_PASSWORD = 'CypressTest123'; + +export const INSTANCE_OWNER = { + email: 'nathan@n8n.io', + password: DEFAULT_USER_PASSWORD, + firstName: randFirstName(), + lastName: 
randLastName(), +}; + +export const INSTANCE_MEMBERS = [ + { + email: 'rebecca@n8n.io', + password: DEFAULT_USER_PASSWORD, + firstName: randFirstName(), + lastName: randLastName(), + }, + { + email: 'mustafa@n8n.io', + password: DEFAULT_USER_PASSWORD, + firstName: randFirstName(), + lastName: randLastName(), + }, +]; export const MANUAL_TRIGGER_NODE_NAME = 'Manual Trigger'; export const MANUAL_TRIGGER_NODE_DISPLAY_NAME = 'When clicking "Execute Workflow"'; diff --git a/cypress/e2e/0-smoke.cy.ts b/cypress/e2e/0-smoke.cy.ts deleted file mode 100644 index 09d9842922..0000000000 --- a/cypress/e2e/0-smoke.cy.ts +++ /dev/null @@ -1,28 +0,0 @@ -import { DEFAULT_USER_EMAIL, DEFAULT_USER_PASSWORD } from '../constants'; -import { randFirstName, randLastName } from '@ngneat/falso'; - -const email = DEFAULT_USER_EMAIL; -const password = DEFAULT_USER_PASSWORD; -const firstName = randFirstName(); -const lastName = randLastName(); - -describe('Authentication', () => { - beforeEach(() => { - cy.resetAll(); - }); - - it('should setup owner', () => { - cy.setup({ email, firstName, lastName, password }); - }); - - it('should sign user in', () => { - cy.setupOwner({ email, password, firstName, lastName }); - cy.on('uncaught:exception', (err, runnable) => { - expect(err.message).to.include('Not logged in'); - - return false; - }); - - cy.signin({ email, password }); - }); -}); diff --git a/cypress/e2e/1-workflows.cy.ts b/cypress/e2e/1-workflows.cy.ts index 65f545aa22..3f3dbc2cbf 100644 --- a/cypress/e2e/1-workflows.cy.ts +++ b/cypress/e2e/1-workflows.cy.ts @@ -8,10 +8,6 @@ const WorkflowPage = new WorkflowPageClass(); const multipleWorkflowsCount = 5; describe('Workflows', () => { - before(() => { - cy.skipSetup(); - }); - beforeEach(() => { cy.visit(WorkflowsPage.url); }); diff --git a/cypress/e2e/10-settings-log-streaming.cy.ts b/cypress/e2e/10-settings-log-streaming.cy.ts index 10b1d4d79f..1261940df2 100644 --- a/cypress/e2e/10-settings-log-streaming.cy.ts +++ b/cypress/e2e/10-settings-log-streaming.cy.ts @@ -1,22 +1,8 @@ -import { randFirstName, randLastName } from '@ngneat/falso'; -import { DEFAULT_USER_EMAIL, DEFAULT_USER_PASSWORD } from '../constants'; import { SettingsLogStreamingPage } from '../pages'; -const email = DEFAULT_USER_EMAIL; -const password = DEFAULT_USER_PASSWORD; -const firstName = randFirstName(); -const lastName = randLastName(); const settingsLogStreamingPage = new SettingsLogStreamingPage(); describe('Log Streaming Settings', () => { - before(() => { - cy.setup({ email, firstName, lastName, password }); - }); - - beforeEach(() => { - cy.signin({ email, password }); - }); - it('should show the unlicensed view when the feature is disabled', () => { cy.visit('/settings/log-streaming'); settingsLogStreamingPage.getters.getActionBoxUnlicensed().should('be.visible'); @@ -25,7 +11,7 @@ describe('Log Streaming Settings', () => { }); it('should show the licensed view when the feature is enabled', () => { - cy.enableFeature('feat:logStreaming'); + cy.enableFeature('logStreaming'); cy.visit('/settings/log-streaming'); settingsLogStreamingPage.getters.getActionBoxLicensed().should('be.visible'); settingsLogStreamingPage.getters.getAddFirstDestinationButton().should('be.visible'); diff --git a/cypress/e2e/10-undo-redo.cy.ts b/cypress/e2e/10-undo-redo.cy.ts index 90e9e558eb..059777e3b9 100644 --- a/cypress/e2e/10-undo-redo.cy.ts +++ b/cypress/e2e/10-undo-redo.cy.ts @@ -10,10 +10,6 @@ const WorkflowPage = new WorkflowPageClass(); const ndv = new NDV(); describe('Undo/Redo', () => { - before(() => { 
- cy.skipSetup(); - }); - beforeEach(() => { WorkflowPage.actions.visit(); }); @@ -125,17 +121,17 @@ describe('Undo/Redo', () => { WorkflowPage.getters .canvasNodes() .last() - .should('have.attr', 'style', 'left: 740px; top: 360px;'); + .should('have.attr', 'style', 'left: 740px; top: 320px;'); WorkflowPage.actions.hitUndo(); WorkflowPage.getters .canvasNodes() .last() - .should('have.attr', 'style', 'left: 640px; top: 260px;'); + .should('have.attr', 'style', 'left: 640px; top: 220px;'); WorkflowPage.actions.hitRedo(); WorkflowPage.getters .canvasNodes() .last() - .should('have.attr', 'style', 'left: 740px; top: 360px;'); + .should('have.attr', 'style', 'left: 740px; top: 320px;'); }); it('should undo/redo deleting a connection by pressing delete button', () => { @@ -285,7 +281,7 @@ describe('Undo/Redo', () => { WorkflowPage.getters .canvasNodes() .first() - .should('have.attr', 'style', 'left: 420px; top: 260px;'); + .should('have.attr', 'style', 'left: 420px; top: 220px;'); // Third undo: Should enable last node WorkflowPage.actions.hitUndo(); WorkflowPage.getters.disabledNodes().should('have.length', 0); @@ -298,7 +294,7 @@ describe('Undo/Redo', () => { WorkflowPage.getters .canvasNodes() .first() - .should('have.attr', 'style', 'left: 540px; top: 400px;'); + .should('have.attr', 'style', 'left: 540px; top: 360px;'); // Third redo: Should delete the Set node WorkflowPage.actions.hitRedo(); WorkflowPage.getters.canvasNodes().should('have.length', 3); diff --git a/cypress/e2e/11-inline-expression-editor.cy.ts b/cypress/e2e/11-inline-expression-editor.cy.ts index 4d9a46bb31..702dd2eac9 100644 --- a/cypress/e2e/11-inline-expression-editor.cy.ts +++ b/cypress/e2e/11-inline-expression-editor.cy.ts @@ -3,16 +3,14 @@ import { WorkflowPage as WorkflowPageClass } from '../pages/workflow'; const WorkflowPage = new WorkflowPageClass(); describe('Inline expression editor', () => { - before(() => { - cy.skipSetup(); - }); - beforeEach(() => { WorkflowPage.actions.visit(); WorkflowPage.actions.addInitialNodeToCanvas('Manual'); WorkflowPage.actions.addNodeToCanvas('Hacker News'); WorkflowPage.actions.openNode('Hacker News'); WorkflowPage.actions.openInlineExpressionEditor(); + + cy.on('uncaught:exception', (err) => err.name !== 'ExpressionError'); }); it('should resolve primitive resolvables', () => { diff --git a/cypress/e2e/12-canvas-actions.cy.ts b/cypress/e2e/12-canvas-actions.cy.ts index 40bd9d168e..d336294f48 100644 --- a/cypress/e2e/12-canvas-actions.cy.ts +++ b/cypress/e2e/12-canvas-actions.cy.ts @@ -11,10 +11,6 @@ import { WorkflowPage as WorkflowPageClass } from '../pages/workflow'; const WorkflowPage = new WorkflowPageClass(); describe('Canvas Actions', () => { - before(() => { - cy.skipSetup(); - }); - beforeEach(() => { WorkflowPage.actions.visit(); }); @@ -103,7 +99,7 @@ describe('Canvas Actions', () => { WorkflowPage.getters .canvasNodes() .last() - .should('have.attr', 'style', 'left: 860px; top: 260px;'); + .should('have.attr', 'style', 'left: 860px; top: 220px;'); }); it('should delete connections by pressing the delete button', () => { diff --git a/cypress/e2e/12-canvas.cy.ts b/cypress/e2e/12-canvas.cy.ts index 65428acda8..625b8b98f8 100644 --- a/cypress/e2e/12-canvas.cy.ts +++ b/cypress/e2e/12-canvas.cy.ts @@ -5,9 +5,7 @@ import { SCHEDULE_TRIGGER_NODE_NAME, SET_NODE_NAME, SWITCH_NODE_NAME, - IF_NODE_NAME, MERGE_NODE_NAME, - HTTP_REQUEST_NODE_NAME, } from './../constants'; import { WorkflowPage as WorkflowPageClass } from '../pages/workflow'; @@ -21,10 +19,6 @@ const 
ZOOM_OUT_X2_FACTOR = 0.64; const RENAME_NODE_NAME = 'Something else'; describe('Canvas Node Manipulation and Navigation', () => { - before(() => { - cy.skipSetup(); - }); - beforeEach(() => { WorkflowPage.actions.visit(); }); @@ -168,7 +162,7 @@ describe('Canvas Node Manipulation and Navigation', () => { WorkflowPage.getters .canvasNodes() .last() - .should('have.attr', 'style', 'left: 740px; top: 360px;'); + .should('have.attr', 'style', 'left: 740px; top: 320px;'); }); it('should zoom in', () => { diff --git a/cypress/e2e/13-pinning.cy.ts b/cypress/e2e/13-pinning.cy.ts index 004e728845..7f445b52d2 100644 --- a/cypress/e2e/13-pinning.cy.ts +++ b/cypress/e2e/13-pinning.cy.ts @@ -10,10 +10,6 @@ const workflowPage = new WorkflowPage(); const ndv = new NDV(); describe('Data pinning', () => { - before(() => { - cy.skipSetup(); - }); - beforeEach(() => { workflowPage.actions.visit(); }); diff --git a/cypress/e2e/14-data-transformation-expressions.cy.ts b/cypress/e2e/14-data-transformation-expressions.cy.ts index 43dbded37a..099e79ae7d 100644 --- a/cypress/e2e/14-data-transformation-expressions.cy.ts +++ b/cypress/e2e/14-data-transformation-expressions.cy.ts @@ -4,10 +4,6 @@ const wf = new WorkflowPage(); const ndv = new NDV(); describe('Data transformation expressions', () => { - before(() => { - cy.skipSetup(); - }); - beforeEach(() => { wf.actions.visit(); diff --git a/cypress/e2e/14-mapping.cy.ts b/cypress/e2e/14-mapping.cy.ts index c7035396d3..9ccda6b6a4 100644 --- a/cypress/e2e/14-mapping.cy.ts +++ b/cypress/e2e/14-mapping.cy.ts @@ -9,10 +9,6 @@ const workflowPage = new WorkflowPage(); const ndv = new NDV(); describe('Data mapping', () => { - before(() => { - cy.skipSetup(); - }); - beforeEach(() => { workflowPage.actions.visit(); @@ -192,7 +188,11 @@ describe('Data mapping', () => { ndv.getters .inlineExpressionEditorInput() .should('have.text', `{{ $('${SCHEDULE_TRIGGER_NODE_NAME}').item.json.input[0].count }}`); - ndv.getters.parameterExpressionPreview('value').should('not.exist'); + ndv.getters + .parameterExpressionPreview('value') + .invoke('text') + .invoke('replace', /\u00a0/g, ' ') + .should('equal', '[ERROR: no data, execute "Schedule Trigger" node first]'); ndv.actions.switchInputMode('Table'); ndv.actions.mapDataFromHeader(1, 'value'); diff --git a/cypress/e2e/15-scheduler-node.cy.ts b/cypress/e2e/15-scheduler-node.cy.ts index cc4b3d7758..d58a541652 100644 --- a/cypress/e2e/15-scheduler-node.cy.ts +++ b/cypress/e2e/15-scheduler-node.cy.ts @@ -6,10 +6,6 @@ const workflowPage = new WorkflowPage(); const ndv = new NDV(); describe('Schedule Trigger node', async () => { - before(() => { - cy.skipSetup(); - }); - beforeEach(() => { workflowPage.actions.visit(); }); diff --git a/cypress/e2e/16-webhook-node.cy.ts b/cypress/e2e/16-webhook-node.cy.ts index 178350a32b..2ba59a8cfb 100644 --- a/cypress/e2e/16-webhook-node.cy.ts +++ b/cypress/e2e/16-webhook-node.cy.ts @@ -92,10 +92,6 @@ const simpleWebhookCall = (options: SimpleWebhookCallOptions) => { }; describe('Webhook Trigger node', async () => { - before(() => { - cy.skipSetup(); - }); - beforeEach(() => { workflowPage.actions.visit(); diff --git a/cypress/e2e/17-sharing.cy.ts b/cypress/e2e/17-sharing.cy.ts index 6cfb226005..cf0f4ccd35 100644 --- a/cypress/e2e/17-sharing.cy.ts +++ b/cypress/e2e/17-sharing.cy.ts @@ -1,4 +1,4 @@ -import { DEFAULT_USER_EMAIL, DEFAULT_USER_PASSWORD } from '../constants'; +import { INSTANCE_MEMBERS, INSTANCE_OWNER } from '../constants'; import { CredentialsModal, CredentialsPage, @@ -28,47 +28,12 @@ const 
workflowPage = new WorkflowPage(); const workflowSharingModal = new WorkflowSharingModal(); const ndv = new NDV(); -const instanceOwner = { - email: `${DEFAULT_USER_EMAIL}one`, - password: DEFAULT_USER_PASSWORD, - firstName: 'User', - lastName: 'U1', -}; - -const users = [ - { - email: `${DEFAULT_USER_EMAIL}two`, - password: DEFAULT_USER_PASSWORD, - firstName: 'User', - lastName: 'U2', - }, - { - email: `${DEFAULT_USER_EMAIL}three`, - password: DEFAULT_USER_PASSWORD, - firstName: 'User', - lastName: 'U3', - }, -]; - -describe('Sharing', () => { - before(() => { - cy.setupOwner(instanceOwner); - }); - - beforeEach(() => { - cy.on('uncaught:exception', (err, runnable) => { - expect(err.message).to.include('Not logged in'); - return false; - }); - }); - - it('should invite User U2 and User U3 to instance', () => { - cy.inviteUsers({ instanceOwner, users }); - }); +describe('Sharing', { disableAutoLogin: true }, () => { + before(() => cy.enableFeature('sharing', true)); let workflowW2Url = ''; it('should create C1, W1, W2, share W1 with U3, as U2', () => { - cy.signin(users[0]); + cy.signin(INSTANCE_MEMBERS[0]); cy.visit(credentialsPage.url); credentialsPage.getters.emptyListCreateCredentialButton().click(); @@ -87,7 +52,7 @@ describe('Sharing', () => { ndv.actions.close(); workflowPage.actions.openShareModal(); - workflowSharingModal.actions.addUser(users[1].email); + workflowSharingModal.actions.addUser(INSTANCE_MEMBERS[1].email); workflowSharingModal.actions.save(); workflowPage.actions.saveWorkflowOnButtonClick(); @@ -100,23 +65,23 @@ describe('Sharing', () => { }); it('should create C2, share C2 with U1 and U2, as U3', () => { - cy.signin(users[1]); + cy.signin(INSTANCE_MEMBERS[1]); cy.visit(credentialsPage.url); credentialsPage.getters.emptyListCreateCredentialButton().click(); - credentialsModal.getters.newCredentialTypeOption('Airtable API').click(); + credentialsModal.getters.newCredentialTypeOption('Airtable Personal Access Token API').click(); credentialsModal.getters.newCredentialTypeButton().click(); - credentialsModal.getters.connectionParameter('API Key').type('1234567890'); + credentialsModal.getters.connectionParameter('Access Token').type('1234567890'); credentialsModal.actions.setName('Credential C2'); credentialsModal.actions.changeTab('Sharing'); - credentialsModal.actions.addUser(instanceOwner.email); - credentialsModal.actions.addUser(users[0].email); + credentialsModal.actions.addUser(INSTANCE_OWNER.email); + credentialsModal.actions.addUser(INSTANCE_MEMBERS[0].email); credentialsModal.actions.save(); credentialsModal.actions.close(); }); it('should open W1, add node using C2 as U3', () => { - cy.signin(users[1]); + cy.signin(INSTANCE_MEMBERS[1]); cy.visit(workflowsPage.url); workflowsPage.getters.workflowCards().should('have.length', 1); @@ -136,7 +101,7 @@ describe('Sharing', () => { }); it('should not have access to W2, as U3', () => { - cy.signin(users[1]); + cy.signin(INSTANCE_MEMBERS[1]); cy.visit(workflowW2Url); cy.waitForLoad(); @@ -145,7 +110,7 @@ describe('Sharing', () => { }); it('should have access to W1, W2, as U1', () => { - cy.signin(instanceOwner); + cy.signin(INSTANCE_OWNER); cy.visit(workflowsPage.url); workflowsPage.getters.workflowCards().should('have.length', 2); @@ -165,7 +130,7 @@ describe('Sharing', () => { }); it('should automatically test C2 when opened by U2 sharee', () => { - cy.signin(users[0]); + cy.signin(INSTANCE_MEMBERS[0]); cy.visit(credentialsPage.url); credentialsPage.getters.credentialCard('Credential C2').click(); diff --git 
a/cypress/e2e/17-workflow-tags.cy.ts b/cypress/e2e/17-workflow-tags.cy.ts index d18c48cf7b..56b548747d 100644 --- a/cypress/e2e/17-workflow-tags.cy.ts +++ b/cypress/e2e/17-workflow-tags.cy.ts @@ -5,10 +5,6 @@ const wf = new WorkflowPage(); const TEST_TAGS = ['Tag 1', 'Tag 2', 'Tag 3', 'Tag 4', 'Tag 5']; describe('Workflow tags', () => { - before(() => { - cy.skipSetup(); - }); - beforeEach(() => { wf.actions.visit(); }); diff --git a/cypress/e2e/18-user-management.cy.ts b/cypress/e2e/18-user-management.cy.ts index 9f51f561d9..6af5ba6b60 100644 --- a/cypress/e2e/18-user-management.cy.ts +++ b/cypress/e2e/18-user-management.cy.ts @@ -1,6 +1,5 @@ -import { MainSidebar } from './../pages/sidebar/main-sidebar'; -import { DEFAULT_USER_EMAIL, DEFAULT_USER_PASSWORD } from '../constants'; -import { SettingsSidebar, SettingsUsersPage, WorkflowPage, WorkflowsPage } from '../pages'; +import { INSTANCE_MEMBERS, INSTANCE_OWNER } from '../constants'; +import { SettingsUsersPage, WorkflowPage } from '../pages'; import { PersonalSettingsPage } from '../pages/settings-personal'; /** @@ -15,28 +14,6 @@ import { PersonalSettingsPage } from '../pages/settings-personal'; * C2 - Credential owned by User C, shared with User A and User B */ -const instanceOwner = { - email: `${DEFAULT_USER_EMAIL}A`, - password: DEFAULT_USER_PASSWORD, - firstName: 'User', - lastName: 'A', -}; - -const users = [ - { - email: `${DEFAULT_USER_EMAIL}B`, - password: DEFAULT_USER_PASSWORD, - firstName: 'User', - lastName: 'B', - }, - { - email: `${DEFAULT_USER_EMAIL}C`, - password: DEFAULT_USER_PASSWORD, - firstName: 'User', - lastName: 'C', - }, -]; - const updatedPersonalData = { newFirstName: 'Something', newLastName: 'Else', @@ -49,47 +26,38 @@ const usersSettingsPage = new SettingsUsersPage(); const workflowPage = new WorkflowPage(); const personalSettingsPage = new PersonalSettingsPage(); -describe('User Management', () => { - before(() => { - cy.setupOwner(instanceOwner); - }); - - beforeEach(() => { - cy.on('uncaught:exception', (err, runnable) => { - expect(err.message).to.include('Not logged in'); - return false; - }); - }); - - it(`should invite User B and User C to instance`, () => { - cy.inviteUsers({ instanceOwner, users }); - }); +describe('User Management', { disableAutoLogin: true }, () => { + before(() => cy.enableFeature('sharing')); it('should prevent non-owners to access UM settings', () => { - usersSettingsPage.actions.loginAndVisit(users[0].email, users[0].password, false); + usersSettingsPage.actions.loginAndVisit( + INSTANCE_MEMBERS[0].email, + INSTANCE_MEMBERS[0].password, + false, + ); }); it('should allow instance owner to access UM settings', () => { - usersSettingsPage.actions.loginAndVisit(instanceOwner.email, instanceOwner.password, true); + usersSettingsPage.actions.loginAndVisit(INSTANCE_OWNER.email, INSTANCE_OWNER.password, true); }); it('should properly render UM settings page for instance owners', () => { - usersSettingsPage.actions.loginAndVisit(instanceOwner.email, instanceOwner.password, true); + usersSettingsPage.actions.loginAndVisit(INSTANCE_OWNER.email, INSTANCE_OWNER.password, true); // All items in user list should be there usersSettingsPage.getters.userListItems().should('have.length', 3); // List item for current user should have the `Owner` badge usersSettingsPage.getters - .userItem(instanceOwner.email) + .userItem(INSTANCE_OWNER.email) .find('.n8n-badge:contains("Owner")') .should('exist'); // Other users list items should contain action pop-up list - 
usersSettingsPage.getters.userActionsToggle(users[0].email).should('exist'); - usersSettingsPage.getters.userActionsToggle(users[1].email).should('exist'); + usersSettingsPage.getters.userActionsToggle(INSTANCE_MEMBERS[0].email).should('exist'); + usersSettingsPage.getters.userActionsToggle(INSTANCE_MEMBERS[1].email).should('exist'); }); it('should delete user and their data', () => { - usersSettingsPage.actions.loginAndVisit(instanceOwner.email, instanceOwner.password, true); - usersSettingsPage.actions.opedDeleteDialog(users[0].email); + usersSettingsPage.actions.loginAndVisit(INSTANCE_OWNER.email, INSTANCE_OWNER.password, true); + usersSettingsPage.actions.opedDeleteDialog(INSTANCE_MEMBERS[0].email); usersSettingsPage.getters.deleteDataRadioButton().realClick(); usersSettingsPage.getters.deleteDataInput().type('delete all data'); usersSettingsPage.getters.deleteUserButton().realClick(); @@ -97,8 +65,8 @@ describe('User Management', () => { }); it('should delete user and transfer their data', () => { - usersSettingsPage.actions.loginAndVisit(instanceOwner.email, instanceOwner.password, true); - usersSettingsPage.actions.opedDeleteDialog(users[1].email); + usersSettingsPage.actions.loginAndVisit(INSTANCE_OWNER.email, INSTANCE_OWNER.password, true); + usersSettingsPage.actions.opedDeleteDialog(INSTANCE_MEMBERS[1].email); usersSettingsPage.getters.transferDataRadioButton().realClick(); usersSettingsPage.getters.userSelectDropDown().realClick(); usersSettingsPage.getters.userSelectOptions().first().realClick(); @@ -107,7 +75,7 @@ describe('User Management', () => { }); it(`should allow user to change their personal data`, () => { - personalSettingsPage.actions.loginAndVisit(instanceOwner.email, instanceOwner.password); + personalSettingsPage.actions.loginAndVisit(INSTANCE_OWNER.email, INSTANCE_OWNER.password); personalSettingsPage.actions.updateFirstAndLastName( updatedPersonalData.newFirstName, updatedPersonalData.newLastName, @@ -119,14 +87,14 @@ describe('User Management', () => { }); it(`shouldn't allow user to set weak password`, () => { - personalSettingsPage.actions.loginAndVisit(instanceOwner.email, instanceOwner.password); + personalSettingsPage.actions.loginAndVisit(INSTANCE_OWNER.email, INSTANCE_OWNER.password); for (let weakPass of updatedPersonalData.invalidPasswords) { - personalSettingsPage.actions.tryToSetWeakPassword(instanceOwner.password, weakPass); + personalSettingsPage.actions.tryToSetWeakPassword(INSTANCE_OWNER.password, weakPass); } }); it(`shouldn't allow user to change password if old password is wrong`, () => { - personalSettingsPage.actions.loginAndVisit(instanceOwner.email, instanceOwner.password); + personalSettingsPage.actions.loginAndVisit(INSTANCE_OWNER.email, INSTANCE_OWNER.password); personalSettingsPage.actions.updatePassword('iCannotRemember', updatedPersonalData.newPassword); workflowPage.getters .errorToast() @@ -135,21 +103,21 @@ describe('User Management', () => { }); it(`should change current user password`, () => { - personalSettingsPage.actions.loginAndVisit(instanceOwner.email, instanceOwner.password); + personalSettingsPage.actions.loginAndVisit(INSTANCE_OWNER.email, INSTANCE_OWNER.password); personalSettingsPage.actions.updatePassword( - instanceOwner.password, + INSTANCE_OWNER.password, updatedPersonalData.newPassword, ); workflowPage.getters.successToast().should('contain', 'Password updated'); personalSettingsPage.actions.loginWithNewData( - instanceOwner.email, + INSTANCE_OWNER.email, updatedPersonalData.newPassword, ); }); it(`shouldn't 
allow users to set invalid email`, () => { personalSettingsPage.actions.loginAndVisit( - instanceOwner.email, + INSTANCE_OWNER.email, updatedPersonalData.newPassword, ); // try without @ part @@ -160,7 +128,7 @@ describe('User Management', () => { it(`should change user email`, () => { personalSettingsPage.actions.loginAndVisit( - instanceOwner.email, + INSTANCE_OWNER.email, updatedPersonalData.newPassword, ); personalSettingsPage.actions.updateEmail(updatedPersonalData.newEmail); diff --git a/cypress/e2e/19-execution.cy.ts b/cypress/e2e/19-execution.cy.ts index 983e5e4bba..a6d635de9b 100644 --- a/cypress/e2e/19-execution.cy.ts +++ b/cypress/e2e/19-execution.cy.ts @@ -1,15 +1,10 @@ import { v4 as uuid } from 'uuid'; import { NDV, WorkflowPage as WorkflowPageClass, WorkflowsPage } from '../pages'; -const workflowsPage = new WorkflowsPage(); const workflowPage = new WorkflowPageClass(); const ndv = new NDV(); describe('Execution', () => { - before(() => { - cy.skipSetup(); - }); - beforeEach(() => { workflowPage.actions.visit(); }); diff --git a/cypress/e2e/2-credentials.cy.ts b/cypress/e2e/2-credentials.cy.ts index d8f3fc5e0e..7d4f743a98 100644 --- a/cypress/e2e/2-credentials.cy.ts +++ b/cypress/e2e/2-credentials.cy.ts @@ -6,24 +6,14 @@ import { NEW_QUERY_AUTH_ACCOUNT_NAME, } from './../constants'; import { - DEFAULT_USER_EMAIL, - DEFAULT_USER_PASSWORD, GMAIL_NODE_NAME, NEW_GOOGLE_ACCOUNT_NAME, NEW_TRELLO_ACCOUNT_NAME, SCHEDULE_TRIGGER_NODE_NAME, TRELLO_NODE_NAME, } from '../constants'; -import { randFirstName, randLastName } from '@ngneat/falso'; import { CredentialsPage, CredentialsModal, WorkflowPage, NDV } from '../pages'; -import CustomNodeWithN8nCredentialFixture from '../fixtures/Custom_node_n8n_credential.json'; -import CustomNodeWithCustomCredentialFixture from '../fixtures/Custom_node_custom_credential.json'; -import CustomCredential from '../fixtures/Custom_credential.json'; -const email = DEFAULT_USER_EMAIL; -const password = DEFAULT_USER_PASSWORD; -const firstName = randFirstName(); -const lastName = randLastName(); const credentialsPage = new CredentialsPage(); const credentialsModal = new CredentialsModal(); const workflowPage = new WorkflowPage(); @@ -32,10 +22,6 @@ const nodeDetailsView = new NDV(); const NEW_CREDENTIAL_NAME = 'Something else'; describe('Credentials', () => { - before(() => { - cy.skipSetup(); - }); - beforeEach(() => { cy.visit(credentialsPage.url); }); diff --git a/cypress/e2e/20-workflow-executions.cy.ts b/cypress/e2e/20-workflow-executions.cy.ts index fe40633c38..bdc7c3b711 100644 --- a/cypress/e2e/20-workflow-executions.cy.ts +++ b/cypress/e2e/20-workflow-executions.cy.ts @@ -6,10 +6,6 @@ const executionsTab = new WorkflowExecutionsTab(); // Test suite for executions tab describe('Current Workflow Executions', () => { - before(() => { - cy.skipSetup(); - }); - beforeEach(() => { workflowPage.actions.visit(); cy.createFixtureWorkflow('Test_workflow_4_executions_view.json', `My test workflow`); @@ -36,7 +32,6 @@ describe('Current Workflow Executions', () => { }); const createMockExecutions = () => { - workflowPage.actions.turnOnManualExecutionSaving(); executionsTab.actions.createManualExecutions(5); // Make some failed executions by enabling Code node with syntax error executionsTab.actions.toggleNodeEnabled('Error'); diff --git a/cypress/e2e/21-community-nodes.cy.ts b/cypress/e2e/21-community-nodes.cy.ts index d48d365c4f..cfc76e46e0 100644 --- a/cypress/e2e/21-community-nodes.cy.ts +++ b/cypress/e2e/21-community-nodes.cy.ts @@ -13,9 +13,6 @@ const 
workflowPage = new WorkflowPage(); // so the /nodes and /credentials endpoints are intercepted and non-cached. // We want to keep the other tests as fast as possible so we don't want to break the cache in those. describe('Community Nodes', () => { - before(() => { - cy.skipSetup(); - }) beforeEach(() => { cy.intercept('/types/nodes.json', { middleware: true }, (req) => { req.headers['cache-control'] = 'no-cache, no-store'; @@ -36,6 +33,7 @@ describe('Community Nodes', () => { credentials.push(CustomCredential); }) }) + workflowPage.actions.visit(); }); diff --git a/cypress/e2e/23-variables.cy.ts b/cypress/e2e/23-variables.cy.ts index 90ccfedae2..ce6a49fb99 100644 --- a/cypress/e2e/23-variables.cy.ts +++ b/cypress/e2e/23-variables.cy.ts @@ -1,22 +1,10 @@ import { VariablesPage } from '../pages/variables'; -import { DEFAULT_USER_EMAIL, DEFAULT_USER_PASSWORD } from '../constants'; -import { randFirstName, randLastName } from '@ngneat/falso'; const variablesPage = new VariablesPage(); -const email = DEFAULT_USER_EMAIL; -const password = DEFAULT_USER_PASSWORD; -const firstName = randFirstName(); -const lastName = randLastName(); - describe('Variables', () => { - before(() => { - cy.setup({ email, firstName, lastName, password }); - }); - it('should show the unlicensed action box when the feature is disabled', () => { - cy.disableFeature('feat:variables'); - cy.signin({ email, password }); + cy.disableFeature('variables', false); cy.visit(variablesPage.url); variablesPage.getters.unavailableResourcesList().should('be.visible'); @@ -25,11 +13,10 @@ describe('Variables', () => { describe('licensed', () => { before(() => { - cy.enableFeature('feat:variables'); + cy.enableFeature('variables'); }); beforeEach(() => { - cy.signin({ email, password }); cy.intercept('GET', '/rest/variables').as('loadVariables'); cy.visit(variablesPage.url); diff --git a/cypress/e2e/24-ndv-paired-item.cy.ts b/cypress/e2e/24-ndv-paired-item.cy.ts index b7f6c0f437..3bbd2f0b23 100644 --- a/cypress/e2e/24-ndv-paired-item.cy.ts +++ b/cypress/e2e/24-ndv-paired-item.cy.ts @@ -5,10 +5,6 @@ const workflowPage = new WorkflowPage(); const ndv = new NDV(); describe('NDV', () => { - before(() => { - cy.skipSetup(); - }); - beforeEach(() => { workflowPage.actions.visit(); workflowPage.actions.renameWorkflow(uuid()); @@ -277,7 +273,11 @@ describe('NDV', () => { .should('equal', 'hovering-item'); ndv.actions.close(); + workflowPage.actions.openNode('Set5'); + + ndv.actions.switchInputBranch('True Branch'); + ndv.actions.changeOutputRunSelector('1 of 2 (2 items)') ndv.getters.outputTableRow(1) .should('have.text', '8888') .realHover(); @@ -288,16 +288,21 @@ describe('NDV', () => { .realHover(); ndv.getters.outputHoveringItem().should('not.exist'); - ndv.actions.switchIntputBranch('False Branch'); + ndv.actions.switchInputBranch('False Branch'); + ndv.getters.inputTableRow(1) + .should('have.text', '8888') + .realHover(); + + ndv.actions.changeOutputRunSelector('2 of 2 (4 items)') + ndv.getters.outputTableRow(1) + .should('have.text', '1111') + .realHover(); + + ndv.actions.changeOutputRunSelector('1 of 2 (2 items)') ndv.getters.inputTableRow(1) .should('have.text', '8888') .realHover(); ndv.getters.outputHoveringItem().should('have.text', '8888'); - - ndv.actions.changeOutputRunSelector('1 of 2 (4 items)') - ndv.getters.outputTableRow(1) - .should('have.text', '1111') - .realHover(); // todo there's a bug here need to fix ADO-534 // ndv.getters.outputHoveringItem().should('not.exist'); }); diff --git a/cypress/e2e/25-stickies.cy.ts 
b/cypress/e2e/25-stickies.cy.ts index 13396efe25..afb2db3b5f 100644 --- a/cypress/e2e/25-stickies.cy.ts +++ b/cypress/e2e/25-stickies.cy.ts @@ -15,10 +15,6 @@ function checkStickiesStyle( top: number, left: number, height: number, width: n } describe('Canvas Actions', () => { - before(() => { - cy.skipSetup(); - }); - beforeEach(() => { workflowPage.actions.visit(); @@ -94,66 +90,66 @@ describe('Canvas Actions', () => { moveSticky({ left: 600, top: 200 }); cy.drag('[data-test-id="sticky"] [data-dir="left"]', [100, 100]); - checkStickiesStyle(140, 510, 160, 150); + checkStickiesStyle(100, 510, 160, 150); cy.drag('[data-test-id="sticky"] [data-dir="left"]', [-50, -50]); - checkStickiesStyle(140, 466, 160, 194); + checkStickiesStyle(100, 466, 160, 194); }); it('expands/shrinks sticky from the top edge', () => { workflowPage.actions.addSticky(); cy.drag('[data-test-id="sticky"]', [100, 100]); // move away from canvas button - checkStickiesStyle(360, 620, 160, 240); + checkStickiesStyle(300, 620, 160, 240); cy.drag('[data-test-id="sticky"] [data-dir="top"]', [100, 100]); - checkStickiesStyle(440, 620, 80, 240); + checkStickiesStyle(380, 620, 80, 240); cy.drag('[data-test-id="sticky"] [data-dir="top"]', [-50, -50]); - checkStickiesStyle(384, 620, 136, 240); + checkStickiesStyle(324, 620, 136, 240); }); it('expands/shrinks sticky from the bottom edge', () => { workflowPage.actions.addSticky(); cy.drag('[data-test-id="sticky"]', [100, 100]); // move away from canvas button - checkStickiesStyle(360, 620, 160, 240); + checkStickiesStyle(300, 620, 160, 240); cy.drag('[data-test-id="sticky"] [data-dir="bottom"]', [100, 100]); - checkStickiesStyle(360, 620, 254, 240); + checkStickiesStyle(300, 620, 254, 240); cy.drag('[data-test-id="sticky"] [data-dir="bottom"]', [-50, -50]); - checkStickiesStyle(360, 620, 198, 240); + checkStickiesStyle(300, 620, 198, 240); }); it('expands/shrinks sticky from the bottom right edge', () => { workflowPage.actions.addSticky(); cy.drag('[data-test-id="sticky"]', [-100, -100]); // move away from canvas button - checkStickiesStyle(160, 420, 160, 240); + checkStickiesStyle(100, 420, 160, 240); cy.drag('[data-test-id="sticky"] [data-dir="bottomRight"]', [100, 100]); - checkStickiesStyle(160, 420, 254, 346); + checkStickiesStyle(100, 420, 254, 346); cy.drag('[data-test-id="sticky"] [data-dir="bottomRight"]', [-50, -50]); - checkStickiesStyle(160, 420, 198, 302); + checkStickiesStyle(100, 420, 198, 302); }); it('expands/shrinks sticky from the top right edge', () => { addDefaultSticky(); cy.drag('[data-test-id="sticky"] [data-dir="topRight"]', [100, 100]); - checkStickiesStyle(420, 400, 80, 346); + checkStickiesStyle(360, 400, 80, 346); cy.drag('[data-test-id="sticky"] [data-dir="topRight"]', [-50, -50]); - checkStickiesStyle(364, 400, 136, 302); + checkStickiesStyle(304, 400, 136, 302); }); it('expands/shrinks sticky from the top left edge, and reach min height/width', () => { addDefaultSticky(); cy.drag('[data-test-id="sticky"] [data-dir="topLeft"]', [100, 100]); - checkStickiesStyle(420, 490, 80, 150); + checkStickiesStyle(360, 490, 80, 150); cy.drag('[data-test-id="sticky"] [data-dir="topLeft"]', [-150, -150]); - checkStickiesStyle(264, 346, 236, 294); + checkStickiesStyle(204, 346, 236, 294); }); it('sets sticky behind node', () => { @@ -161,7 +157,7 @@ describe('Canvas Actions', () => { addDefaultSticky(); cy.drag('[data-test-id="sticky"] [data-dir="topLeft"]', [-150, -150]); - checkStickiesStyle(184, 256, 316, 384, -121); + checkStickiesStyle(124, 256, 316, 384, -121); 
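The sticky geometry assertions above go through a `checkStickiesStyle(top, left, height, width[, zIndex])` helper whose body falls outside this hunk. For orientation, a minimal sketch of such a helper — assuming the sticky exposes its position and size as CSS on the `[data-test-id="sticky"]` element, which is an assumption rather than the spec's actual implementation — could look like:

```ts
// Sketch only: asserts the sticky's rendered geometry, and optionally its stacking order.
// Assumes the position/size end up as CSS top/left/height/width on the sticky element.
function checkStickiesStyle(
	top: number,
	left: number,
	height: number,
	width: number,
	zIndex?: number,
) {
	cy.get('[data-test-id="sticky"]').should(($el) => {
		expect($el).to.have.css('top', `${top}px`);
		expect($el).to.have.css('left', `${left}px`);
		expect($el).to.have.css('height', `${height}px`);
		expect($el).to.have.css('width', `${width}px`);
		if (zIndex !== undefined) {
			expect($el).to.have.css('z-index', `${zIndex}`);
		}
	});
}
```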
workflowPage.getters.canvasNodes().eq(0) .should(($el) => { @@ -239,7 +235,7 @@ function addDefaultSticky() { } function stickyShouldBePositionedCorrectly(position: Position) { - const yOffset = -60; + const yOffset = -100; const xOffset = -180; workflowPage.getters.stickies() .should(($el) => { diff --git a/cypress/e2e/26-resource-locator.cy.ts b/cypress/e2e/26-resource-locator.cy.ts index 3a00dded78..cedcfd628e 100644 --- a/cypress/e2e/26-resource-locator.cy.ts +++ b/cypress/e2e/26-resource-locator.cy.ts @@ -8,10 +8,6 @@ const NO_CREDENTIALS_MESSAGE = 'Please add your credential'; const INVALID_CREDENTIALS_MESSAGE = 'Please check your credential'; describe('Resource Locator', () => { - before(() => { - cy.skipSetup(); - }); - beforeEach(() => { workflowPage.actions.visit(); }); diff --git a/cypress/e2e/3-default-owner.cy.ts b/cypress/e2e/3-default-owner.cy.ts deleted file mode 100644 index bd9b29f036..0000000000 --- a/cypress/e2e/3-default-owner.cy.ts +++ /dev/null @@ -1,110 +0,0 @@ -import { randFirstName, randLastName } from '@ngneat/falso'; -import { DEFAULT_USER_EMAIL, DEFAULT_USER_PASSWORD } from '../constants'; -import { - SettingsUsersPage, - SignupPage, - WorkflowsPage, - WorkflowPage, - CredentialsPage, - CredentialsModal, - MessageBox, -} from '../pages'; -import { SettingsUsagePage } from '../pages/settings-usage'; - -import { MainSidebar, SettingsSidebar } from '../pages/sidebar'; - -const mainSidebar = new MainSidebar(); -const settingsSidebar = new SettingsSidebar(); - -const workflowsPage = new WorkflowsPage(); -const signupPage = new SignupPage(); -const workflowPage = new WorkflowPage(); - -const credentialsPage = new CredentialsPage(); -const credentialsModal = new CredentialsModal(); - -const settingsUsersPage = new SettingsUsersPage(); -const settingsUsagePage = new SettingsUsagePage(); - -const messageBox = new MessageBox(); - -const email = DEFAULT_USER_EMAIL; -const password = DEFAULT_USER_PASSWORD; -const firstName = randFirstName(); -const lastName = randLastName(); - -describe('Default owner', () => { - it('should be able to create workflows', () => { - cy.skipSetup(); - cy.createFixtureWorkflow('Test_workflow_1.json', `Test workflow`); - - // reload page, ensure owner still has access - cy.reload(); - cy.waitForLoad(); - workflowPage.getters.workflowNameInput().should('contain.value', 'Test workflow'); - }); - - it('should be able to add new credentials', () => { - cy.visit(credentialsPage.url); - - credentialsPage.getters.emptyListCreateCredentialButton().click(); - - credentialsModal.getters.newCredentialModal().should('be.visible'); - credentialsModal.getters.newCredentialTypeSelect().should('be.visible'); - credentialsModal.getters.newCredentialTypeOption('Notion API').click(); - - credentialsModal.getters.newCredentialTypeButton().click(); - - credentialsModal.getters.connectionParameter('API Key').type('1234567890'); - - credentialsModal.actions.setName('My awesome Notion account'); - credentialsModal.actions.save(); - - credentialsModal.actions.close(); - - credentialsModal.getters.newCredentialModal().should('not.exist'); - credentialsModal.getters.editCredentialModal().should('not.exist'); - - credentialsPage.getters.credentialCards().should('have.length', 1); - }); - - it('should be able to setup UM from settings', () => { - cy.visit('/'); - mainSidebar.getters.settings().should('be.visible'); - mainSidebar.actions.goToSettings(); - cy.url().should('include', settingsUsagePage.url); - - settingsSidebar.actions.goToUsers(); - cy.url().should('include', 
settingsUsersPage.url); - - settingsUsersPage.actions.goToOwnerSetup(); - - cy.url().should('include', signupPage.url); - }); - - it('should be able to setup instance and migrate workflows and credentials', () => { - cy.setup({ email, firstName, lastName, password }, true); - - messageBox.getters.content().should('contain.text', '1 existing workflow and 1 credential'); - - messageBox.actions.confirm(); - cy.wait('@setupRequest'); - cy.url().should('include', settingsUsersPage.url); - settingsSidebar.actions.back(); - - cy.url().should('include', workflowsPage.url); - - workflowsPage.getters.workflowCards().should('have.length', 1); - }); - - it('can click back to main menu and have migrated credential after setup', () => { - cy.signin({ email, password }); - cy.visit(workflowsPage.url); - - mainSidebar.actions.goToCredentials(); - - cy.url().should('include', credentialsPage.url); - - credentialsPage.getters.credentialCards().should('have.length', 1); - }); -}); diff --git a/cypress/e2e/4-node-creator.cy.ts b/cypress/e2e/4-node-creator.cy.ts index 187345e940..fb0887a683 100644 --- a/cypress/e2e/4-node-creator.cy.ts +++ b/cypress/e2e/4-node-creator.cy.ts @@ -7,10 +7,6 @@ const WorkflowPage = new WorkflowPageClass(); const NDVModal = new NDV(); describe('Node Creator', () => { - before(() => { - cy.skipSetup(); - }); - beforeEach(() => { WorkflowPage.actions.visit(); }); @@ -271,7 +267,7 @@ describe('Node Creator', () => { NDVModal.actions.close(); WorkflowPage.getters.canvasNodes().should('have.length', 2); WorkflowPage.actions.zoomToFit(); - WorkflowPage.actions.addNodeBetweenNodes('n8n', 'n8n1', 'Item Lists') + WorkflowPage.actions.addNodeBetweenNodes('n8n', 'n8n1', 'Item Lists', 'Summarize') WorkflowPage.getters.canvasNodes().should('have.length', 3); }) }); diff --git a/cypress/e2e/5-ndv.cy.ts b/cypress/e2e/5-ndv.cy.ts index 93198a2391..6e3f38ca5b 100644 --- a/cypress/e2e/5-ndv.cy.ts +++ b/cypress/e2e/5-ndv.cy.ts @@ -5,10 +5,6 @@ const workflowPage = new WorkflowPage(); const ndv = new NDV(); describe('NDV', () => { - before(() => { - cy.skipSetup(); - }); - beforeEach(() => { workflowPage.actions.visit(); workflowPage.actions.renameWorkflow(uuid()); @@ -68,15 +64,15 @@ describe('NDV', () => { it('should show validation errors only after blur or re-opening of NDV', () => { workflowPage.actions.addNodeToCanvas('Manual'); - workflowPage.actions.addNodeToCanvas('Airtable', true, true, 'Read data from a table'); + workflowPage.actions.addNodeToCanvas('Airtable', true, true, 'Search records'); ndv.getters.container().should('be.visible'); - cy.get('.has-issues').should('have.length', 0); + // cy.get('.has-issues').should('have.length', 0); ndv.getters.parameterInput('table').find('input').eq(1).focus().blur(); - ndv.getters.parameterInput('application').find('input').eq(1).focus().blur(); - cy.get('.has-issues').should('have.length', 2); + ndv.getters.parameterInput('base').find('input').eq(1).focus().blur(); + cy.get('.has-issues').should('have.length', 0); ndv.getters.backToCanvas().click(); workflowPage.actions.openNode('Airtable'); - cy.get('.has-issues').should('have.length', 3); + cy.get('.has-issues').should('have.length', 2); cy.get('[class*=hasIssues]').should('have.length', 1); }); diff --git a/cypress/e2e/6-code-node.cy.ts b/cypress/e2e/6-code-node.cy.ts index 9a12dfccf4..4a987dd6cd 100644 --- a/cypress/e2e/6-code-node.cy.ts +++ b/cypress/e2e/6-code-node.cy.ts @@ -5,8 +5,8 @@ const WorkflowPage = new WorkflowPageClass(); const ndv = new NDV(); describe('Code node', () => { - 
before(() => { - cy.skipSetup(); + beforeEach(() => { + WorkflowPage.actions.visit(); }); it('should execute the placeholder in all-items mode successfully', () => { @@ -20,7 +20,6 @@ describe('Code node', () => { }); it('should execute the placeholder in each-item mode successfully', () => { - WorkflowPage.actions.visit(); WorkflowPage.actions.addInitialNodeToCanvas('Manual'); WorkflowPage.actions.addNodeToCanvas('Code'); WorkflowPage.actions.openNode('Code'); diff --git a/cypress/e2e/7-workflow-actions.cy.ts b/cypress/e2e/7-workflow-actions.cy.ts index b7f948744c..7f9ee21c7c 100644 --- a/cypress/e2e/7-workflow-actions.cy.ts +++ b/cypress/e2e/7-workflow-actions.cy.ts @@ -5,6 +5,7 @@ import { SCHEDULE_TRIGGER_NODE_NAME, } from '../constants'; import { WorkflowPage as WorkflowPageClass } from '../pages/workflow'; +import { WorkflowsPage as WorkflowsPageClass } from '../pages/workflows'; const NEW_WORKFLOW_NAME = 'Something else'; const IMPORT_WORKFLOW_URL = 'https://gist.githubusercontent.com/OlegIvaniv/010bd3f45c8a94f8eb7012e663a8b671/raw/3afea1aec15573cc168d9af7e79395bd76082906/test-workflow.json'; @@ -12,12 +13,9 @@ const DUPLICATE_WORKFLOW_NAME = 'Duplicated workflow'; const DUPLICATE_WORKFLOW_TAG = 'Duplicate'; const WorkflowPage = new WorkflowPageClass(); +const WorkflowPages = new WorkflowsPageClass(); describe('Workflow Actions', () => { - before(() => { - cy.skipSetup(); - }); - beforeEach(() => { WorkflowPage.actions.visit(); }); @@ -66,6 +64,42 @@ describe('Workflow Actions', () => { .should('eq', NEW_WORKFLOW_NAME); }); + it('should not save workflow if canvas is loading', () => { + let interceptCalledCount = 0; + + // There's no way in Cypress to check if intercept was not called + // so we'll count the number of times it was called + cy.intercept('PATCH', '/rest/workflows/*', () => { + interceptCalledCount++; + }).as('saveWorkflow'); + + WorkflowPage.actions.addNodeToCanvas(MANUAL_TRIGGER_NODE_NAME); + WorkflowPage.actions.saveWorkflowOnButtonClick(); + cy.intercept( + { + url: '/rest/workflows/*', + method: 'GET', + middleware: true, + }, + (req) => { + // Delay the response to give time for the save to be triggered + req.on('response', async (res) => { + await new Promise((resolve) => setTimeout(resolve, 2000)) + res.send(); + }) + } + ) + cy.reload(); + cy.get('.el-loading-mask').should('exist'); + cy.get('body').type(META_KEY, { release: false }).type('s'); + cy.get('body').type(META_KEY, { release: false }).type('s'); + cy.get('body').type(META_KEY, { release: false }).type('s'); + cy.wrap(null).then(() => expect(interceptCalledCount).to.eq(0)); + WorkflowPage.actions.addNodeToCanvas(SCHEDULE_TRIGGER_NODE_NAME); + cy.get('body').type(META_KEY, { release: false }).type('s'); + cy.wait('@saveWorkflow'); + cy.wrap(null).then(() => expect(interceptCalledCount).to.eq(1)); + }) it('should copy nodes', () => { WorkflowPage.actions.addNodeToCanvas(SCHEDULE_TRIGGER_NODE_NAME); WorkflowPage.actions.addNodeToCanvas(CODE_NODE_NAME); @@ -110,64 +144,70 @@ describe('Workflow Actions', () => { }); it('should update workflow settings', () => { - WorkflowPage.actions.visit(); - // Open settings dialog - WorkflowPage.actions.saveWorkflowOnButtonClick(); - WorkflowPage.getters.workflowMenu().should('be.visible'); - WorkflowPage.getters.workflowMenu().click(); - WorkflowPage.getters.workflowMenuItemSettings().should('be.visible'); - WorkflowPage.getters.workflowMenuItemSettings().click(); - // Change all settings - 
WorkflowPage.getters.workflowSettingsErrorWorkflowSelect().find('li').should('have.length', 7); - WorkflowPage.getters - .workflowSettingsErrorWorkflowSelect() - .find('li') - .last() - .click({ force: true }); - WorkflowPage.getters.workflowSettingsTimezoneSelect().find('li').should('exist'); - WorkflowPage.getters.workflowSettingsTimezoneSelect().find('li').eq(1).click({ force: true }); - WorkflowPage.getters - .workflowSettingsSaveFiledExecutionsSelect() - .find('li') - .should('have.length', 3); - WorkflowPage.getters - .workflowSettingsSaveFiledExecutionsSelect() - .find('li') - .last() - .click({ force: true }); - WorkflowPage.getters - .workflowSettingsSaveSuccessExecutionsSelect() - .find('li') - .should('have.length', 3); - WorkflowPage.getters - .workflowSettingsSaveSuccessExecutionsSelect() - .find('li') - .last() - .click({ force: true }); - WorkflowPage.getters - .workflowSettingsSaveManualExecutionsSelect() - .find('li') - .should('have.length', 3); - WorkflowPage.getters - .workflowSettingsSaveManualExecutionsSelect() - .find('li') - .last() - .click({ force: true }); - WorkflowPage.getters - .workflowSettingsSaveExecutionProgressSelect() - .find('li') - .should('have.length', 3); - WorkflowPage.getters - .workflowSettingsSaveExecutionProgressSelect() - .find('li') - .last() - .click({ force: true }); - WorkflowPage.getters.workflowSettingsTimeoutWorkflowSwitch().click(); - WorkflowPage.getters.workflowSettingsTimeoutForm().find('input').first().type('1'); - // Save settings - WorkflowPage.getters.workflowSettingsSaveButton().click(); - WorkflowPage.getters.workflowSettingsModal().should('not.exist'); - WorkflowPage.getters.successToast().should('exist'); + cy.visit(WorkflowPages.url); + WorkflowPages.getters.workflowCards().then((cards) => { + const totalWorkflows = cards.length; + + WorkflowPage.actions.visit(); + // Open settings dialog + WorkflowPage.actions.saveWorkflowOnButtonClick(); + WorkflowPage.getters.workflowMenu().should('be.visible'); + WorkflowPage.getters.workflowMenu().click(); + WorkflowPage.getters.workflowMenuItemSettings().should('be.visible'); + WorkflowPage.getters.workflowMenuItemSettings().click(); + // Change all settings + // totalWorkflows + 1 (current workflow) + 1 (no workflow option) + WorkflowPage.getters.workflowSettingsErrorWorkflowSelect().find('li').should('have.length', totalWorkflows + 2); + WorkflowPage.getters + .workflowSettingsErrorWorkflowSelect() + .find('li') + .last() + .click({ force: true }); + WorkflowPage.getters.workflowSettingsTimezoneSelect().find('li').should('exist'); + WorkflowPage.getters.workflowSettingsTimezoneSelect().find('li').eq(1).click({ force: true }); + WorkflowPage.getters + .workflowSettingsSaveFiledExecutionsSelect() + .find('li') + .should('have.length', 3); + WorkflowPage.getters + .workflowSettingsSaveFiledExecutionsSelect() + .find('li') + .last() + .click({ force: true }); + WorkflowPage.getters + .workflowSettingsSaveSuccessExecutionsSelect() + .find('li') + .should('have.length', 3); + WorkflowPage.getters + .workflowSettingsSaveSuccessExecutionsSelect() + .find('li') + .last() + .click({ force: true }); + WorkflowPage.getters + .workflowSettingsSaveManualExecutionsSelect() + .find('li') + .should('have.length', 3); + WorkflowPage.getters + .workflowSettingsSaveManualExecutionsSelect() + .find('li') + .last() + .click({ force: true }); + WorkflowPage.getters + .workflowSettingsSaveExecutionProgressSelect() + .find('li') + .should('have.length', 3); + WorkflowPage.getters + 
.workflowSettingsSaveExecutionProgressSelect() + .find('li') + .last() + .click({ force: true }); + WorkflowPage.getters.workflowSettingsTimeoutWorkflowSwitch().click(); + WorkflowPage.getters.workflowSettingsTimeoutForm().find('input').first().type('1'); + // Save settings + WorkflowPage.getters.workflowSettingsSaveButton().click(); + WorkflowPage.getters.workflowSettingsModal().should('not.exist'); + WorkflowPage.getters.successToast().should('exist'); + }) }); it('should not be able to delete unsaved workflow', () => { diff --git a/cypress/e2e/8-http-request-node.cy.ts b/cypress/e2e/8-http-request-node.cy.ts index a40d37cf23..c7f44e3494 100644 --- a/cypress/e2e/8-http-request-node.cy.ts +++ b/cypress/e2e/8-http-request-node.cy.ts @@ -4,8 +4,8 @@ const workflowPage = new WorkflowPage(); const ndv = new NDV(); describe('HTTP Request node', () => { - before(() => { - cy.skipSetup(); + beforeEach(() => { + workflowPage.actions.visit(); }); it('should make a request with a URL and receive a response', () => { diff --git a/cypress/e2e/9-expression-editor-modal.cy.ts b/cypress/e2e/9-expression-editor-modal.cy.ts index 6b0412cc21..46affa0d62 100644 --- a/cypress/e2e/9-expression-editor-modal.cy.ts +++ b/cypress/e2e/9-expression-editor-modal.cy.ts @@ -3,16 +3,14 @@ import { WorkflowPage as WorkflowPageClass } from '../pages/workflow'; const WorkflowPage = new WorkflowPageClass(); describe('Expression editor modal', () => { - before(() => { - cy.skipSetup(); - }); - beforeEach(() => { WorkflowPage.actions.visit(); WorkflowPage.actions.addInitialNodeToCanvas('Manual'); WorkflowPage.actions.addNodeToCanvas('Hacker News'); WorkflowPage.actions.openNode('Hacker News'); WorkflowPage.actions.openExpressionEditorModal(); + + cy.on('uncaught:exception', (err) => err.name !== 'ExpressionError'); }); it('should resolve primitive resolvables', () => { diff --git a/cypress/pages/index.ts b/cypress/pages/index.ts index 35ef30d5ec..33ddcda6e5 100644 --- a/cypress/pages/index.ts +++ b/cypress/pages/index.ts @@ -1,7 +1,5 @@ export * from './base'; export * from './credentials'; -export * from './signin'; -export * from './signup'; export * from './workflows'; export * from './workflow'; export * from './modals'; diff --git a/cypress/pages/ndv.ts b/cypress/pages/ndv.ts index 2a9b6edc4f..3d580c1bf4 100644 --- a/cypress/pages/ndv.ts +++ b/cypress/pages/ndv.ts @@ -154,7 +154,7 @@ export class NDV extends BasePage { switchOutputBranch: (name: string) => { this.getters.outputBranches().get('span').contains(name).click(); }, - switchIntputBranch: (name: string) => { + switchInputBranch: (name: string) => { this.getters.inputBranches().get('span').contains(name).click(); }, setRLCValue: (paramName: string, value: string) => { diff --git a/cypress/pages/sidebar/main-sidebar.ts b/cypress/pages/sidebar/main-sidebar.ts index b86ed326f1..fc9d8557a2 100644 --- a/cypress/pages/sidebar/main-sidebar.ts +++ b/cypress/pages/sidebar/main-sidebar.ts @@ -26,9 +26,5 @@ export class MainSidebar extends BasePage { openUserMenu: () => { this.getters.userMenu().find('[role="button"]').last().click(); }, - signout: () => { - this.actions.openUserMenu(); - cy.getByTestId('workflow-menu-item-logout').click(); - }, }; } diff --git a/cypress/pages/signin.ts b/cypress/pages/signin.ts deleted file mode 100644 index b54a30173f..0000000000 --- a/cypress/pages/signin.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { BasePage } from './base'; - -export class SigninPage extends BasePage { - url = '/signin'; - getters = { - form: () => 
cy.getByTestId('auth-form'), - email: () => cy.getByTestId('email'), - password: () => cy.getByTestId('password'), - submit: () => cy.get('button'), - }; -} diff --git a/cypress/pages/signup.ts b/cypress/pages/signup.ts deleted file mode 100644 index f647720ce4..0000000000 --- a/cypress/pages/signup.ts +++ /dev/null @@ -1,15 +0,0 @@ -import { BasePage } from './base'; - -// todo rename to setup -export class SignupPage extends BasePage { - url = '/setup'; - getters = { - form: () => cy.getByTestId('auth-form'), - email: () => cy.getByTestId('email'), - firstName: () => cy.getByTestId('firstName'), - lastName: () => cy.getByTestId('lastName'), - password: () => cy.getByTestId('password'), - submit: () => cy.get('button'), - skip: () => cy.get('a'), - }; -} diff --git a/cypress/pages/workflow.ts b/cypress/pages/workflow.ts index 302cc0dc4c..2176d9b91e 100644 --- a/cypress/pages/workflow.ts +++ b/cypress/pages/workflow.ts @@ -178,7 +178,7 @@ export class WorkflowPage extends BasePage { }, saveWorkflowUsingKeyboardShortcut: () => { cy.intercept('POST', '/rest/workflows').as('createWorkflow'); - cy.get('body').type('{meta}', { release: false }).type('s'); + cy.get('body').type(META_KEY, { release: false }).type('s'); }, deleteNode: (name: string) => { this.getters.canvasNodeByName(name).first().click(); @@ -242,14 +242,15 @@ export class WorkflowPage extends BasePage { executeWorkflow: () => { this.getters.executeWorkflowButton().click(); }, - addNodeBetweenNodes: (sourceNodeName: string, targetNodeName: string, newNodeName: string) => { + addNodeBetweenNodes: (sourceNodeName: string, targetNodeName: string, newNodeName: string, action?: string) => { this.getters.getConnectionBetweenNodes(sourceNodeName, targetNodeName).first().realHover(); this.getters .getConnectionActionsBetweenNodes(sourceNodeName, targetNodeName) .find('.add') .first() .click({ force: true }); - this.actions.addNodeToCanvas(newNodeName, false); + + this.actions.addNodeToCanvas(newNodeName, false, false, action); }, deleteNodeBetweenNodes: ( sourceNodeName: string, @@ -281,23 +282,5 @@ export class WorkflowPage extends BasePage { .type(content) .type('{esc}'); }, - turnOnManualExecutionSaving: () => { - this.getters.workflowMenu().click(); - this.getters.workflowMenuItemSettings().click(); - cy.get('.el-loading-mask').should('not.be.visible'); - this.getters - .workflowSettingsSaveManualExecutionsSelect() - .find('li:contains("Yes")') - .click({ force: true }); - - this.getters.workflowSettingsSaveManualExecutionsSelect().should('contain', 'Yes'); - this.getters.workflowSettingsSaveButton().click(); - this.getters.successToast().should('exist'); - - this.getters.workflowMenu().click(); - this.getters.workflowMenuItemSettings().click(); - this.getters.workflowSettingsSaveManualExecutionsSelect().should('contain', 'Yes'); - this.getters.workflowSettingsSaveButton().click(); - }, }; } diff --git a/cypress/pages/workflows.ts b/cypress/pages/workflows.ts index de2a38a101..416528e85c 100644 --- a/cypress/pages/workflows.ts +++ b/cypress/pages/workflows.ts @@ -36,8 +36,10 @@ export class WorkflowsPage extends BasePage { cy.visit(this.url); this.getters.workflowCardActions(name).click(); this.getters.workflowDeleteButton().click(); + cy.intercept('DELETE', '/rest/workflows/*').as('deleteWorkflow'); cy.get('button').contains('delete').click(); + cy.wait('@deleteWorkflow'); }, }; } diff --git a/cypress/support/commands.ts b/cypress/support/commands.ts index d0e9ddbaa0..505022934a 100644 --- a/cypress/support/commands.ts +++ 
b/cypress/support/commands.ts @@ -1,32 +1,6 @@ -// *********************************************** -// This example commands.js shows you how to -// create various custom commands and overwrite -// existing commands. -// -// For more comprehensive examples of custom -// commands please read more here: -// https://on.cypress.io/custom-commands -// *********************************************** -// -// -// -- This is a parent command -- -// Cypress.Commands.add('login', (email, password) => { ... }) -// -// -// -- This is a child command -- -// Cypress.Commands.add('drag', { prevSubject: 'element'}, (subject, options) => { ... }) -// -// -// -- This is a dual command -- -// Cypress.Commands.add('dismiss', { prevSubject: 'optional'}, (subject, options) => { ... }) -// -// -// -- This will overwrite an existing command -- -// Cypress.Commands.overwrite('visit', (originalFn, url, options) => { ... }) import 'cypress-real-events'; -import { WorkflowsPage, SigninPage, SignupPage, SettingsUsersPage, WorkflowPage } from '../pages'; -import { N8N_AUTH_COOKIE } from '../constants'; -import { MessageBox } from '../pages/modals/message-box'; +import { WorkflowPage } from '../pages'; +import { BASE_URL, N8N_AUTH_COOKIE } from '../constants'; Cypress.Commands.add('getByTestId', (selector, ...args) => { return cy.get(`[data-test-id="${selector}"]`, ...args); @@ -59,164 +33,35 @@ Cypress.Commands.add('waitForLoad', (waitForIntercepts = true) => { // we can't set them up here because at this point it would be too late // and the requests would already have been made if (waitForIntercepts) { - cy.wait(['@loadSettings', '@loadLogin']); + cy.wait(['@loadSettings']); } cy.getByTestId('node-view-loader', { timeout: 20000 }).should('not.exist'); cy.get('.el-loading-mask', { timeout: 20000 }).should('not.exist'); }); Cypress.Commands.add('signin', ({ email, password }) => { - const signinPage = new SigninPage(); - const workflowsPage = new WorkflowsPage(); - - cy.session( - [email, password], - () => { - cy.visit(signinPage.url); - - signinPage.getters.form().within(() => { - signinPage.getters.email().type(email); - signinPage.getters.password().type(password); - signinPage.getters.submit().click(); - }); - - // we should be redirected to /workflows - cy.url().should('include', workflowsPage.url); + Cypress.session.clearAllSavedSessions(); + cy.session([email, password], () => cy.request('POST', '/rest/login', { email, password }), { + validate() { + cy.getCookie(N8N_AUTH_COOKIE).should('exist'); }, - { - validate() { - cy.getCookie(N8N_AUTH_COOKIE).should('exist'); - }, - }, - ); + }); }); Cypress.Commands.add('signout', () => { - cy.visit('/signout'); - cy.waitForLoad(); - cy.url().should('include', '/signin'); + cy.request('POST', '/rest/logout'); cy.getCookie(N8N_AUTH_COOKIE).should('not.exist'); }); -Cypress.Commands.add('signup', ({ firstName, lastName, password, url }) => { - const signupPage = new SignupPage(); - - cy.visit(url); - - signupPage.getters.form().within(() => { - cy.url().then((url) => { - cy.intercept('/rest/users/*').as('userSignup') - signupPage.getters.firstName().type(firstName); - signupPage.getters.lastName().type(lastName); - signupPage.getters.password().type(password); - signupPage.getters.submit().click(); - cy.wait('@userSignup'); - }); - }); -}); - -Cypress.Commands.add('setup', ({ email, firstName, lastName, password }, skipIntercept = false) => { - const signupPage = new SignupPage(); - - cy.intercept('GET', signupPage.url).as('setupPage'); - cy.visit(signupPage.url); - 
cy.wait('@setupPage'); - - signupPage.getters.form().within(() => { - cy.url().then((url) => { - if (url.includes(signupPage.url)) { - signupPage.getters.email().type(email); - signupPage.getters.firstName().type(firstName); - signupPage.getters.lastName().type(lastName); - signupPage.getters.password().type(password); - - cy.intercept('POST', '/rest/owner/setup').as('setupRequest'); - signupPage.getters.submit().click(); - - if(!skipIntercept) { - cy.wait('@setupRequest'); - } - } else { - cy.log('User already signed up'); - } - }); - }); -}); - Cypress.Commands.add('interceptREST', (method, url) => { cy.intercept(method, `http://localhost:5678/rest${url}`); }); -Cypress.Commands.add('inviteUsers', ({ instanceOwner, users }) => { - const settingsUsersPage = new SettingsUsersPage(); +const setFeature = (feature: string, enabled: boolean) => + cy.request('PATCH', `${BASE_URL}/rest/e2e/feature`, { feature: `feat:${feature}`, enabled }); - cy.signin(instanceOwner); - - users.forEach((user) => { - cy.signin(instanceOwner); - cy.visit(settingsUsersPage.url); - - cy.interceptREST('POST', '/users').as('inviteUser'); - - settingsUsersPage.getters.inviteButton().click(); - settingsUsersPage.getters.inviteUsersModal().within((modal) => { - settingsUsersPage.getters.inviteUsersModalEmailsInput().type(user.email).type('{enter}'); - }); - - cy.wait('@inviteUser').then((interception) => { - const inviteLink = interception.response!.body.data[0].user.inviteAcceptUrl; - cy.log(JSON.stringify(interception.response!.body.data[0].user)); - cy.log(inviteLink); - cy.signout(); - cy.signup({ ...user, url: inviteLink }); - }); - }); -}); - -Cypress.Commands.add('skipSetup', () => { - const signupPage = new SignupPage(); - const workflowPage = new WorkflowPage(); - const Confirmation = new MessageBox(); - - cy.intercept('GET', signupPage.url).as('setupPage'); - cy.visit(signupPage.url); - cy.wait('@setupPage'); - - signupPage.getters.form().within(() => { - cy.url().then((url) => { - if (url.endsWith(signupPage.url)) { - signupPage.getters.skip().click(); - - Confirmation.getters.header().should('contain.text', 'Skip owner account setup?'); - Confirmation.actions.confirm(); - - // we should be redirected to empty canvas - cy.intercept('GET', '/rest/workflows/new').as('loading'); - cy.url().should('include', workflowPage.url); - cy.wait('@loading'); - } else { - cy.log('User already signed up'); - } - }); - }); -}); - -Cypress.Commands.add('resetAll', () => { - cy.task('reset'); - Cypress.session.clearAllSavedSessions(); -}); - -Cypress.Commands.add('setupOwner', (payload) => { - cy.task('setup-owner', payload); -}); - -Cypress.Commands.add('enableFeature', (feature) => { - cy.task('set-feature', { feature, enabled: true }); -}); - -Cypress.Commands.add('disableFeature', (feature) => { - cy.task('set-feature', { feature, enabled: false }); -}); +Cypress.Commands.add('enableFeature', (feature: string) => setFeature(feature, true)); +Cypress.Commands.add('disableFeature', (feature): string => setFeature(feature, false)); Cypress.Commands.add('grantBrowserPermissions', (...permissions: string[]) => { if (Cypress.isBrowser('chrome')) { @@ -256,7 +101,7 @@ Cypress.Commands.add('drag', (selector, pos, options) => { const originalLocation = Cypress.$(selector)[index].getBoundingClientRect(); - element.trigger('mousedown'); + element.trigger('mousedown', { force: true }); element.trigger('mousemove', { which: 1, pageX: options?.abs ? 
xDiff : originalLocation.right + xDiff, diff --git a/cypress/support/e2e.ts b/cypress/support/e2e.ts index 1df0199296..456ba9efd9 100644 --- a/cypress/support/e2e.ts +++ b/cypress/support/e2e.ts @@ -1,28 +1,19 @@ -// *********************************************************** -// This example support/e2e.js is processed and -// loaded automatically before your test files. -// -// This is a great place to put global configuration and -// behavior that modifies Cypress. -// -// You can change the location of this file or turn off -// automatically serving support files with the -// 'supportFile' configuration option. -// -// You can read more here: -// https://on.cypress.io/configuration -// *********************************************************** - +import { BASE_URL, INSTANCE_MEMBERS, INSTANCE_OWNER } from '../constants'; import './commands'; before(() => { - cy.resetAll(); + cy.request('POST', `${BASE_URL}/rest/e2e/reset`, { + owner: INSTANCE_OWNER, + members: INSTANCE_MEMBERS, + }); }); -// Load custom nodes and credentials fixtures beforeEach(() => { + if (!cy.config('disableAutoLogin')) { + cy.signin({ email: INSTANCE_OWNER.email, password: INSTANCE_OWNER.password }); + } + cy.intercept('GET', '/rest/settings').as('loadSettings'); - cy.intercept('GET', '/rest/login').as('loadLogin'); // Always intercept the request to test credentials and return a success cy.intercept('POST', '/rest/credentials/test', { diff --git a/cypress/support/index.ts b/cypress/support/index.ts index 7b1b15db2b..196a14d9ec 100644 --- a/cypress/support/index.ts +++ b/cypress/support/index.ts @@ -8,25 +8,14 @@ interface SigninPayload { password: string; } -interface SetupPayload { - email: string; - password: string; - firstName: string; - lastName: string; -} - -interface SignupPayload extends SetupPayload { - url: string; -} - -interface InviteUsersPayload { - instanceOwner: SigninPayload; - users: SetupPayload[]; -} - declare global { namespace Cypress { + interface SuiteConfigOverrides { + disableAutoLogin: boolean; + } + interface Chainable { + config(key: keyof SuiteConfigOverrides): boolean; getByTestId( selector: string, ...args: (Partial | undefined)[] @@ -35,13 +24,7 @@ declare global { createFixtureWorkflow(fixtureKey: string, workflowName: string): void; signin(payload: SigninPayload): void; signout(): void; - signup(payload: SignupPayload): void; - setup(payload: SetupPayload, skipIntercept?: boolean): void; - setupOwner(payload: SetupPayload): void; - inviteUsers(payload: InviteUsersPayload): void; interceptREST(method: string, url: string): Chainable; - skipSetup(): void; - resetAll(): void; enableFeature(feature: string): void; disableFeature(feature: string): void; waitForLoad(waitForIntercepts?: boolean): void; diff --git a/docker/compose/subfolderWithSSL/.env b/docker/compose/subfolderWithSSL/.env index 7008bd631a..c0b6bb180d 100644 --- a/docker/compose/subfolderWithSSL/.env +++ b/docker/compose/subfolderWithSSL/.env @@ -11,12 +11,6 @@ N8N_PATH=/app1/ # DOMAIN_NAME and SUBDOMAIN combined decide where n8n will be reachable from # above example would result in: https://example.com/n8n/ -# The user name to use for autentication - IMPORTANT ALWAYS CHANGE! -N8N_BASIC_AUTH_USER=user - -# The password to use for autentication - IMPORTANT ALWAYS CHANGE! 
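The reworked support setup above resets the instance through `/rest/e2e/reset` and signs every suite in as `INSTANCE_OWNER`, unless the suite opts out via the `disableAutoLogin` override declared in `SuiteConfigOverrides`. A usage sketch, with the suite name and body purely illustrative:

```ts
import { INSTANCE_OWNER } from '../constants';

// Illustrative suite: skips the automatic owner login performed in support/e2e.ts
// and authenticates explicitly with the cookie-based cy.signin() command instead.
// The disableAutoLogin key mirrors the SuiteConfigOverrides declaration above.
describe('Suite that manages its own session', { disableAutoLogin: true }, () => {
	beforeEach(() => {
		cy.signin({ email: INSTANCE_OWNER.email, password: INSTANCE_OWNER.password });
	});

	it('loads the canvas as the instance owner', () => {
		cy.visit('/workflow/new');
		cy.waitForLoad();
	});
});
```

Because the rewritten `cy.signin()` posts directly to `/rest/login` inside `cy.session()`, repeated sign-ins for the same credentials reuse the cached auth cookie rather than replaying the request.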
-N8N_BASIC_AUTH_PASSWORD=password - # Optional timezone to set which gets used by Cron-Node by default # If not set New York time will be used GENERIC_TIMEZONE=Europe/Berlin diff --git a/docker/compose/subfolderWithSSL/docker-compose.yml b/docker/compose/subfolderWithSSL/docker-compose.yml index 37a4b25f18..9c4cc247c3 100644 --- a/docker/compose/subfolderWithSSL/docker-compose.yml +++ b/docker/compose/subfolderWithSSL/docker-compose.yml @@ -41,9 +41,6 @@ services: - traefik.http.middlewares.n8n.headers.STSIncludeSubdomains=true - traefik.http.middlewares.n8n.headers.STSPreload=true environment: - - N8N_BASIC_AUTH_ACTIVE=true - - N8N_BASIC_AUTH_USER - - N8N_BASIC_AUTH_PASSWORD - N8N_HOST=${DOMAIN_NAME} - N8N_PORT=5678 - N8N_PROTOCOL=https diff --git a/docker/compose/withMariaDB/.env b/docker/compose/withMariaDB/.env deleted file mode 100644 index 48f9efcd64..0000000000 --- a/docker/compose/withMariaDB/.env +++ /dev/null @@ -1,8 +0,0 @@ -MARIADB_ROOT_PASSWORD=changePassword - -MARIADB_DATABASE=n8n -MARIADB_USER=changeUser -MARIADB_PASSWORD=changePassword - -N8N_BASIC_AUTH_USER=changeUser -N8N_BASIC_AUTH_PASSWORD=changePassword diff --git a/docker/compose/withMariaDB/README.md b/docker/compose/withMariaDB/README.md deleted file mode 100644 index 6af2fa8c8a..0000000000 --- a/docker/compose/withMariaDB/README.md +++ /dev/null @@ -1,24 +0,0 @@ -# n8n with MariaDB - -Starts n8n with MariaDB as database. - -## Start - -To start n8n with MariaDB simply start docker-compose by executing the following -command in the current folder. - -**IMPORTANT:** But before you do that change the default users and passwords in the [`.env`](.env) file! - -``` -docker-compose up -d -``` - -To stop it execute: - -``` -docker-compose stop -``` - -## Configuration - -The default name of the database, user and password for MariaDB can be changed in the [`.env`](.env) file in the current directory. 
diff --git a/docker/compose/withMariaDB/docker-compose.yml b/docker/compose/withMariaDB/docker-compose.yml deleted file mode 100644 index e7a1db74e0..0000000000 --- a/docker/compose/withMariaDB/docker-compose.yml +++ /dev/null @@ -1,43 +0,0 @@ -version: '3.8' - -volumes: - db_storage: - n8n_storage: - -services: - db: - image: mariadb:10.7 - restart: always - environment: - - MARIADB_ROOT_PASSWORD - - MARIADB_DATABASE - - MARIADB_USER - - MARIADB_PASSWORD - - MARIADB_MYSQL_LOCALHOST_USER=true - volumes: - - db_storage:/var/lib/mysql - healthcheck: - test: "/usr/bin/mysql --user=${MARIADB_USER} --password=${MARIADB_PASSWORD} --execute 'SELECT 1;'" - interval: 10s - timeout: 5s - retries: 10 - - n8n: - image: docker.n8n.io/n8nio/n8n - restart: always - environment: - - DB_TYPE=mariadb - - DB_MYSQLDB_HOST=db - - DB_MYSQLDB_DATABASE=${MARIADB_DATABASE} - - DB_MYSQLDB_USER=${MARIADB_USER} - - DB_MYSQLDB_PASSWORD=${MARIADB_PASSWORD} - ports: - - 5678:5678 - links: - - db - volumes: - - n8n_storage:/home/node/.n8n - command: n8n start --tunnel - depends_on: - db: - condition: service_healthy diff --git a/docker/compose/withPostgres/.env b/docker/compose/withPostgres/.env index c074f42c9e..90b6726ead 100644 --- a/docker/compose/withPostgres/.env +++ b/docker/compose/withPostgres/.env @@ -4,6 +4,3 @@ POSTGRES_DB=n8n POSTGRES_NON_ROOT_USER=changeUser POSTGRES_NON_ROOT_PASSWORD=changePassword - -N8N_BASIC_AUTH_USER=changeUser -N8N_BASIC_AUTH_PASSWORD=changePassword diff --git a/docker/compose/withPostgres/docker-compose.yml b/docker/compose/withPostgres/docker-compose.yml index 9b3ab5b83e..57f83f328a 100644 --- a/docker/compose/withPostgres/docker-compose.yml +++ b/docker/compose/withPostgres/docker-compose.yml @@ -33,16 +33,12 @@ services: - DB_POSTGRESDB_DATABASE=${POSTGRES_DB} - DB_POSTGRESDB_USER=${POSTGRES_NON_ROOT_USER} - DB_POSTGRESDB_PASSWORD=${POSTGRES_NON_ROOT_PASSWORD} - - N8N_BASIC_AUTH_ACTIVE=true - - N8N_BASIC_AUTH_USER - - N8N_BASIC_AUTH_PASSWORD ports: - 5678:5678 links: - postgres volumes: - n8n_storage:/home/node/.n8n - command: /bin/sh -c "n8n start --tunnel" depends_on: postgres: condition: service_healthy diff --git a/docker/compose/withPostgresAndWorker/.env b/docker/compose/withPostgresAndWorker/.env index c074f42c9e..90b6726ead 100644 --- a/docker/compose/withPostgresAndWorker/.env +++ b/docker/compose/withPostgresAndWorker/.env @@ -4,6 +4,3 @@ POSTGRES_DB=n8n POSTGRES_NON_ROOT_USER=changeUser POSTGRES_NON_ROOT_PASSWORD=changePassword - -N8N_BASIC_AUTH_USER=changeUser -N8N_BASIC_AUTH_PASSWORD=changePassword diff --git a/docker/compose/withPostgresAndWorker/docker-compose.yml b/docker/compose/withPostgresAndWorker/docker-compose.yml index 9f947d735d..038490e941 100644 --- a/docker/compose/withPostgresAndWorker/docker-compose.yml +++ b/docker/compose/withPostgresAndWorker/docker-compose.yml @@ -7,6 +7,7 @@ volumes: x-shared: &shared restart: always + image: docker.n8n.io/n8nio/n8n environment: - DB_TYPE=postgresdb - DB_POSTGRESDB_HOST=postgres @@ -17,9 +18,6 @@ x-shared: &shared - EXECUTIONS_MODE=queue - QUEUE_BULL_REDIS_HOST=redis - QUEUE_HEALTH_CHECK_ACTIVE=true - - N8N_BASIC_AUTH_ACTIVE=true - - N8N_BASIC_AUTH_USER - - N8N_BASIC_AUTH_PASSWORD links: - postgres - redis @@ -63,14 +61,11 @@ services: n8n: <<: *shared - image: docker.n8n.io/n8nio/n8n - command: /bin/sh -c "n8n start --tunnel" ports: - 5678:5678 n8n-worker: <<: *shared - image: docker.n8n.io/n8nio/n8n - command: /bin/sh -c "sleep 5; n8n worker" + command: worker depends_on: - n8n diff --git 
a/docker/images/n8n-base/Dockerfile b/docker/images/n8n-base/Dockerfile index bd7b5b96f2..022325e31a 100644 --- a/docker/images/n8n-base/Dockerfile +++ b/docker/images/n8n-base/Dockerfile @@ -1,12 +1,12 @@ -ARG NODE_VERSION=16 +ARG NODE_VERSION=18 FROM node:${NODE_VERSION}-alpine WORKDIR /home/node COPY .npmrc /usr/local/etc/npmrc RUN \ - apk add --update git openssh graphicsmagick tini tzdata ca-certificates && \ - npm install -g npm@8.19.2 full-icu && \ + apk add --update git openssh graphicsmagick tini tzdata ca-certificates libc6-compat && \ + npm install -g npm@9.5.1 full-icu && \ rm -rf /var/cache/apk/* /root/.npm /tmp/* && \ # Install fonts apk --no-cache add --virtual fonts msttcorefonts-installer fontconfig && \ diff --git a/docker/images/n8n-custom/Dockerfile b/docker/images/n8n-custom/Dockerfile index 8f7343a6cc..03dc11c4b9 100644 --- a/docker/images/n8n-custom/Dockerfile +++ b/docker/images/n8n-custom/Dockerfile @@ -8,7 +8,7 @@ COPY --chown=node:node scripts ./scripts COPY --chown=node:node packages ./packages COPY --chown=node:node patches ./patches -RUN apk add --update libc6-compat jq +RUN apk add --update jq RUN corepack enable && corepack prepare --activate USER node @@ -28,7 +28,8 @@ RUN rm -rf patches .npmrc *.yaml node_modules/.cache packages/**/node_modules/.c FROM n8nio/base:${NODE_VERSION} COPY --from=builder /home/node /usr/local/lib/node_modules/n8n RUN ln -s /usr/local/lib/node_modules/n8n/packages/cli/bin/n8n /usr/local/bin/n8n -COPY docker/images/n8n-custom/docker-entrypoint.sh / + +COPY docker/images/n8n/docker-entrypoint.sh / RUN \ mkdir .n8n && \ diff --git a/docker/images/n8n-custom/docker-entrypoint.sh b/docker/images/n8n-custom/docker-entrypoint.sh deleted file mode 100755 index 42419b3924..0000000000 --- a/docker/images/n8n-custom/docker-entrypoint.sh +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/sh -if [ "$#" -gt 0 ]; then - # Got started with arguments - node "$@" -else - # Got started without arguments - n8n -fi diff --git a/docker/images/n8n-debian/Dockerfile b/docker/images/n8n-debian/Dockerfile deleted file mode 100644 index 8bc7cc982c..0000000000 --- a/docker/images/n8n-debian/Dockerfile +++ /dev/null @@ -1,24 +0,0 @@ -FROM node:16 - -ARG N8N_VERSION - -RUN if [ -z "$N8N_VERSION" ] ; then echo "The N8N_VERSION argument is missing!" ; exit 1; fi - -ENV N8N_VERSION=${N8N_VERSION} -RUN \ - apt-get update && \ - apt-get -y install graphicsmagick gosu git - -# Set a custom user to not have n8n run as root -USER root - -RUN npm_config_user=root npm install -g npm@8.19.2 full-icu n8n@${N8N_VERSION} - -ENV NODE_ICU_DATA /usr/local/lib/node_modules/full-icu - -WORKDIR /data - -COPY docker-entrypoint.sh /docker-entrypoint.sh -ENTRYPOINT ["/docker-entrypoint.sh"] - -EXPOSE 5678/tcp diff --git a/docker/images/n8n-debian/README.md b/docker/images/n8n-debian/README.md deleted file mode 100644 index 236305e8ea..0000000000 --- a/docker/images/n8n-debian/README.md +++ /dev/null @@ -1,20 +0,0 @@ -## n8n - Debian Docker Image - -Dockerfile to build n8n with Debian. - -For information about how to run n8n with Docker check the generic -[Docker-Readme](https://github.com/n8n-io/n8n/tree/master/docker/images/n8n/README.md) - -``` -docker build --build-arg N8N_VERSION= -t n8nio/n8n: . - -# For example: -docker build --build-arg N8N_VERSION=0.43.0 -t n8nio/n8n:0.43.0-debian . 
-``` - -``` -docker run -it --rm \ - --name n8n \ - -p 5678:5678 \ - n8nio/n8n:0.43.0-debian -``` diff --git a/docker/images/n8n-debian/docker-entrypoint.sh b/docker/images/n8n-debian/docker-entrypoint.sh deleted file mode 100755 index 80a252f31e..0000000000 --- a/docker/images/n8n-debian/docker-entrypoint.sh +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/sh - -if [ -d /root/.n8n ] ; then - chmod o+rx /root - chown -R node /root/.n8n - ln -s /root/.n8n /home/node/ -fi - -if [ "$#" -gt 0 ]; then - # Got started with arguments - exec gosu node "$@" -else - # Got started without arguments - exec gosu node n8n -fi diff --git a/docker/images/n8n-rhel7/Dockerfile b/docker/images/n8n-rhel7/Dockerfile deleted file mode 100644 index 09fd0ad019..0000000000 --- a/docker/images/n8n-rhel7/Dockerfile +++ /dev/null @@ -1,24 +0,0 @@ -FROM richxsl/rhel7 - -ARG N8N_VERSION - -RUN if [ -z "$N8N_VERSION" ] ; then echo "The N8N_VERSION argument is missing!" ; exit 1; fi - -ENV N8N_VERSION=${N8N_VERSION} -RUN \ - yum install -y gcc-c++ make - -RUN \ - curl -sL https://rpm.nodesource.com/setup_12.x | sudo -E bash - - -RUN \ - sudo yum install nodejs - -# Set a custom user to not have n8n run as root -USER root - -RUN npm_config_user=root npm install -g npm@8.19.2 n8n@${N8N_VERSION} - -WORKDIR /data - -CMD "n8n" diff --git a/docker/images/n8n-rhel7/README.md b/docker/images/n8n-rhel7/README.md deleted file mode 100644 index 559aec3fa2..0000000000 --- a/docker/images/n8n-rhel7/README.md +++ /dev/null @@ -1,15 +0,0 @@ -## Build Docker-Image - -``` -docker build --build-arg N8N_VERSION= -t n8nio/n8n: . - -# For example: -docker build --build-arg N8N_VERSION=0.36.1 -t n8nio/n8n:0.36.1-rhel7 . -``` - -``` -docker run -it --rm \ - --name n8n \ - -p 5678:5678 \ - n8nio/n8n:0.25.0-ubuntu -``` diff --git a/docker/images/n8n/Dockerfile b/docker/images/n8n/Dockerfile index c85ef12a66..34da16ada2 100644 --- a/docker/images/n8n/Dockerfile +++ b/docker/images/n8n/Dockerfile @@ -1,4 +1,4 @@ -ARG NODE_VERSION=16 +ARG NODE_VERSION=18 FROM n8nio/base:${NODE_VERSION} ARG N8N_VERSION @@ -18,9 +18,10 @@ RUN set -eux; \ find /usr/local/lib/node_modules/n8n -type f -name "*.ts" -o -name "*.js.map" -o -name "*.vue" | xargs rm && \ rm -rf /root/.npm -# Set a custom user to not have n8n run as root -USER root -WORKDIR /data -RUN apk --no-cache add su-exec -COPY docker-entrypoint.sh /docker-entrypoint.sh +COPY docker-entrypoint.sh / + +RUN \ + mkdir .n8n && \ + chown node:node .n8n +USER node ENTRYPOINT ["tini", "--", "/docker-entrypoint.sh"] diff --git a/docker/images/n8n/README.md b/docker/images/n8n/README.md index 376c4494ab..75cfba3855 100644 --- a/docker/images/n8n/README.md +++ b/docker/images/n8n/README.md @@ -8,21 +8,27 @@ n8n is an extendable workflow automation tool. 
With a [fair-code](http://faircod ## Contents -- [Demo](#demo) -- [Available integrations](#available-integrations) -- [Documentation](#documentation) -- [Start n8n in Docker](#start-n8n-in-docker) -- [Start with tunnel](#start-with-tunnel) -- [Securing n8n](#securing-n8n) -- [Persist data](#persist-data) -- [Passing Sensitive Data via File](#passing-sensitive-data-via-file) -- [Updating a Running docker-compose Instance](#updating-a-running-docker-compose-instance) -- [Example Setup with Lets Encrypt](#example-setup-with-lets-encrypt) -- [What does n8n mean and how do you pronounce it](#what-does-n8n-mean-and-how-do-you-pronounce-it) -- [Support](#support) -- [Jobs](#jobs) -- [Upgrading](#upgrading) -- [License](#license) +- [n8n - Workflow automation tool](#n8n---workflow-automation-tool) + - [Contents](#contents) + - [Demo](#demo) + - [Available integrations](#available-integrations) + - [Documentation](#documentation) + - [Start n8n in Docker](#start-n8n-in-docker) + - [Start with tunnel](#start-with-tunnel) + - [Persist data](#persist-data) + - [Start with other Database](#start-with-other-database) + - [Use with PostgresDB](#use-with-postgresdb) + - [Use with MySQL](#use-with-mysql) + - [Passing Sensitive Data via File](#passing-sensitive-data-via-file) + - [Example Setup with Lets Encrypt](#example-setup-with-lets-encrypt) + - [Updating a running docker-compose instance](#updating-a-running-docker-compose-instance) + - [Setting Timezone](#setting-timezone) + - [Build Docker-Image](#build-docker-image) + - [What does n8n mean and how do you pronounce it?](#what-does-n8n-mean-and-how-do-you-pronounce-it) + - [Support](#support) + - [Jobs](#jobs) + - [Upgrading](#upgrading) + - [License](#license) ## Demo @@ -71,20 +77,6 @@ docker run -it --rm \ n8n start --tunnel ``` -## Securing n8n - -By default n8n can be accessed by everybody. This is OK if you have it only running -locally but if you deploy it on a server which is accessible from the web you have -to make sure that n8n is protected! -Right now we have very basic protection via basic-auth in place. It can be activated -by setting the following environment variables: - -```text -N8N_BASIC_AUTH_ACTIVE=true -N8N_BASIC_AUTH_USER= -N8N_BASIC_AUTH_PASSWORD= -``` - ## Persist data The workflow data gets by default saved in an SQLite database in the user @@ -171,7 +163,7 @@ docker run -it --rm \ To avoid passing sensitive information via environment variables "\_FILE" may be appended to some environment variables. It will then load the data from a file with the given name. That makes it possible to load data easily from -Docker- and Kubernetes-Secrets. +Docker and Kubernetes secrets. The following environment variables support file input: @@ -181,8 +173,6 @@ The following environment variables support file input: - DB_POSTGRESDB_PORT_FILE - DB_POSTGRESDB_USER_FILE - DB_POSTGRESDB_SCHEMA_FILE -- N8N_BASIC_AUTH_PASSWORD_FILE -- N8N_BASIC_AUTH_USER_FILE ## Example Setup with Lets Encrypt @@ -193,26 +183,25 @@ A basic step by step example setup of n8n with docker-compose and Lets Encrypt i 1. Pull the latest version from the registry - `docker pull docker.n8n.io/n8nio/n8n` + `docker pull docker.n8n.io/n8nio/n8n` 2. Stop the current setup - `sudo docker-compose stop` + `sudo docker-compose stop` 3. Delete it (will only delete the docker-containers, data is stored separately) - `sudo docker-compose rm` + `sudo docker-compose rm` 4. 
Then start it again - `sudo docker-compose up -d` + `sudo docker-compose up -d` ## Setting Timezone To define the timezone n8n should use, the environment variable `GENERIC_TIMEZONE` can -be set. This gets used by for example the Cron-Node. -Apart from that can also the timezone of the system be set separately. Which controls what -some scripts and commands return like `$ date`. The system timezone can be set via +be set. One instance where this variable is implemented is in the Schedule node. Furthermore, the system's timezone can be set separately, +which controls the output of certain scripts and commands such as `$ date`. The system timezone can be set via the environment variable `TZ`. Example to use the same timezone for both: diff --git a/docker/images/n8n/docker-entrypoint.sh b/docker/images/n8n/docker-entrypoint.sh index fa81b3713c..508655ea33 100755 --- a/docker/images/n8n/docker-entrypoint.sh +++ b/docker/images/n8n/docker-entrypoint.sh @@ -1,17 +1,8 @@ #!/bin/sh - -if [ -d /root/.n8n ] ; then - chmod o+rx /root - chown -R node /root/.n8n - ln -s /root/.n8n /home/node/ -fi - -chown -R node /home/node - if [ "$#" -gt 0 ]; then # Got started with arguments - exec su-exec node "$@" + n8n "$@" else # Got started without arguments - exec su-exec node n8n + n8n fi diff --git a/docker/images/n8n/hooks/build b/docker/images/n8n/hooks/build deleted file mode 100644 index a09c622456..0000000000 --- a/docker/images/n8n/hooks/build +++ /dev/null @@ -1,2 +0,0 @@ -#!/bin/bash -docker build --build-arg N8N_VERSION=$DOCKER_TAG -f $DOCKERFILE_PATH -t $IMAGE_NAME . diff --git a/package.json b/package.json index 2692bef62c..44d263d153 100644 --- a/package.json +++ b/package.json @@ -1,10 +1,10 @@ { "name": "n8n", - "version": "0.234.0", + "version": "1.0.1", "private": true, "homepage": "https://n8n.io", "engines": { - "node": ">=16.9", + "node": ">=18.10", "pnpm": ">=8.6" }, "packageManager": "pnpm@8.6.1", @@ -30,7 +30,6 @@ "cypress:open": "CYPRESS_BASE_URL=http://localhost:8080 cypress open", "test:e2e:ui": "cross-env E2E_TESTS=true NODE_OPTIONS=--dns-result-order=ipv4first start-server-and-test start http://localhost:5678/favicon.ico 'cypress open'", "test:e2e:dev": "cross-env E2E_TESTS=true NODE_OPTIONS=--dns-result-order=ipv4first CYPRESS_BASE_URL=http://localhost:8080 start-server-and-test dev http://localhost:8080/favicon.ico 'cypress open'", - "test:e2e:smoke": "cross-env E2E_TESTS=true NODE_OPTIONS=--dns-result-order=ipv4first start-server-and-test start http://localhost:5678/favicon.ico 'cypress run --headless --spec \"cypress/e2e/0-smoke.cy.ts\"'", "test:e2e:all": "cross-env E2E_TESTS=true NODE_OPTIONS=--dns-result-order=ipv4first start-server-and-test start http://localhost:5678/favicon.ico 'cypress run --headless'" }, "dependencies": { @@ -52,7 +51,6 @@ "jest-mock": "^29.5.0", "jest-mock-extended": "^3.0.4", "nock": "^13.2.9", - "node-fetch": "^2.6.7", "p-limit": "^3.1.0", "prettier": "^2.8.3", "rimraf": "^3.0.2", @@ -82,6 +80,7 @@ "http-cache-semantics": "4.1.1", "jsonwebtoken": "9.0.0", "prettier": "^2.8.3", + "tough-cookie": "^4.1.3", "tslib": "^2.5.0", "ts-node": "^10.9.1", "typescript": "^5.1.3", @@ -93,8 +92,8 @@ "typedi@0.10.0": "patches/typedi@0.10.0.patch", "@sentry/cli@2.17.0": "patches/@sentry__cli@2.17.0.patch", "pkce-challenge@3.0.0": "patches/pkce-challenge@3.0.0.patch", - "typeorm@0.3.12": "patches/typeorm@0.3.12.patch", - "element-plus@2.3.6": "patches/element-plus@2.3.6.patch" + "element-plus@2.3.6": "patches/element-plus@2.3.6.patch", + "pyodide@0.23.4": 
"patches/pyodide@0.23.4.patch" } } } diff --git a/packages/@n8n/client-oauth2/package.json b/packages/@n8n/client-oauth2/package.json index 20cfdbdcfb..b3acee86b2 100644 --- a/packages/@n8n/client-oauth2/package.json +++ b/packages/@n8n/client-oauth2/package.json @@ -1,6 +1,6 @@ { "name": "@n8n/client-oauth2", - "version": "0.3.0", + "version": "0.4.0", "scripts": { "clean": "rimraf dist .turbo", "dev": "pnpm watch", diff --git a/packages/@n8n/client-oauth2/src/CodeFlow.ts b/packages/@n8n/client-oauth2/src/CodeFlow.ts index fceb71cdd5..7d3b842329 100644 --- a/packages/@n8n/client-oauth2/src/CodeFlow.ts +++ b/packages/@n8n/client-oauth2/src/CodeFlow.ts @@ -57,7 +57,6 @@ export class CodeFlow { opts?: Partial, ): Promise { const options = { ...this.client.options, ...opts }; - expects(options, 'clientId', 'accessTokenUri'); const url = uri instanceof URL ? uri : new URL(uri, DEFAULT_URL_BASE); diff --git a/packages/@n8n/client-oauth2/src/CredentialsFlow.ts b/packages/@n8n/client-oauth2/src/CredentialsFlow.ts index 1b6eb70e8e..d83450a412 100644 --- a/packages/@n8n/client-oauth2/src/CredentialsFlow.ts +++ b/packages/@n8n/client-oauth2/src/CredentialsFlow.ts @@ -21,7 +21,6 @@ export class CredentialsFlow { */ async getToken(opts?: Partial): Promise { const options = { ...this.client.options, ...opts }; - expects(options, 'clientId', 'clientSecret', 'accessTokenUri'); const body: CredentialsFlowBody = { diff --git a/packages/cli/.npmignore b/packages/cli/.npmignore deleted file mode 100644 index e0c1232042..0000000000 --- a/packages/cli/.npmignore +++ /dev/null @@ -1 +0,0 @@ -dist/ReloadNodesAndCredentials.* diff --git a/packages/cli/BREAKING-CHANGES.md b/packages/cli/BREAKING-CHANGES.md index 2269ed9456..6744029d43 100644 --- a/packages/cli/BREAKING-CHANGES.md +++ b/packages/cli/BREAKING-CHANGES.md @@ -2,6 +2,33 @@ This list shows all the versions which include breaking changes and how to upgrade. +## 1.0.0 + +### What changed? + +The minimum Node.js version required for n8n is now v18. + +### When is action necessary? + +If you're using n8n via npm or PM2 or if you're contributing to n8n. + +### How to upgrade: + +Update the Node.js version to v18 or above. + +## 0.234.0 + +### What changed? + +This release introduces two irreversible changes: + +* The n8n database will use strings instead of numeric values to identify workflows and credentials +* Execution data is split into a separate database table + +### When is action necessary? + +It will not be possible to read a n8n@0.234.0 database with older versions of n8n, so we recommend that you take a full backup before migrating. + ## 0.232.0 ### What changed? diff --git a/packages/cli/bin/n8n b/packages/cli/bin/n8n index 92c38b416b..1f81419a84 100755 --- a/packages/cli/bin/n8n +++ b/packages/cli/bin/n8n @@ -21,10 +21,10 @@ if (process.argv.length === 2) { const nodeVersion = process.versions.node; const nodeVersionMajor = require('semver').major(nodeVersion); -if (![16, 18].includes(nodeVersionMajor)) { +if (![18, 20].includes(nodeVersionMajor)) { console.log(` Your Node.js version (${nodeVersion}) is currently not supported by n8n. - Please use Node.js v16 (recommended), or v18 instead! + Please use Node.js v18 (recommended), or v20 instead! 
`); process.exit(1); } diff --git a/packages/cli/package.json b/packages/cli/package.json index a00f90620e..3949647730 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -1,6 +1,6 @@ { "name": "n8n", - "version": "0.234.0", + "version": "1.0.1", "description": "n8n Workflow Automation Tool", "license": "SEE LICENSE IN LICENSE.md", "homepage": "https://n8n.io", @@ -53,13 +53,15 @@ "workflow" ], "engines": { - "node": ">=16.9" + "node": ">=18.10" }, "files": [ "bin", "templates", "dist", - "oclif.manifest.json" + "oclif.manifest.json", + "!dist/**/e2e.*", + "!dist/ReloadNodesAndCredentials.*" ], "devDependencies": { "@apidevtools/swagger-cli": "4.0.0", @@ -72,7 +74,7 @@ "@types/convict": "^6.1.1", "@types/cookie-parser": "^1.4.2", "@types/express": "^4.17.6", - "@types/json-diff": "^0.5.1", + "@types/json-diff": "^1.0.0", "@types/jsonwebtoken": "^9.0.1", "@types/localtunnel": "^1.9.0", "@types/lodash": "^4.14.195", @@ -137,7 +139,7 @@ "handlebars": "4.7.7", "inquirer": "^7.0.1", "ioredis": "^5.2.4", - "json-diff": "^0.5.4", + "json-diff": "^1.0.6", "jsonschema": "^1.4.1", "jsonwebtoken": "^9.0.0", "jwks-rsa": "^3.0.1", diff --git a/packages/cli/scripts/build.mjs b/packages/cli/scripts/build.mjs index 63a9cb2f6f..4c2b43e74c 100644 --- a/packages/cli/scripts/build.mjs +++ b/packages/cli/scripts/build.mjs @@ -9,12 +9,9 @@ const ROOT_DIR = path.resolve(__dirname, '..'); const SPEC_FILENAME = 'openapi.yml'; const SPEC_THEME_FILENAME = 'swaggerTheme.css'; -const userManagementEnabled = process.env.N8N_USER_MANAGEMENT_DISABLED !== 'true'; const publicApiEnabled = process.env.N8N_PUBLIC_API_DISABLED !== 'true'; -if (userManagementEnabled) { - copyUserManagementEmailTemplates(); -} +copyUserManagementEmailTemplates(); if (publicApiEnabled) { copySwaggerTheme(); diff --git a/packages/cli/src/ActiveExecutions.ts b/packages/cli/src/ActiveExecutions.ts index db84d1da8a..2cccb4fb6c 100644 --- a/packages/cli/src/ActiveExecutions.ts +++ b/packages/cli/src/ActiveExecutions.ts @@ -2,6 +2,7 @@ /* eslint-disable @typescript-eslint/no-unsafe-call */ /* eslint-disable @typescript-eslint/no-non-null-assertion */ /* eslint-disable @typescript-eslint/no-unsafe-assignment */ +import { Container, Service } from 'typedi'; import type { IDeferredPromise, IExecuteResponsePromiseData, @@ -19,8 +20,7 @@ import type { IWorkflowExecutionDataProcess, } from '@/Interfaces'; import { isWorkflowIdValid } from '@/utils'; -import Container, { Service } from 'typedi'; -import { ExecutionRepository } from './databases/repositories'; +import { ExecutionRepository } from '@db/repositories'; @Service() export class ActiveExecutions { diff --git a/packages/cli/src/CredentialTypes.ts b/packages/cli/src/CredentialTypes.ts index 597ff1dfa7..6ea919cea4 100644 --- a/packages/cli/src/CredentialTypes.ts +++ b/packages/cli/src/CredentialTypes.ts @@ -26,16 +26,13 @@ export class CredentialTypes implements ICredentialTypes { * Returns all parent types of the given credential type */ getParentTypes(typeName: string): string[] { - const credentialType = this.getByName(typeName); - if (credentialType?.extends === undefined) return []; - - const types: string[] = []; - credentialType.extends.forEach((type: string) => { - types.push(type); - types.push(...this.getParentTypes(type)); - }); - - return types; + const extendsArr = this.knownCredentials[typeName]?.extends ?? 
[]; + if (extendsArr.length) { + extendsArr.forEach((type) => { + extendsArr.push(...this.getParentTypes(type)); + }); + } + return extendsArr; } private getCredential(type: string): LoadedClass { diff --git a/packages/cli/src/ErrorReporting.ts b/packages/cli/src/ErrorReporting.ts index ab40038080..0388be4fc0 100644 --- a/packages/cli/src/ErrorReporting.ts +++ b/packages/cli/src/ErrorReporting.ts @@ -46,7 +46,6 @@ export const initErrorHandling = async () => { process.on('uncaughtException', (error) => { ErrorReporterProxy.error(error); - if (error.constructor?.name !== 'AxiosError') throw error; }); ErrorReporterProxy.init({ diff --git a/packages/cli/src/GenericHelpers.ts b/packages/cli/src/GenericHelpers.ts index 13b2aaaeb7..e012accd58 100644 --- a/packages/cli/src/GenericHelpers.ts +++ b/packages/cli/src/GenericHelpers.ts @@ -13,6 +13,7 @@ import type { WorkflowExecuteMode, } from 'n8n-workflow'; import { validate } from 'class-validator'; +import { Container } from 'typedi'; import { Like } from 'typeorm'; import config from '@/config'; import * as Db from '@/Db'; @@ -23,8 +24,7 @@ import type { CredentialsEntity } from '@db/entities/CredentialsEntity'; import type { TagEntity } from '@db/entities/TagEntity'; import type { User } from '@db/entities/User'; import type { UserUpdatePayload } from '@/requests'; -import Container from 'typedi'; -import { ExecutionRepository } from './databases/repositories'; +import { ExecutionRepository } from '@db/repositories'; /** * Returns the base URL n8n is reachable from diff --git a/packages/cli/src/Interfaces.ts b/packages/cli/src/Interfaces.ts index f53af09f94..c57ee5df27 100644 --- a/packages/cli/src/Interfaces.ts +++ b/packages/cli/src/Interfaces.ts @@ -61,6 +61,7 @@ import type { WorkflowStatisticsRepository, WorkflowTagMappingRepository, } from '@db/repositories'; +import type { LICENSE_FEATURES, LICENSE_QUOTAS } from './constants'; export interface IActivationError { time: number; @@ -306,7 +307,6 @@ export interface IDiagnosticInfo { databaseType: DatabaseType; notificationsEnabled: boolean; disableProductionWebhooksOnMainProcess: boolean; - basicAuthActive: boolean; systemInfo: { os: { type?: string; @@ -324,7 +324,6 @@ export interface IDiagnosticInfo { }; deploymentType: string; binaryDataMode: string; - n8n_multi_user_allowed: boolean; smtp_set_up: boolean; ldap_allowed: boolean; saml_enabled: boolean; @@ -718,6 +717,11 @@ export interface IExecutionTrackProperties extends ITelemetryTrackProperties { // license // ---------------------------------- +type ValuesOf = T[keyof T]; + +export type BooleanLicenseFeature = ValuesOf; +export type NumericLicenseFeature = ValuesOf; + export interface ILicenseReadResponse { usage: { executions: { diff --git a/packages/cli/src/InternalHooks.ts b/packages/cli/src/InternalHooks.ts index 54436694f9..c148e6c4d1 100644 --- a/packages/cli/src/InternalHooks.ts +++ b/packages/cli/src/InternalHooks.ts @@ -30,8 +30,8 @@ import { eventBus } from './eventbus'; import type { User } from '@db/entities/User'; import { N8N_VERSION } from '@/constants'; import { NodeTypes } from './NodeTypes'; -import type { ExecutionMetadata } from './databases/entities/ExecutionMetadata'; -import { ExecutionRepository } from './databases/repositories'; +import type { ExecutionMetadata } from '@db/entities/ExecutionMetadata'; +import { ExecutionRepository } from '@db/repositories'; function userToPayload(user: User): { userId: string; @@ -75,12 +75,10 @@ export class InternalHooks implements IInternalHooksClass { db_type: 
diagnosticInfo.databaseType, n8n_version_notifications_enabled: diagnosticInfo.notificationsEnabled, n8n_disable_production_main_process: diagnosticInfo.disableProductionWebhooksOnMainProcess, - n8n_basic_auth_active: diagnosticInfo.basicAuthActive, system_info: diagnosticInfo.systemInfo, execution_variables: diagnosticInfo.executionVariables, n8n_deployment_type: diagnosticInfo.deploymentType, n8n_binary_data_mode: diagnosticInfo.binaryDataMode, - n8n_multi_user_allowed: diagnosticInfo.n8n_multi_user_allowed, smtp_set_up: diagnosticInfo.smtp_set_up, ldap_allowed: diagnosticInfo.ldap_allowed, saml_enabled: diagnosticInfo.saml_enabled, diff --git a/packages/cli/src/Ldap/helpers.ts b/packages/cli/src/Ldap/helpers.ts index 8c51add60b..664d730081 100644 --- a/packages/cli/src/Ldap/helpers.ts +++ b/packages/cli/src/Ldap/helpers.ts @@ -12,7 +12,6 @@ import { User } from '@db/entities/User'; import { AuthIdentity } from '@db/entities/AuthIdentity'; import { RoleRepository } from '@db/repositories'; import type { AuthProviderSyncHistory } from '@db/entities/AuthProviderSyncHistory'; -import { isUserManagementEnabled } from '@/UserManagement/UserManagementHelper'; import { LdapManager } from './LdapManager.ee'; import { @@ -37,9 +36,8 @@ import { InternalServerError } from '../ResponseHelper'; /** * Check whether the LDAP feature is disabled in the instance */ -export const isLdapEnabled = (): boolean => { - const license = Container.get(License); - return isUserManagementEnabled() && license.isLdapEnabled(); +export const isLdapEnabled = () => { + return Container.get(License).isLdapEnabled(); }; /** diff --git a/packages/cli/src/License.ts b/packages/cli/src/License.ts index 3452bc75ca..ed27e8d6b9 100644 --- a/packages/cli/src/License.ts +++ b/packages/cli/src/License.ts @@ -9,8 +9,16 @@ import { LICENSE_QUOTAS, N8N_VERSION, SETTINGS_LICENSE_CERT_KEY, + UNLIMITED_LICENSE_QUOTA, } from './constants'; import { Service } from 'typedi'; +import type { BooleanLicenseFeature, NumericLicenseFeature } from './Interfaces'; + +type FeatureReturnType = Partial< + { + planName: string; + } & { [K in NumericLicenseFeature]: number } & { [K in BooleanLicenseFeature]: boolean } +>; @Service() export class License { @@ -96,13 +104,8 @@ export class License { await this.manager.renew(); } - isFeatureEnabled(feature: string): boolean { - if (!this.manager) { - getLogger().warn('License manager not initialized'); - return false; - } - - return this.manager.hasFeatureEnabled(feature); + isFeatureEnabled(feature: BooleanLicenseFeature) { + return this.manager?.hasFeatureEnabled(feature) ?? false; } isSharingEnabled() { @@ -141,15 +144,8 @@ export class License { return this.manager?.getCurrentEntitlements() ?? []; } - getFeatureValue( - feature: string, - requireValidCert?: boolean, - ): undefined | boolean | number | string { - if (!this.manager) { - return undefined; - } - - return this.manager.getFeatureValue(feature, requireValidCert); + getFeatureValue(feature: T): FeatureReturnType[T] { + return this.manager?.getFeatureValue(feature) as FeatureReturnType[T]; } getManagementJwt(): string { @@ -178,20 +174,20 @@ export class License { } // Helper functions for computed data - getTriggerLimit(): number { - return (this.getFeatureValue(LICENSE_QUOTAS.TRIGGER_LIMIT) ?? -1) as number; + getUsersLimit() { + return this.getFeatureValue(LICENSE_QUOTAS.USERS_LIMIT) ?? UNLIMITED_LICENSE_QUOTA; } - getVariablesLimit(): number { - return (this.getFeatureValue(LICENSE_QUOTAS.VARIABLES_LIMIT) ?? 
-1) as number; + getTriggerLimit() { + return this.getFeatureValue(LICENSE_QUOTAS.TRIGGER_LIMIT) ?? UNLIMITED_LICENSE_QUOTA; } - getUsersLimit(): number { - return this.getFeatureValue(LICENSE_QUOTAS.USERS_LIMIT) as number; + getVariablesLimit() { + return this.getFeatureValue(LICENSE_QUOTAS.VARIABLES_LIMIT) ?? UNLIMITED_LICENSE_QUOTA; } getPlanName(): string { - return (this.getFeatureValue('planName') ?? 'Community') as string; + return this.getFeatureValue('planName') ?? 'Community'; } getInfo(): string { @@ -201,4 +197,8 @@ export class License { return this.manager.toString(); } + + isWithinUsersLimit() { + return this.getUsersLimit() === UNLIMITED_LICENSE_QUOTA; + } } diff --git a/packages/cli/src/LoadNodesAndCredentials.ts b/packages/cli/src/LoadNodesAndCredentials.ts index d62dc464d7..4d6c3d5a5a 100644 --- a/packages/cli/src/LoadNodesAndCredentials.ts +++ b/packages/cli/src/LoadNodesAndCredentials.ts @@ -66,21 +66,23 @@ export class LoadNodesAndCredentials implements INodesAndCredentials { this.downloadFolder = UserSettings.getUserN8nFolderDownloadedNodesPath(); - // Load nodes from `n8n-nodes-base` and any other `n8n-nodes-*` package in the main `node_modules` - const pathsToScan = [ + // Load nodes from `n8n-nodes-base` + const basePathsToScan = [ // In case "n8n" package is in same node_modules folder. path.join(CLI_DIR, '..'), // In case "n8n" package is the root and the packages are // in the "node_modules" folder underneath it. path.join(CLI_DIR, 'node_modules'), - // Path where all community nodes are installed - path.join(this.downloadFolder, 'node_modules'), ]; - for (const nodeModulesDir of pathsToScan) { - await this.loadNodesFromNodeModules(nodeModulesDir); + for (const nodeModulesDir of basePathsToScan) { + await this.loadNodesFromNodeModules(nodeModulesDir, 'n8n-nodes-base'); } + // Load nodes from any other `n8n-nodes-*` packages in the download directory + // This includes the community nodes + await this.loadNodesFromNodeModules(path.join(this.downloadFolder, 'node_modules')); + await this.loadNodesFromCustomDirectories(); await this.postProcessLoaders(); this.injectCustomApiCallOptions(); @@ -127,12 +129,15 @@ export class LoadNodesAndCredentials implements INodesAndCredentials { await writeStaticJSON('credentials', this.types.credentials); } - private async loadNodesFromNodeModules(nodeModulesDir: string): Promise { - const globOptions = { cwd: nodeModulesDir, onlyDirectories: true }; - const installedPackagePaths = [ - ...(await glob('n8n-nodes-*', { ...globOptions, deep: 1 })), - ...(await glob('@*/n8n-nodes-*', { ...globOptions, deep: 2 })), - ]; + private async loadNodesFromNodeModules( + nodeModulesDir: string, + packageName?: string, + ): Promise { + const installedPackagePaths = await glob(packageName ?? 
['n8n-nodes-*', '@*/n8n-nodes-*'], { + cwd: nodeModulesDir, + onlyDirectories: true, + deep: 1, + }); for (const packagePath of installedPackagePaths) { try { @@ -326,7 +331,7 @@ export class LoadNodesAndCredentials implements INodesAndCredentials { for (const loader of Object.values(this.loaders)) { // list of node & credential types that will be sent to the frontend - const { types, directory } = loader; + const { known, types, directory } = loader; this.types.nodes = this.types.nodes.concat(types.nodes); this.types.credentials = this.types.credentials.concat(types.credentials); @@ -339,26 +344,30 @@ export class LoadNodesAndCredentials implements INodesAndCredentials { this.loaded.credentials[credentialTypeName] = loader.credentialTypes[credentialTypeName]; } - // Nodes and credentials that will be lazy loaded - if (loader instanceof PackageDirectoryLoader) { - const { packageName, known } = loader; + for (const type in known.nodes) { + const { className, sourcePath } = known.nodes[type]; + this.known.nodes[type] = { + className, + sourcePath: path.join(directory, sourcePath), + }; + } - for (const type in known.nodes) { - const { className, sourcePath } = known.nodes[type]; - this.known.nodes[type] = { - className, - sourcePath: path.join(directory, sourcePath), - }; - } - - for (const type in known.credentials) { - const { className, sourcePath, nodesToTestWith } = known.credentials[type]; - this.known.credentials[type] = { - className, - sourcePath: path.join(directory, sourcePath), - nodesToTestWith: nodesToTestWith?.map((nodeName) => `${packageName}.${nodeName}`), - }; - } + for (const type in known.credentials) { + const { + className, + sourcePath, + nodesToTestWith, + extends: extendsArr, + } = known.credentials[type]; + this.known.credentials[type] = { + className, + sourcePath: path.join(directory, sourcePath), + nodesToTestWith: + loader instanceof PackageDirectoryLoader + ? 
nodesToTestWith?.map((nodeName) => `${loader.packageName}.${nodeName}`) + : undefined, + extends: extendsArr, + }; } } } diff --git a/packages/cli/src/PublicApi/types.d.ts b/packages/cli/src/PublicApi/types.ts similarity index 92% rename from packages/cli/src/PublicApi/types.d.ts rename to packages/cli/src/PublicApi/types.ts index 636d160588..cae8e64b77 100644 --- a/packages/cli/src/PublicApi/types.d.ts +++ b/packages/cli/src/PublicApi/types.ts @@ -138,13 +138,13 @@ export type OperationID = 'getUsers' | 'getUser'; type PaginationBase = { limit: number }; -type PaginationOffsetDecoded = PaginationBase & { offset: number }; +export type PaginationOffsetDecoded = PaginationBase & { offset: number }; -type PaginationCursorDecoded = PaginationBase & { lastId: string }; +export type PaginationCursorDecoded = PaginationBase & { lastId: string }; -type OffsetPagination = PaginationBase & { offset: number; numberOfTotalRecords: number }; +export type OffsetPagination = PaginationBase & { offset: number; numberOfTotalRecords: number }; -type CursorPagination = PaginationBase & { lastId: string; numberOfNextRecords: number }; +export type CursorPagination = PaginationBase & { lastId: string; numberOfNextRecords: number }; export interface IRequired { required?: string[]; not?: { required?: string[] }; diff --git a/packages/cli/src/PublicApi/v1/handlers/executions/executions.handler.ts b/packages/cli/src/PublicApi/v1/handlers/executions/executions.handler.ts index 57d18e9bf7..03b70556e0 100644 --- a/packages/cli/src/PublicApi/v1/handlers/executions/executions.handler.ts +++ b/packages/cli/src/PublicApi/v1/handlers/executions/executions.handler.ts @@ -37,7 +37,7 @@ export = { return res.status(404).json({ message: 'Not Found' }); } - await BinaryDataManager.getInstance().deleteBinaryDataByExecutionId(execution.id!); + await BinaryDataManager.getInstance().deleteBinaryDataByExecutionIds([execution.id!]); await deleteExecution(execution); diff --git a/packages/cli/src/PublicApi/v1/handlers/executions/executions.service.ts b/packages/cli/src/PublicApi/v1/handlers/executions/executions.service.ts index 36572cb5f3..6061df800c 100644 --- a/packages/cli/src/PublicApi/v1/handlers/executions/executions.service.ts +++ b/packages/cli/src/PublicApi/v1/handlers/executions/executions.service.ts @@ -1,11 +1,11 @@ import type { DeleteResult, FindOptionsWhere } from 'typeorm'; import { In, Not, Raw, LessThan } from 'typeorm'; +import { Container } from 'typedi'; +import type { ExecutionStatus } from 'n8n-workflow'; import * as Db from '@/Db'; import type { IExecutionBase, IExecutionFlattedDb } from '@/Interfaces'; -import type { ExecutionStatus } from 'n8n-workflow'; -import Container from 'typedi'; -import { ExecutionRepository } from '@/databases/repositories'; +import { ExecutionRepository } from '@db/repositories'; function getStatusCondition(status: ExecutionStatus) { const condition: Pick, 'status'> = {}; diff --git a/packages/cli/src/PublicApi/v1/handlers/sourceControl/sourceControl.handler.ts b/packages/cli/src/PublicApi/v1/handlers/sourceControl/sourceControl.handler.ts index daeb6f22e4..5353a8fdb6 100644 --- a/packages/cli/src/PublicApi/v1/handlers/sourceControl/sourceControl.handler.ts +++ b/packages/cli/src/PublicApi/v1/handlers/sourceControl/sourceControl.handler.ts @@ -1,9 +1,9 @@ import type express from 'express'; +import { Container } from 'typedi'; import type { StatusResult } from 'simple-git'; import type { PublicSourceControlRequest } from '../../../types'; import { authorize } from 
'../../shared/middlewares/global.middleware'; import type { ImportResult } from '@/environments/sourceControl/types/importResult'; -import Container from 'typedi'; import { SourceControlService } from '@/environments/sourceControl/sourceControl.service.ee'; import { SourceControlPreferencesService } from '@/environments/sourceControl/sourceControlPreferences.service.ee'; import { isSourceControlLicensed } from '@/environments/sourceControl/sourceControlHelper.ee'; diff --git a/packages/cli/src/PublicApi/v1/handlers/users/users.handler.ee.ts b/packages/cli/src/PublicApi/v1/handlers/users/users.handler.ee.ts index ee39dc8105..26a801e74d 100644 --- a/packages/cli/src/PublicApi/v1/handlers/users/users.handler.ee.ts +++ b/packages/cli/src/PublicApi/v1/handlers/users/users.handler.ee.ts @@ -1,4 +1,5 @@ import type express from 'express'; +import { Container } from 'typedi'; import { clean, getAllUsersAndCount, getUser } from './users.service.ee'; @@ -10,7 +11,6 @@ import { } from '../../shared/middlewares/global.middleware'; import type { UserRequest } from '@/requests'; import { InternalHooks } from '@/InternalHooks'; -import Container from 'typedi'; export = { getUser: [ diff --git a/packages/cli/src/PublicApi/v1/handlers/users/users.service.ee.ts b/packages/cli/src/PublicApi/v1/handlers/users/users.service.ee.ts index 0d1536cffa..69c2fa8515 100644 --- a/packages/cli/src/PublicApi/v1/handlers/users/users.service.ee.ts +++ b/packages/cli/src/PublicApi/v1/handlers/users/users.service.ee.ts @@ -1,18 +1,9 @@ import { Container } from 'typedi'; -import { RoleRepository, UserRepository } from '@db/repositories'; -import type { Role } from '@db/entities/Role'; +import { UserRepository } from '@db/repositories'; import type { User } from '@db/entities/User'; import pick from 'lodash/pick'; import { validate as uuidValidate } from 'uuid'; -export function isInstanceOwner(user: User): boolean { - return user.globalRole.name === 'owner'; -} - -export async function getWorkflowOwnerRole(): Promise { - return Container.get(RoleRepository).findWorkflowOwnerRoleOrFail(); -} - export const getSelectableProperties = (table: 'user' | 'role'): string[] => { return { user: ['id', 'email', 'firstName', 'lastName', 'createdAt', 'updatedAt', 'isPending'], diff --git a/packages/cli/src/PublicApi/v1/handlers/users/users.service.ts b/packages/cli/src/PublicApi/v1/handlers/users/users.service.ts new file mode 100644 index 0000000000..bccab33bb9 --- /dev/null +++ b/packages/cli/src/PublicApi/v1/handlers/users/users.service.ts @@ -0,0 +1,7 @@ +import { Container } from 'typedi'; +import { RoleRepository } from '@db/repositories'; +import type { Role } from '@db/entities/Role'; + +export async function getWorkflowOwnerRole(): Promise { + return Container.get(RoleRepository).findWorkflowOwnerRoleOrFail(); +} diff --git a/packages/cli/src/PublicApi/v1/handlers/workflows/workflows.handler.ts b/packages/cli/src/PublicApi/v1/handlers/workflows/workflows.handler.ts index ee4cd38b9c..7305b23cb7 100644 --- a/packages/cli/src/PublicApi/v1/handlers/workflows/workflows.handler.ts +++ b/packages/cli/src/PublicApi/v1/handlers/workflows/workflows.handler.ts @@ -11,7 +11,7 @@ import { addNodeIds, replaceInvalidCredentials } from '@/WorkflowHelpers'; import type { WorkflowRequest } from '../../../types'; import { authorize, validCursor } from '../../shared/middlewares/global.middleware'; import { encodeNextCursor } from '../../shared/services/pagination.service'; -import { getWorkflowOwnerRole, isInstanceOwner } from 
'../users/users.service.ee'; +import { getWorkflowOwnerRole } from '../users/users.service'; import { getWorkflowById, getSharedWorkflow, @@ -101,7 +101,7 @@ export = { ...(active !== undefined && { active }), }; - if (isInstanceOwner(req.user)) { + if (req.user.isOwner) { if (tags) { const workflowIds = await getWorkflowIdsViaTags(parseTagNames(tags)); where.id = In(workflowIds); diff --git a/packages/cli/src/PublicApi/v1/handlers/workflows/workflows.service.ts b/packages/cli/src/PublicApi/v1/handlers/workflows/workflows.service.ts index cfcedcbad4..df9515ecdb 100644 --- a/packages/cli/src/PublicApi/v1/handlers/workflows/workflows.service.ts +++ b/packages/cli/src/PublicApi/v1/handlers/workflows/workflows.service.ts @@ -8,7 +8,6 @@ import * as Db from '@/Db'; import type { User } from '@db/entities/User'; import { WorkflowEntity } from '@db/entities/WorkflowEntity'; import { SharedWorkflow } from '@db/entities/SharedWorkflow'; -import { isInstanceOwner } from '../users/users.service.ee'; import type { Role } from '@db/entities/Role'; import config from '@/config'; import { START_NODES } from '@/constants'; @@ -32,7 +31,7 @@ export async function getSharedWorkflow( ): Promise { return Db.collections.SharedWorkflow.findOne({ where: { - ...(!isInstanceOwner(user) && { userId: user.id }), + ...(!user.isOwner && { userId: user.id }), ...(workflowId && { workflowId }), }, relations: [...insertIf(!config.getEnv('workflowTagsDisabled'), ['workflow.tags']), 'workflow'], @@ -48,7 +47,7 @@ export async function getSharedWorkflows( ): Promise { return Db.collections.SharedWorkflow.find({ where: { - ...(!isInstanceOwner(user) && { userId: user.id }), + ...(!user.isOwner && { userId: user.id }), ...(options.workflowIds && { workflowId: In(options.workflowIds) }), }, ...(options.relations && { relations: options.relations }), diff --git a/packages/cli/src/ResponseHelper.ts b/packages/cli/src/ResponseHelper.ts index 907895914f..312990f683 100644 --- a/packages/cli/src/ResponseHelper.ts +++ b/packages/cli/src/ResponseHelper.ts @@ -87,17 +87,6 @@ export class ServiceUnavailableError extends ResponseError { } } -export function basicAuthAuthorizationError(resp: Response, realm: string, message?: string) { - resp.statusCode = 401; - resp.setHeader('WWW-Authenticate', `Basic realm="${realm}"`); - resp.json({ code: resp.statusCode, message }); -} - -export function jwtAuthAuthorizationError(resp: Response, message?: string) { - resp.statusCode = 403; - resp.json({ code: resp.statusCode, message }); -} - export function sendSuccessResponse( res: Response, data: any, diff --git a/packages/cli/src/Server.ts b/packages/cli/src/Server.ts index 279e414bd0..32083baef2 100644 --- a/packages/cli/src/Server.ts +++ b/packages/cli/src/Server.ts @@ -68,6 +68,7 @@ import { EDITOR_UI_DIST_DIR, GENERATED_STATIC_DIR, inDevelopment, + inE2ETests, N8N_VERSION, RESPONSE_ERROR_MESSAGES, TEMPLATES_DIR, @@ -105,7 +106,6 @@ import { getInstanceBaseUrl, isEmailSetUp, isSharingEnabled, - isUserManagementEnabled, whereClause, } from '@/UserManagement/UserManagementHelper'; import { UserManagementMailer } from '@/UserManagement/email'; @@ -145,12 +145,11 @@ import { } from './Ldap/helpers'; import { AbstractServer } from './AbstractServer'; import { configureMetrics } from './metrics'; -import { setupBasicAuth } from './middlewares/basicAuth'; -import { setupExternalJWTAuth } from './middlewares/externalJWTAuth'; import { PostHogClient } from './posthog'; import { eventBus } from './eventbus'; import { Container } from 'typedi'; import { 
InternalHooks } from './InternalHooks'; +import { License } from './License'; import { getStatusUsingPreviousExecutionStatusMethod, isAdvancedExecutionFiltersEnabled, @@ -169,9 +168,8 @@ import { import { isSourceControlLicensed } from '@/environments/sourceControl/sourceControlHelper.ee'; import { SourceControlService } from '@/environments/sourceControl/sourceControl.service.ee'; import { SourceControlController } from '@/environments/sourceControl/sourceControl.controller.ee'; -import { SourceControlPreferencesService } from './environments/sourceControl/sourceControlPreferences.service.ee'; -import { ExecutionRepository } from './databases/repositories'; -import type { ExecutionEntity } from './databases/entities/ExecutionEntity'; +import { ExecutionRepository } from '@db/repositories'; +import type { ExecutionEntity } from '@db/entities/ExecutionEntity'; const exec = promisify(callbackExec); @@ -261,11 +259,8 @@ export class Server extends AbstractServer { config.getEnv('personalization.enabled') && config.getEnv('diagnostics.enabled'), defaultLocale: config.getEnv('defaultLocale'), userManagement: { - enabled: isUserManagementEnabled(), - showSetupOnFirstLoad: - config.getEnv('userManagement.disabled') === false && - config.getEnv('userManagement.isInstanceOwnerSetUp') === false && - config.getEnv('userManagement.skipInstanceOwnerSetup') === false, + quota: Container.get(License).getUsersLimit(), + showSetupOnFirstLoad: config.getEnv('userManagement.isInstanceOwnerSetUp') === false, smtpSetup: isEmailSetUp(), authenticationMethod: getCurrentAuthenticationMethod(), }, @@ -323,6 +318,9 @@ export class Server extends AbstractServer { variables: { limit: 0, }, + banners: { + dismissed: [], + }, }; } @@ -340,16 +338,11 @@ export class Server extends AbstractServer { this.push = Container.get(Push); - if (process.env.E2E_TESTS === 'true') { - this.app.use('/e2e', require('./api/e2e.api').e2eController); - } - await super.start(); const cpus = os.cpus(); const binaryDataConfig = config.getEnv('binaryDataManager'); const diagnosticInfo: IDiagnosticInfo = { - basicAuthActive: config.getEnv('security.basicAuth.active'), databaseType: config.getEnv('database.type'), disableProductionWebhooksOnMainProcess: config.getEnv( 'endpoints.disableProductionWebhooksOnMainProcess', @@ -385,7 +378,6 @@ export class Server extends AbstractServer { }, deploymentType: config.getEnv('deployment.type'), binaryDataMode: binaryDataConfig.mode, - n8n_multi_user_allowed: isUserManagementEnabled(), smtp_set_up: config.getEnv('userManagement.emails.mode') === 'smtp', ldap_allowed: isLdapCurrentAuthenticationMethod(), saml_enabled: isSamlCurrentAuthenticationMethod(), @@ -414,15 +406,23 @@ export class Server extends AbstractServer { getSettingsForFrontend(): IN8nUISettings { // refresh user management status Object.assign(this.frontendSettings.userManagement, { - enabled: isUserManagementEnabled(), + quota: Container.get(License).getUsersLimit(), authenticationMethod: getCurrentAuthenticationMethod(), showSetupOnFirstLoad: - config.getEnv('userManagement.disabled') === false && config.getEnv('userManagement.isInstanceOwnerSetUp') === false && - config.getEnv('userManagement.skipInstanceOwnerSetup') === false && config.getEnv('deployment.type').startsWith('desktop_') === false, }); + let dismissedBanners: string[] = []; + + try { + dismissedBanners = config.getEnv('ui.banners.dismissed') ?? 
[]; + } catch { + // not yet in DB + } + + this.frontendSettings.banners.dismissed = dismissedBanners; + // refresh enterprise status Object.assign(this.frontendSettings.enterprise, { sharing: isSharingEnabled(), @@ -458,18 +458,15 @@ export class Server extends AbstractServer { return this.frontendSettings; } - private registerControllers(ignoredEndpoints: Readonly) { + private async registerControllers(ignoredEndpoints: Readonly) { const { app, externalHooks, activeWorkflowRunner, nodeTypes } = this; const repositories = Db.collections; - setupAuthMiddlewares(app, ignoredEndpoints, this.restEndpoint, repositories.User); + setupAuthMiddlewares(app, ignoredEndpoints, this.restEndpoint); const logger = LoggerProxy; const internalHooks = Container.get(InternalHooks); const mailer = Container.get(UserManagementMailer); const postHog = this.postHog; - const samlService = Container.get(SamlService); - const sourceControlService = Container.get(SourceControlService); - const sourceControlPreferencesService = Container.get(SourceControlPreferencesService); const controllers: object[] = [ new EventBusController(), @@ -497,8 +494,8 @@ export class Server extends AbstractServer { logger, postHog, }), - new SamlController(samlService), - new SourceControlController(sourceControlService, sourceControlPreferencesService), + Container.get(SamlController), + Container.get(SourceControlController), ]; if (isLdapEnabled()) { @@ -512,6 +509,12 @@ export class Server extends AbstractServer { ); } + if (inE2ETests) { + // eslint-disable-next-line @typescript-eslint/naming-convention + const { E2EController } = await import('./controllers/e2e.controller'); + controllers.push(Container.get(E2EController)); + } + controllers.forEach((controller) => registerController(app, config, controller)); } @@ -552,19 +555,6 @@ export class Server extends AbstractServer { `REST endpoint cannot be set to any of these values: ${ignoredEndpoints.join()} `, ); - // eslint-disable-next-line no-useless-escape - const authIgnoreRegex = new RegExp(`^\/(${ignoredEndpoints.join('|')})\/?.*$`); - - // Check for basic auth credentials if activated - if (config.getEnv('security.basicAuth.active')) { - setupBasicAuth(this.app, config, authIgnoreRegex); - } - - // Check for and validate JWT if configured - if (config.getEnv('security.jwtAuth.active')) { - setupExternalJWTAuth(this.app, config, authIgnoreRegex); - } - // ---------------------------------------- // Public API // ---------------------------------------- @@ -578,7 +568,7 @@ export class Server extends AbstractServer { this.app.use(cookieParser()); const { restEndpoint, app } = this; - setupPushHandler(restEndpoint, app, isUserManagementEnabled()); + setupPushHandler(restEndpoint, app); // Make sure that Vue history mode works properly this.app.use( @@ -600,7 +590,7 @@ export class Server extends AbstractServer { await handleLdapInit(); - this.registerControllers(ignoredEndpoints); + await this.registerControllers(ignoredEndpoints); this.app.use(`/${this.restEndpoint}/credentials`, credentialsController); diff --git a/packages/cli/src/UserManagement/UserManagementHelper.ts b/packages/cli/src/UserManagement/UserManagementHelper.ts index e299544840..1f5a3c09aa 100644 --- a/packages/cli/src/UserManagement/UserManagementHelper.ts +++ b/packages/cli/src/UserManagement/UserManagementHelper.ts @@ -12,8 +12,8 @@ import { MAX_PASSWORD_LENGTH, MIN_PASSWORD_LENGTH } from '@db/entities/User'; import type { Role } from '@db/entities/Role'; import { RoleRepository } from '@db/repositories'; 
import config from '@/config'; -import { getWebhookBaseUrl } from '@/WebhookHelpers'; import { License } from '@/License'; +import { getWebhookBaseUrl } from '@/WebhookHelpers'; import type { PostHogClient } from '@/posthog'; export async function getWorkflowOwner(workflowId: string): Promise { @@ -36,26 +36,8 @@ export function isEmailSetUp(): boolean { return smtp && host && user && pass; } -export function isUserManagementEnabled(): boolean { - // This can be simplified but readability is more important here - - if (config.getEnv('userManagement.isInstanceOwnerSetUp')) { - // Short circuit - if owner is set up, UM cannot be disabled. - // Users must reset their instance in order to do so. - return true; - } - - // UM is disabled for desktop by default - if (config.getEnv('deployment.type').startsWith('desktop_')) { - return false; - } - - return config.getEnv('userManagement.disabled') ? false : true; -} - export function isSharingEnabled(): boolean { - const license = Container.get(License); - return isUserManagementEnabled() && license.isSharingEnabled(); + return Container.get(License).isSharingEnabled(); } export async function getRoleId(scope: Role['scope'], name: Role['name']): Promise { diff --git a/packages/cli/src/WaitTracker.ts b/packages/cli/src/WaitTracker.ts index 4d01e5b524..8af5074828 100644 --- a/packages/cli/src/WaitTracker.ts +++ b/packages/cli/src/WaitTracker.ts @@ -8,7 +8,7 @@ import { LoggerProxy as Logger, WorkflowOperationError, } from 'n8n-workflow'; -import Container, { Service } from 'typedi'; +import { Container, Service } from 'typedi'; import type { FindManyOptions, ObjectLiteral } from 'typeorm'; import { Not, LessThanOrEqual } from 'typeorm'; import { DateUtils } from 'typeorm/util/DateUtils'; @@ -23,8 +23,8 @@ import type { import { WorkflowRunner } from '@/WorkflowRunner'; import { getWorkflowOwner } from '@/UserManagement/UserManagementHelper'; import { recoverExecutionDataFromEventLogMessages } from './eventbus/MessageEventBus/recoverEvents'; -import { ExecutionRepository } from './databases/repositories'; -import type { ExecutionEntity } from './databases/entities/ExecutionEntity'; +import { ExecutionRepository } from '@db/repositories'; +import type { ExecutionEntity } from '@db/entities/ExecutionEntity'; @Service() export class WaitTracker { diff --git a/packages/cli/src/WaitingWebhooks.ts b/packages/cli/src/WaitingWebhooks.ts index bd2a6d5388..7d66fda525 100644 --- a/packages/cli/src/WaitingWebhooks.ts +++ b/packages/cli/src/WaitingWebhooks.ts @@ -12,7 +12,7 @@ import { NodeTypes } from '@/NodeTypes'; import type { IExecutionResponse, IResponseCallbackData, IWorkflowDb } from '@/Interfaces'; import * as WorkflowExecuteAdditionalData from '@/WorkflowExecuteAdditionalData'; import { getWorkflowOwner } from '@/UserManagement/UserManagementHelper'; -import { ExecutionRepository } from './databases/repositories'; +import { ExecutionRepository } from '@db/repositories'; @Service() export class WaitingWebhooks { @@ -106,13 +106,13 @@ export class WaitingWebhooks { workflow, workflow.getNode(lastNodeExecuted) as INode, additionalData, - ).filter((webhook) => { + ).find((webhook) => { return ( webhook.httpMethod === httpMethod && webhook.path === path && webhook.webhookDescription.restartWebhook === true ); - })[0]; + }); if (webhookData === undefined) { // If no data got found it means that the execution can not be started via a webhook. 
diff --git a/packages/cli/src/WebhookHelpers.ts b/packages/cli/src/WebhookHelpers.ts index eac808d678..3d007cf574 100644 --- a/packages/cli/src/WebhookHelpers.ts +++ b/packages/cli/src/WebhookHelpers.ts @@ -697,20 +697,11 @@ export async function executeWebhook( /** * Returns the base URL of the webhooks - * */ export function getWebhookBaseUrl() { - let urlBaseWebhook = GenericHelpers.getBaseUrl(); - - // We renamed WEBHOOK_TUNNEL_URL to WEBHOOK_URL. This is here to maintain - // backward compatibility. Will be deprecated and removed in the future. - if (process.env.WEBHOOK_TUNNEL_URL !== undefined || process.env.WEBHOOK_URL !== undefined) { - // @ts-ignore - urlBaseWebhook = process.env.WEBHOOK_TUNNEL_URL || process.env.WEBHOOK_URL; - } + let urlBaseWebhook = process.env.WEBHOOK_URL ?? GenericHelpers.getBaseUrl(); if (!urlBaseWebhook.endsWith('/')) { urlBaseWebhook += '/'; } - return urlBaseWebhook; } diff --git a/packages/cli/src/WorkflowExecuteAdditionalData.ts b/packages/cli/src/WorkflowExecuteAdditionalData.ts index 7ea2556438..b55d8a039f 100644 --- a/packages/cli/src/WorkflowExecuteAdditionalData.ts +++ b/packages/cli/src/WorkflowExecuteAdditionalData.ts @@ -70,7 +70,7 @@ import { WorkflowsService } from './workflows/workflows.services'; import { Container } from 'typedi'; import { InternalHooks } from '@/InternalHooks'; import type { ExecutionMetadata } from '@db/entities/ExecutionMetadata'; -import { ExecutionRepository } from './databases/repositories'; +import { ExecutionRepository } from '@db/repositories'; const ERROR_TRIGGER_TYPE = config.getEnv('nodes.errorTriggerType'); @@ -1179,6 +1179,7 @@ export async function getBase( executeWorkflow, restApiUrl: urlBaseWebhook + config.getEnv('endpoints.rest'), timezone, + instanceBaseUrl: urlBaseWebhook, webhookBaseUrl, webhookWaitingBaseUrl, webhookTestBaseUrl, diff --git a/packages/cli/src/WorkflowHelpers.ts b/packages/cli/src/WorkflowHelpers.ts index abdc2b011e..f71f026e30 100644 --- a/packages/cli/src/WorkflowHelpers.ts +++ b/packages/cli/src/WorkflowHelpers.ts @@ -35,8 +35,8 @@ import omit from 'lodash/omit'; import { PermissionChecker } from './UserManagement/PermissionChecker'; import { isWorkflowIdValid } from './utils'; import { UserService } from './user/user.service'; -import type { SharedWorkflow } from './databases/entities/SharedWorkflow'; -import type { RoleNames } from './databases/entities/Role'; +import type { SharedWorkflow } from '@db/entities/SharedWorkflow'; +import type { RoleNames } from '@db/entities/Role'; const ERROR_TRIGGER_TYPE = config.getEnv('nodes.errorTriggerType'); diff --git a/packages/cli/src/WorkflowRunner.ts b/packages/cli/src/WorkflowRunner.ts index 0f636fc05f..8a22a27b3d 100644 --- a/packages/cli/src/WorkflowRunner.ts +++ b/packages/cli/src/WorkflowRunner.ts @@ -54,7 +54,7 @@ import { eventBus } from './eventbus'; import { recoverExecutionDataFromEventLogMessages } from './eventbus/MessageEventBus/recoverEvents'; import { Container } from 'typedi'; import { InternalHooks } from './InternalHooks'; -import { ExecutionRepository } from './databases/repositories'; +import { ExecutionRepository } from '@db/repositories'; export class WorkflowRunner { activeExecutions: ActiveExecutions; diff --git a/packages/cli/src/api/e2e.api.ts b/packages/cli/src/api/e2e.api.ts deleted file mode 100644 index efecd7c34c..0000000000 --- a/packages/cli/src/api/e2e.api.ts +++ /dev/null @@ -1,158 +0,0 @@ -/* eslint-disable @typescript-eslint/no-unsafe-return */ -/* eslint-disable @typescript-eslint/no-unsafe-call */ -/* 
eslint-disable @typescript-eslint/no-unsafe-member-access */ -/* eslint-disable @typescript-eslint/no-unsafe-argument */ -/* eslint-disable @typescript-eslint/no-unsafe-assignment */ -/* eslint-disable @typescript-eslint/naming-convention */ -import { Router } from 'express'; -import type { Request } from 'express'; -import bodyParser from 'body-parser'; -import { v4 as uuid } from 'uuid'; -import { Container } from 'typedi'; -import config from '@/config'; -import * as Db from '@/Db'; -import type { Role } from '@db/entities/Role'; -import { RoleRepository } from '@db/repositories'; -import { hashPassword } from '@/UserManagement/UserManagementHelper'; -import { eventBus } from '@/eventbus/MessageEventBus/MessageEventBus'; -import { License } from '../License'; -import { LICENSE_FEATURES } from '@/constants'; - -if (process.env.E2E_TESTS !== 'true') { - console.error('E2E endpoints only allowed during E2E tests'); - process.exit(1); -} - -const enabledFeatures = { - [LICENSE_FEATURES.SHARING]: true, //default to true here instead of setting it in config/index.ts for e2e - [LICENSE_FEATURES.LDAP]: false, - [LICENSE_FEATURES.SAML]: false, - [LICENSE_FEATURES.LOG_STREAMING]: false, - [LICENSE_FEATURES.ADVANCED_EXECUTION_FILTERS]: false, - [LICENSE_FEATURES.SOURCE_CONTROL]: false, -}; - -type Feature = keyof typeof enabledFeatures; - -Container.get(License).isFeatureEnabled = (feature: Feature) => enabledFeatures[feature] ?? false; - -const tablesToTruncate = [ - 'auth_identity', - 'auth_provider_sync_history', - 'event_destinations', - 'shared_workflow', - 'shared_credentials', - 'webhook_entity', - 'workflows_tags', - 'credentials_entity', - 'tag_entity', - 'workflow_statistics', - 'workflow_entity', - 'execution_entity', - 'settings', - 'installed_packages', - 'installed_nodes', - 'user', - 'role', - 'variables', -]; - -const truncateAll = async () => { - const connection = Db.getConnection(); - - for (const table of tablesToTruncate) { - try { - await connection.query( - `DELETE FROM ${table}; DELETE FROM sqlite_sequence WHERE name=${table};`, - ); - } catch (error) { - console.warn('Dropping Table for E2E Reset error: ', error); - } - } -}; - -const setupUserManagement = async () => { - const connection = Db.getConnection(); - await connection.query('INSERT INTO role (name, scope) VALUES ("owner", "global");'); - const instanceOwnerRole = (await connection.query( - 'SELECT last_insert_rowid() as insertId', - )) as Array<{ insertId: number }>; - - const roles: Array<[Role['name'], Role['scope']]> = [ - ['member', 'global'], - ['owner', 'workflow'], - ['owner', 'credential'], - ['user', 'credential'], - ['editor', 'workflow'], - ]; - - await Promise.all( - roles.map(async ([name, scope]) => - connection.query(`INSERT INTO role (name, scope) VALUES ("${name}", "${scope}");`), - ), - ); - await connection.query( - `INSERT INTO user (id, globalRoleId) values ("${uuid()}", ${instanceOwnerRole[0].insertId})`, - ); - await connection.query( - "INSERT INTO \"settings\" (key, value, loadOnStartup) values ('userManagement.isInstanceOwnerSetUp', 'false', true), ('userManagement.skipInstanceOwnerSetup', 'false', true)", - ); - - config.set('userManagement.isInstanceOwnerSetUp', false); -}; - -const resetLogStreaming = async () => { - enabledFeatures[LICENSE_FEATURES.LOG_STREAMING] = false; - for (const id in eventBus.destinations) { - await eventBus.removeDestination(id); - } -}; - -export const e2eController = Router(); - -e2eController.post('/db/reset', async (req, res) => { - await 
resetLogStreaming(); - await truncateAll(); - await setupUserManagement(); - - res.writeHead(204).end(); -}); - -e2eController.post('/db/setup-owner', bodyParser.json(), async (req, res) => { - if (config.get('userManagement.isInstanceOwnerSetUp')) { - res.writeHead(500).send({ error: 'Owner already setup' }); - return; - } - - const globalRole = await Container.get(RoleRepository).findGlobalOwnerRoleOrFail(); - - const owner = await Db.collections.User.findOneByOrFail({ globalRoleId: globalRole.id }); - - await Db.collections.User.update(owner.id, { - email: req.body.email, - password: await hashPassword(req.body.password), - firstName: req.body.firstName, - lastName: req.body.lastName, - }); - - await Db.collections.Settings.update( - { key: 'userManagement.isInstanceOwnerSetUp' }, - { value: 'true' }, - ); - - config.set('userManagement.isInstanceOwnerSetUp', true); - - res.writeHead(204).end(); -}); - -e2eController.patch( - '/feature/:feature', - bodyParser.json(), - async (req: Request<{ feature: Feature }>, res) => { - const { feature } = req.params; - const { enabled } = req.body; - - enabledFeatures[feature] = enabled === undefined || enabled === true; - res.writeHead(204).end(); - }, -); diff --git a/packages/cli/src/audit/constants.ts b/packages/cli/src/audit/constants.ts index dae6fd44c5..9c2658158d 100644 --- a/packages/cli/src/audit/constants.ts +++ b/packages/cli/src/audit/constants.ts @@ -116,6 +116,4 @@ export const DB_QUERY_PARAMS_DOCS_URL = export const COMMUNITY_NODES_RISKS_URL = 'https://docs.n8n.io/integrations/community-nodes/risks'; -export const SELF_HOSTED_AUTH_DOCS_URL = 'https://docs.n8n.io/hosting/authentication'; - export const NPM_PACKAGE_URL = 'https://www.npmjs.com/package'; diff --git a/packages/cli/src/audit/risks/credentials.risk.ts b/packages/cli/src/audit/risks/credentials.risk.ts index 7e0898b0c4..80d145e750 100644 --- a/packages/cli/src/audit/risks/credentials.risk.ts +++ b/packages/cli/src/audit/risks/credentials.risk.ts @@ -1,13 +1,13 @@ import type { FindOperator } from 'typeorm'; import { MoreThanOrEqual } from 'typeorm'; import { DateUtils } from 'typeorm/util/DateUtils'; +import { Container } from 'typedi'; import * as Db from '@/Db'; import config from '@/config'; import { CREDENTIALS_REPORT } from '@/audit/constants'; import type { WorkflowEntity } from '@db/entities/WorkflowEntity'; import type { Risk } from '@/audit/types'; -import Container from 'typedi'; -import { ExecutionRepository } from '@/databases/repositories'; +import { ExecutionRepository } from '@db/repositories'; async function getAllCredsInUse(workflows: WorkflowEntity[]) { const credsInAnyUse = new Set(); diff --git a/packages/cli/src/audit/risks/instance.risk.ts b/packages/cli/src/audit/risks/instance.risk.ts index 6d75acbd98..7465e6228d 100644 --- a/packages/cli/src/audit/risks/instance.risk.ts +++ b/packages/cli/src/audit/risks/instance.risk.ts @@ -4,7 +4,6 @@ import config from '@/config'; import { toFlaggedNode } from '@/audit/utils'; import { separate } from '@/utils'; import { - SELF_HOSTED_AUTH_DOCS_URL, ENV_VARS_DOCS_URL, INSTANCE_REPORT, WEBHOOK_NODE_TYPE, @@ -18,31 +17,17 @@ import { isApiEnabled } from '@/PublicApi'; function getSecuritySettings() { if (config.getEnv('deployment.type') === 'cloud') return null; - const userManagementEnabled = !config.getEnv('userManagement.disabled'); - const basicAuthActive = config.getEnv('security.basicAuth.active'); - const jwtAuthActive = config.getEnv('security.jwtAuth.active'); - - const isInstancePubliclyAccessible = 
!userManagementEnabled && !basicAuthActive && !jwtAuthActive; - const settings: Record = {}; - if (isInstancePubliclyAccessible) { - settings.publiclyAccessibleInstance = - 'Important! Your n8n instance is publicly accessible. Any third party who knows your instance URL can access your data.'.toUpperCase(); - } - settings.features = { communityPackagesEnabled: config.getEnv('nodes.communityPackages.enabled'), versionNotificationsEnabled: config.getEnv('versionNotifications.enabled'), templatesEnabled: config.getEnv('templates.enabled'), publicApiEnabled: isApiEnabled(), - userManagementEnabled, }; settings.auth = { authExcludeEndpoints: config.getEnv('security.excludeEndpoints') || 'none', - basicAuthActive, - jwtAuthActive, }; settings.nodes = { @@ -207,12 +192,7 @@ export async function reportInstanceRisk(workflows: WorkflowEntity[]) { report.sections.push({ title: INSTANCE_REPORT.SECTIONS.SECURITY_SETTINGS, description: 'This n8n instance has the following security settings.', - recommendation: securitySettings.publiclyAccessibleInstance - ? [ - 'Important! Your n8n instance is publicly accessible. Set up user management or basic/JWT auth to protect access to your n8n instance.'.toUpperCase(), - `See: ${SELF_HOSTED_AUTH_DOCS_URL}`, - ].join(' ') - : `Consider adjusting the security settings for your n8n instance based on your needs. See: ${ENV_VARS_DOCS_URL}`, + recommendation: `Consider adjusting the security settings for your n8n instance based on your needs. See: ${ENV_VARS_DOCS_URL}`, settings: securitySettings, }); } diff --git a/packages/cli/src/auth/jwt.ts b/packages/cli/src/auth/jwt.ts index 5a1db63f03..1fb305cfe6 100644 --- a/packages/cli/src/auth/jwt.ts +++ b/packages/cli/src/auth/jwt.ts @@ -4,15 +4,18 @@ import jwt from 'jsonwebtoken'; import type { Response } from 'express'; import { createHash } from 'crypto'; import * as Db from '@/Db'; -import { AUTH_COOKIE_NAME } from '@/constants'; +import { AUTH_COOKIE_NAME, RESPONSE_ERROR_MESSAGES } from '@/constants'; import type { JwtPayload, JwtToken } from '@/Interfaces'; import type { User } from '@db/entities/User'; import config from '@/config'; import * as ResponseHelper from '@/ResponseHelper'; +import { License } from '@/License'; +import { Container } from 'typedi'; export function issueJWT(user: User): JwtToken { const { id, email, password } = user; const expiresIn = 7 * 86400000; // 7 days + const isWithinUsersLimit = Container.get(License).isWithinUsersLimit(); const payload: JwtPayload = { id, @@ -20,6 +23,13 @@ export function issueJWT(user: User): JwtToken { password: password ?? 
null, }; + if ( + config.getEnv('userManagement.isInstanceOwnerSetUp') && + !user.isOwner && + !isWithinUsersLimit + ) { + throw new ResponseHelper.UnauthorizedError(RESPONSE_ERROR_MESSAGES.USERS_QUOTA_REACHED); + } if (password) { payload.password = createHash('sha256') .update(password.slice(password.length / 2)) diff --git a/packages/cli/src/commands/BaseCommand.ts b/packages/cli/src/commands/BaseCommand.ts index 9b190542d5..d748d06f65 100644 --- a/packages/cli/src/commands/BaseCommand.ts +++ b/packages/cli/src/commands/BaseCommand.ts @@ -9,7 +9,7 @@ import { getLogger } from '@/Logger'; import config from '@/config'; import * as Db from '@/Db'; import * as CrashJournal from '@/CrashJournal'; -import { USER_MANAGEMENT_DOCS_URL, inTest } from '@/constants'; +import { inTest } from '@/constants'; import { CredentialTypes } from '@/CredentialTypes'; import { CredentialsOverwrites } from '@/CredentialsOverwrites'; import { initErrorHandling } from '@/ErrorReporting'; @@ -65,11 +65,6 @@ export abstract class BaseCommand extends Command { this.exitWithCrash('There was an error running database migrations', error), ); - if (process.env.WEBHOOK_TUNNEL_URL) { - LoggerProxy.warn( - 'You are still using the WEBHOOK_TUNNEL_URL environment variable. It has been deprecated and will be removed in a future version of n8n. Please switch to using WEBHOOK_URL instead.', - ); - } const dbType = config.getEnv('database.type'); if (['mysqldb', 'mariadb'].includes(dbType)) { @@ -83,24 +78,6 @@ export abstract class BaseCommand extends Command { ); } - if (process.env.N8N_BASIC_AUTH_ACTIVE === 'true') { - LoggerProxy.warn( - `Basic auth has been deprecated and will be removed in a future version of n8n. For authentication, please consider User Management. To learn more: ${USER_MANAGEMENT_DOCS_URL}`, - ); - } - - if (process.env.N8N_JWT_AUTH_ACTIVE === 'true') { - LoggerProxy.warn( - `JWT auth has been deprecated and will be removed in a future version of n8n. For authentication, please consider User Management. To learn more: ${USER_MANAGEMENT_DOCS_URL}`, - ); - } - - if (process.env.N8N_USER_MANAGEMENT_DISABLED === 'true') { - LoggerProxy.warn( - `User Management will be mandatory in a future version of n8n. Please set up the instance owner. To learn more: ${USER_MANAGEMENT_DOCS_URL}`, - ); - } - this.instanceId = this.userSettings.instanceId ?? 
''; await Container.get(PostHogClient).init(this.instanceId); await Container.get(InternalHooks).init(this.instanceId); diff --git a/packages/cli/src/commands/executeBatch.ts b/packages/cli/src/commands/executeBatch.ts index a71f9d2d2e..df34d32392 100644 --- a/packages/cli/src/commands/executeBatch.ts +++ b/packages/cli/src/commands/executeBatch.ts @@ -2,8 +2,8 @@ import fs from 'fs'; import os from 'os'; import { flags } from '@oclif/command'; -import type { ITaskData } from 'n8n-workflow'; -import { sleep } from 'n8n-workflow'; +import type { IRun, ITaskData } from 'n8n-workflow'; +import { jsonParse, sleep } from 'n8n-workflow'; import { sep } from 'path'; import { diff } from 'json-diff'; import pick from 'lodash/pick'; @@ -18,6 +18,13 @@ import { findCliWorkflowStart } from '@/utils'; import { initEvents } from '@/events'; import { BaseCommand } from './BaseCommand'; import { Container } from 'typedi'; +import type { + IExecutionResult, + INodeSpecialCase, + INodeSpecialCases, + IResult, + IWorkflowExecutionProgress, +} from '../types/commands.types'; const re = /\d+/; @@ -778,8 +785,9 @@ export class ExecuteBatch extends BaseCommand { }${workflowData.id}-snapshot.json`; if (fs.existsSync(fileName)) { const contents = fs.readFileSync(fileName, { encoding: 'utf-8' }); - - const changes = diff(JSON.parse(contents), data, { keysOnly: true }); + const expected = jsonParse(contents); + const received = jsonParse(serializedData); + const changes = diff(expected, received, { keysOnly: true }) as object; if (changes !== undefined) { // If we had only additions with no removals diff --git a/packages/cli/src/commands/import/workflow.ts b/packages/cli/src/commands/import/workflow.ts index 4ac3c9a6ba..b2b1969328 100644 --- a/packages/cli/src/commands/import/workflow.ts +++ b/packages/cli/src/commands/import/workflow.ts @@ -17,7 +17,7 @@ import { disableAutoGeneratedIds } from '@db/utils/commandHelpers'; import type { ICredentialsDb, IWorkflowToImport } from '@/Interfaces'; import { replaceInvalidCredentials } from '@/WorkflowHelpers'; import { BaseCommand, UM_FIX_INSTRUCTION } from '../BaseCommand'; -import { generateNanoId } from '@/databases/utils/generators'; +import { generateNanoId } from '@db/utils/generators'; function assertHasWorkflowsToImport(workflows: unknown): asserts workflows is IWorkflowToImport[] { if (!Array.isArray(workflows)) { diff --git a/packages/cli/src/commands/start.ts b/packages/cli/src/commands/start.ts index 30f76cf72a..1b203c7c4e 100644 --- a/packages/cli/src/commands/start.ts +++ b/packages/cli/src/commands/start.ts @@ -338,12 +338,6 @@ export class Start extends BaseCommand { const editorUrl = GenericHelpers.getBaseUrl(); this.log(`\nEditor is now accessible via:\n${editorUrl}`); - const saveManualExecutions = config.getEnv('executions.saveDataManualExecutions'); - - if (saveManualExecutions) { - this.log('\nManual executions will be visible only for the owner'); - } - // Allow to open n8n editor by pressing "o" if (Boolean(process.stdout.isTTY) && process.stdin.setRawMode) { process.stdin.setRawMode(true); diff --git a/packages/cli/src/commands/user-management/reset.ts b/packages/cli/src/commands/user-management/reset.ts index 66c8de0117..eec5e1784a 100644 --- a/packages/cli/src/commands/user-management/reset.ts +++ b/packages/cli/src/commands/user-management/reset.ts @@ -56,10 +56,6 @@ export class Reset extends BaseCommand { { key: 'userManagement.isInstanceOwnerSetUp' }, { value: 'false' }, ); - await Db.collections.Settings.update( - { key: 
'userManagement.skipInstanceOwnerSetup' }, - { value: 'false' }, - ); this.logger.info('Successfully reset the database to default user state.'); } diff --git a/packages/cli/src/commands/worker.ts b/packages/cli/src/commands/worker.ts index f346277cbd..e50fa29118 100644 --- a/packages/cli/src/commands/worker.ts +++ b/packages/cli/src/commands/worker.ts @@ -22,7 +22,7 @@ import { getWorkflowOwner } from '@/UserManagement/UserManagementHelper'; import { generateFailedExecutionFromError } from '@/WorkflowHelpers'; import { N8N_VERSION } from '@/constants'; import { BaseCommand } from './BaseCommand'; -import { ExecutionRepository } from '@/databases/repositories'; +import { ExecutionRepository } from '@db/repositories'; export class Worker extends BaseCommand { static description = '\nStarts a n8n worker'; diff --git a/packages/cli/src/config/index.ts b/packages/cli/src/config/index.ts index e75db78b7a..5b250b572e 100644 --- a/packages/cli/src/config/index.ts +++ b/packages/cli/src/config/index.ts @@ -18,7 +18,6 @@ if (inE2ETests) { N8N_PUBLIC_API_DISABLED: 'true', EXTERNAL_FRONTEND_HOOKS_URLS: '', N8N_PERSONALIZATION_ENABLED: 'false', - NODE_FUNCTION_ALLOW_EXTERNAL: 'node-fetch', }; } else if (inTest) { const testsDir = join(tmpdir(), 'n8n-tests/'); diff --git a/packages/cli/src/config/schema.ts b/packages/cli/src/config/schema.ts index c25db2b255..e6aedf8369 100644 --- a/packages/cli/src/config/schema.ts +++ b/packages/cli/src/config/schema.ts @@ -4,7 +4,6 @@ import path from 'path'; import convict from 'convict'; import { UserSettings } from 'n8n-core'; import { jsonParse } from 'n8n-workflow'; -import { IS_V1_RELEASE } from '@/constants'; convict.addFormat({ name: 'nodes-list', @@ -225,13 +224,12 @@ export const schema = { }, executions: { - // By default workflows get always executed in their own process. - // If this option gets set to "main" it will run them in the - // main-process instead. + // By default workflows get always executed in the main process. + // TODO: remove this and all usage of `executions.process` when `own` mode is deleted process: { - doc: 'In what process workflows should be executed. Note: Own mode has been deprecated and will be removed in a future version as well as this setting.', + doc: 'In what process workflows should be executed.', format: ['main', 'own'] as const, - default: IS_V1_RELEASE ? 'main' : 'own', + default: 'main', env: 'EXECUTIONS_PROCESS', }, @@ -299,7 +297,7 @@ export const schema = { saveDataManualExecutions: { doc: 'Save data of executions when started manually via editor', format: 'Boolean', - default: false, + default: true, env: 'EXECUTIONS_DATA_SAVE_MANUAL_EXECUTIONS', }, @@ -311,7 +309,7 @@ export const schema = { pruneData: { doc: 'Delete data of past executions on a rolling basis', format: 'Boolean', - default: false, + default: true, env: 'EXECUTIONS_DATA_PRUNE', }, pruneDataMaxAge: { @@ -329,11 +327,11 @@ export const schema = { // Additional pruning option to delete executions if total count exceeds the configured max. // Deletes the oldest entries first - // Default is 0 = No limit + // Set to 0 for No limit pruneDataMaxCount: { - doc: 'Maximum number of executions to keep in DB. Default 0 = no limit', + doc: 'Maximum number of executions to keep in DB. 
0 = no limit', format: Number, - default: 0, + default: 10000, env: 'EXECUTIONS_DATA_PRUNE_MAX_COUNT', }, }, @@ -494,82 +492,6 @@ export const schema = { default: '', env: 'N8N_AUTH_EXCLUDE_ENDPOINTS', }, - basicAuth: { - active: { - format: 'Boolean', - default: false, - env: 'N8N_BASIC_AUTH_ACTIVE', - doc: '[DEPRECATED] If basic auth should be activated for editor and REST-API', - }, - user: { - format: String, - default: '', - env: 'N8N_BASIC_AUTH_USER', - doc: '[DEPRECATED] The name of the basic auth user', - }, - password: { - format: String, - default: '', - env: 'N8N_BASIC_AUTH_PASSWORD', - doc: '[DEPRECATED] The password of the basic auth user', - }, - hash: { - format: 'Boolean', - default: false, - env: 'N8N_BASIC_AUTH_HASH', - doc: '[DEPRECATED] If password for basic auth is hashed', - }, - }, - jwtAuth: { - active: { - format: 'Boolean', - default: false, - env: 'N8N_JWT_AUTH_ACTIVE', - doc: '[DEPRECATED] If JWT auth should be activated for editor and REST-API', - }, - jwtHeader: { - format: String, - default: '', - env: 'N8N_JWT_AUTH_HEADER', - doc: '[DEPRECATED] The request header containing a signed JWT', - }, - jwtHeaderValuePrefix: { - format: String, - default: '', - env: 'N8N_JWT_AUTH_HEADER_VALUE_PREFIX', - doc: '[DEPRECATED] The request header value prefix to strip (optional)', - }, - jwksUri: { - format: String, - default: '', - env: 'N8N_JWKS_URI', - doc: '[DEPRECATED] The URI to fetch JWK Set for JWT authentication', - }, - jwtIssuer: { - format: String, - default: '', - env: 'N8N_JWT_ISSUER', - doc: '[DEPRECATED] JWT issuer to expect (optional)', - }, - jwtNamespace: { - format: String, - default: '', - env: 'N8N_JWT_NAMESPACE', - doc: '[DEPRECATED] JWT namespace to expect (optional)', - }, - jwtAllowedTenantKey: { - format: String, - default: '', - env: 'N8N_JWT_ALLOWED_TENANT_KEY', - doc: '[DEPRECATED] JWT tenant key name to inspect within JWT namespace (optional)', - }, - jwtAllowedTenant: { - format: String, - default: '', - env: 'N8N_JWT_ALLOWED_TENANT', - doc: '[DEPRECATED] JWT tenant to allow (optional)', - }, - }, }, endpoints: { @@ -728,12 +650,6 @@ export const schema = { }, userManagement: { - disabled: { - doc: '[DEPRECATED] Disable user management and hide it completely.', - format: Boolean, - default: false, - env: 'N8N_USER_MANAGEMENT_DISABLED', - }, jwtSecret: { doc: 'Set a specific JWT secret (optional - n8n can generate one)', // Generated @ start.ts format: String, @@ -746,12 +662,6 @@ export const schema = { format: Boolean, default: false, }, - skipInstanceOwnerSetup: { - // n8n loads this setting from DB on startup - doc: 'Whether to hide the prompt the first time n8n starts with UM enabled', - format: Boolean, - default: false, - }, emails: { mode: { doc: 'How to send emails', @@ -944,7 +854,7 @@ export const schema = { push: { backend: { format: ['sse', 'websocket'] as const, - default: IS_V1_RELEASE ? 
'websocket' : 'sse', + default: 'websocket', env: 'N8N_PUSH_BACKEND', doc: 'Backend to use for push notifications', }, diff --git a/packages/cli/src/config/types.d.ts b/packages/cli/src/config/types.ts similarity index 98% rename from packages/cli/src/config/types.d.ts rename to packages/cli/src/config/types.ts index 4dfd71a6b7..28dee1e73f 100644 --- a/packages/cli/src/config/types.d.ts +++ b/packages/cli/src/config/types.ts @@ -80,7 +80,7 @@ type ExceptionPaths = { 'nodes.exclude': string[] | undefined; 'nodes.include': string[] | undefined; 'userManagement.isInstanceOwnerSetUp': boolean; - 'userManagement.skipInstanceOwnerSetup': boolean; + 'ui.banners.dismissed': string[] | undefined; }; // ----------------------------------- diff --git a/packages/cli/src/constants.ts b/packages/cli/src/constants.ts index ec00862f0c..a871d7e5ed 100644 --- a/packages/cli/src/constants.ts +++ b/packages/cli/src/constants.ts @@ -3,7 +3,6 @@ /* eslint-disable @typescript-eslint/naming-convention */ import { readFileSync } from 'fs'; import { resolve, join, dirname } from 'path'; -import { major } from 'semver'; import type { n8n } from 'n8n-core'; import { RESPONSE_ERROR_MESSAGES as CORE_RESPONSE_ERROR_MESSAGES, UserSettings } from 'n8n-core'; import { jsonParse } from 'n8n-workflow'; @@ -30,7 +29,6 @@ export function getN8nPackageJson() { export const START_NODES = ['n8n-nodes-base.start', 'n8n-nodes-base.manualTrigger']; export const N8N_VERSION = getN8nPackageJson().version; -export const IS_V1_RELEASE = major(N8N_VERSION) > 0; export const NODE_PACKAGE_PREFIX = 'n8n-nodes-'; @@ -49,6 +47,7 @@ export const RESPONSE_ERROR_MESSAGES = { PACKAGE_DOES_NOT_CONTAIN_NODES: 'The specified package does not contain any nodes', PACKAGE_LOADING_FAILED: 'The specified package could not be loaded', DISK_IS_FULL: 'There appears to be insufficient disk space', + USERS_QUOTA_REACHED: 'Maximum number of users reached', }; export const AUTH_COOKIE_NAME = 'n8n-auth'; @@ -70,24 +69,22 @@ export const WORKFLOW_REACTIVATE_MAX_TIMEOUT = 24 * 60 * 60 * 1000; // 1 day export const SETTINGS_LICENSE_CERT_KEY = 'license.cert'; -export const enum LICENSE_FEATURES { - SHARING = 'feat:sharing', - LDAP = 'feat:ldap', - SAML = 'feat:saml', - LOG_STREAMING = 'feat:logStreaming', - ADVANCED_EXECUTION_FILTERS = 'feat:advancedExecutionFilters', - VARIABLES = 'feat:variables', - SOURCE_CONTROL = 'feat:sourceControl', - API_DISABLED = 'feat:apiDisabled', -} +export const LICENSE_FEATURES = { + SHARING: 'feat:sharing', + LDAP: 'feat:ldap', + SAML: 'feat:saml', + LOG_STREAMING: 'feat:logStreaming', + ADVANCED_EXECUTION_FILTERS: 'feat:advancedExecutionFilters', + VARIABLES: 'feat:variables', + SOURCE_CONTROL: 'feat:sourceControl', + API_DISABLED: 'feat:apiDisabled', +} as const; -export const enum LICENSE_QUOTAS { - TRIGGER_LIMIT = 'quota:activeWorkflows', - VARIABLES_LIMIT = 'quota:maxVariables', - USERS_LIMIT = 'quota:users', -} +export const LICENSE_QUOTAS = { + TRIGGER_LIMIT: 'quota:activeWorkflows', + VARIABLES_LIMIT: 'quota:maxVariables', + USERS_LIMIT: 'quota:users', +} as const; +export const UNLIMITED_LICENSE_QUOTA = -1; export const CREDENTIAL_BLANKING_VALUE = '__n8n_BLANK_VALUE_e5362baf-c777-4d57-a609-6eaf1f9e87f6'; - -export const USER_MANAGEMENT_DOCS_URL = - 'https://docs.n8n.io/hosting/authentication/user-management-self-hosted'; diff --git a/packages/cli/src/controllers/auth.controller.ts b/packages/cli/src/controllers/auth.controller.ts index 5ae9fe0d7a..dc43ff167a 100644 --- a/packages/cli/src/controllers/auth.controller.ts +++ 
b/packages/cli/src/controllers/auth.controller.ts @@ -1,14 +1,20 @@ import validator from 'validator'; +import { In } from 'typeorm'; +import { Container } from 'typedi'; import { Authorized, Get, Post, RestController } from '@/decorators'; -import { AuthError, BadRequestError, InternalServerError } from '@/ResponseHelper'; +import { + AuthError, + BadRequestError, + InternalServerError, + UnauthorizedError, +} from '@/ResponseHelper'; import { sanitizeUser, withFeatureFlags } from '@/UserManagement/UserManagementHelper'; import { issueCookie, resolveJwt } from '@/auth/jwt'; -import { AUTH_COOKIE_NAME } from '@/constants'; +import { AUTH_COOKIE_NAME, RESPONSE_ERROR_MESSAGES } from '@/constants'; import { Request, Response } from 'express'; import type { ILogger } from 'n8n-workflow'; import type { User } from '@db/entities/User'; import { LoginRequest, UserRequest } from '@/requests'; -import { In } from 'typeorm'; import type { Config } from '@/config'; import type { PublicUser, @@ -25,7 +31,7 @@ import { } from '@/sso/ssoHelpers'; import type { UserRepository } from '@db/repositories'; import { InternalHooks } from '../InternalHooks'; -import Container from 'typedi'; +import { License } from '@/License'; @RestController() export class AuthController { @@ -71,7 +77,6 @@ export class AuthController { let user: User | undefined; let usedAuthenticationMethod = getCurrentAuthenticationMethod(); - if (isSamlCurrentAuthenticationMethod()) { // attempt to fetch user data with the credentials, but don't log in yet const preliminaryUser = await handleEmailLogin(email, password); @@ -120,6 +125,7 @@ export class AuthController { // If logged in, return user try { user = await resolveJwt(cookieContents); + return await withFeatureFlags(this.postHog, sanitizeUser(user)); } catch (error) { res.clearCookie(AUTH_COOKIE_NAME); @@ -155,6 +161,15 @@ export class AuthController { @Get('/resolve-signup-token') async resolveSignupToken(req: UserRequest.ResolveSignUp) { const { inviterId, inviteeId } = req.query; + const isWithinUsersLimit = Container.get(License).isWithinUsersLimit(); + + if (!isWithinUsersLimit) { + this.logger.debug('Request to resolve signup token failed because of users quota reached', { + inviterId, + inviteeId, + }); + throw new UnauthorizedError(RESPONSE_ERROR_MESSAGES.USERS_QUOTA_REACHED); + } if (!inviterId || !inviteeId) { this.logger.debug( diff --git a/packages/cli/src/controllers/e2e.controller.ts b/packages/cli/src/controllers/e2e.controller.ts new file mode 100644 index 0000000000..3e0a5fae3b --- /dev/null +++ b/packages/cli/src/controllers/e2e.controller.ts @@ -0,0 +1,163 @@ +import { Request } from 'express'; +import { Service } from 'typedi'; +import { v4 as uuid } from 'uuid'; +import config from '@/config'; +import type { Role } from '@db/entities/Role'; +import { RoleRepository, SettingsRepository, UserRepository } from '@db/repositories'; +import { ActiveWorkflowRunner } from '@/ActiveWorkflowRunner'; +import { hashPassword } from '@/UserManagement/UserManagementHelper'; +import { eventBus } from '@/eventbus/MessageEventBus/MessageEventBus'; +import { License } from '@/License'; +import { LICENSE_FEATURES, inE2ETests } from '@/constants'; +import { NoAuthRequired, Patch, Post, RestController } from '@/decorators'; +import type { UserSetupPayload } from '@/requests'; +import type { BooleanLicenseFeature } from '@/Interfaces'; + +if (!inE2ETests) { + console.error('E2E endpoints only allowed during E2E tests'); + process.exit(1); +} + +const tablesToTruncate = [ + 
'auth_identity', + 'auth_provider_sync_history', + 'event_destinations', + 'shared_workflow', + 'shared_credentials', + 'webhook_entity', + 'workflows_tags', + 'credentials_entity', + 'tag_entity', + 'workflow_statistics', + 'workflow_entity', + 'execution_entity', + 'settings', + 'installed_packages', + 'installed_nodes', + 'user', + 'role', + 'variables', +]; + +type ResetRequest = Request< + {}, + {}, + { + owner: UserSetupPayload; + members: UserSetupPayload[]; + } +>; + +@Service() +@NoAuthRequired() +@RestController('/e2e') +export class E2EController { + private enabledFeatures: Record = { + [LICENSE_FEATURES.SHARING]: false, + [LICENSE_FEATURES.LDAP]: false, + [LICENSE_FEATURES.SAML]: false, + [LICENSE_FEATURES.LOG_STREAMING]: false, + [LICENSE_FEATURES.ADVANCED_EXECUTION_FILTERS]: false, + [LICENSE_FEATURES.SOURCE_CONTROL]: false, + [LICENSE_FEATURES.VARIABLES]: false, + [LICENSE_FEATURES.API_DISABLED]: false, + }; + + constructor( + license: License, + private roleRepo: RoleRepository, + private settingsRepo: SettingsRepository, + private userRepo: UserRepository, + private workflowRunner: ActiveWorkflowRunner, + ) { + license.isFeatureEnabled = (feature: BooleanLicenseFeature) => + this.enabledFeatures[feature] ?? false; + } + + @Post('/reset') + async reset(req: ResetRequest) { + this.resetFeatures(); + await this.resetLogStreaming(); + await this.removeActiveWorkflows(); + await this.truncateAll(); + await this.setupUserManagement(req.body.owner, req.body.members); + } + + @Patch('/feature') + setFeature(req: Request<{}, {}, { feature: BooleanLicenseFeature; enabled: boolean }>) { + const { enabled, feature } = req.body; + this.enabledFeatures[feature] = enabled; + } + + private resetFeatures() { + for (const feature of Object.keys(this.enabledFeatures)) { + this.enabledFeatures[feature as BooleanLicenseFeature] = false; + } + } + + private async removeActiveWorkflows() { + this.workflowRunner.removeAllQueuedWorkflowActivations(); + await this.workflowRunner.removeAll(); + } + + private async resetLogStreaming() { + for (const id in eventBus.destinations) { + await eventBus.removeDestination(id); + } + } + + private async truncateAll() { + for (const table of tablesToTruncate) { + try { + const { connection } = this.roleRepo.manager; + await connection.query( + `DELETE FROM ${table}; DELETE FROM sqlite_sequence WHERE name=${table};`, + ); + } catch (error) { + console.warn('Dropping Table for E2E Reset error: ', error); + } + } + } + + private async setupUserManagement(owner: UserSetupPayload, members: UserSetupPayload[]) { + const roles: Array<[Role['name'], Role['scope']]> = [ + ['owner', 'global'], + ['member', 'global'], + ['owner', 'workflow'], + ['owner', 'credential'], + ['user', 'credential'], + ['editor', 'workflow'], + ]; + + const [{ id: globalOwnerRoleId }, { id: globalMemberRoleId }] = await this.roleRepo.save( + roles.map(([name, scope], index) => ({ name, scope, id: index.toString() })), + ); + + const users = []; + users.push({ + id: uuid(), + ...owner, + password: await hashPassword(owner.password), + globalRoleId: globalOwnerRoleId, + }); + for (const { password, ...payload } of members) { + users.push( + this.userRepo.create({ + id: uuid(), + ...payload, + password: await hashPassword(password), + globalRoleId: globalMemberRoleId, + }), + ); + } + + await this.userRepo.insert(users); + + await this.settingsRepo.update( + { key: 'userManagement.isInstanceOwnerSetUp' }, + { value: 'true' }, + ); + + config.set('userManagement.isInstanceOwnerSetUp', true); + } 
+} diff --git a/packages/cli/src/controllers/owner.controller.ts b/packages/cli/src/controllers/owner.controller.ts index ae8745a9af..4875e10ec3 100644 --- a/packages/cli/src/controllers/owner.controller.ts +++ b/packages/cli/src/controllers/owner.controller.ts @@ -1,6 +1,6 @@ import validator from 'validator'; import { validateEntity } from '@/GenericHelpers'; -import { Authorized, Get, Post, RestController } from '@/decorators'; +import { Authorized, Post, RestController } from '@/decorators'; import { BadRequestError } from '@/ResponseHelper'; import { hashPassword, @@ -13,12 +13,7 @@ import type { ILogger } from 'n8n-workflow'; import type { Config } from '@/config'; import { OwnerRequest } from '@/requests'; import type { IDatabaseCollections, IInternalHooksClass } from '@/Interfaces'; -import type { - CredentialsRepository, - SettingsRepository, - UserRepository, - WorkflowRepository, -} from '@db/repositories'; +import type { SettingsRepository, UserRepository } from '@db/repositories'; @Authorized(['global', 'owner']) @RestController('/owner') @@ -33,10 +28,6 @@ export class OwnerController { private readonly settingsRepository: SettingsRepository; - private readonly credentialsRepository: CredentialsRepository; - - private readonly workflowsRepository: WorkflowRepository; - constructor({ config, logger, @@ -46,28 +37,13 @@ export class OwnerController { config: Config; logger: ILogger; internalHooks: IInternalHooksClass; - repositories: Pick; + repositories: Pick; }) { this.config = config; this.logger = logger; this.internalHooks = internalHooks; this.userRepository = repositories.User; this.settingsRepository = repositories.Settings; - this.credentialsRepository = repositories.Credentials; - this.workflowsRepository = repositories.Workflow; - } - - @Get('/pre-setup') - async preSetup(): Promise<{ credentials: number; workflows: number }> { - if (this.config.getEnv('userManagement.isInstanceOwnerSetUp')) { - throw new BadRequestError('Instance owner already setup'); - } - - const [credentials, workflows] = await Promise.all([ - this.credentialsRepository.countBy({}), - this.workflowsRepository.countBy({}), - ]); - return { credentials, workflows }; } /** @@ -149,18 +125,10 @@ export class OwnerController { return sanitizeUser(owner); } - /** - * Persist that the instance owner setup has been skipped - */ - @Post('/skip-setup') - async skipSetup() { - await this.settingsRepository.update( - { key: 'userManagement.skipInstanceOwnerSetup' }, - { value: JSON.stringify(true) }, - ); - - this.config.set('userManagement.skipInstanceOwnerSetup', true); - - return { success: true }; + @Post('/dismiss-banner') + async dismissBanner(req: OwnerRequest.DismissBanner) { + const bannerName = 'banner' in req.body ? 
(req.body.banner as string) : ''; + const response = await this.settingsRepository.dismissBanner({ bannerName }); + return response; } } diff --git a/packages/cli/src/controllers/passwordReset.controller.ts b/packages/cli/src/controllers/passwordReset.controller.ts index aaaf72896d..afb4bf69a5 100644 --- a/packages/cli/src/controllers/passwordReset.controller.ts +++ b/packages/cli/src/controllers/passwordReset.controller.ts @@ -23,8 +23,11 @@ import { PasswordResetRequest } from '@/requests'; import type { IDatabaseCollections, IExternalHooksClass, IInternalHooksClass } from '@/Interfaces'; import { issueCookie } from '@/auth/jwt'; import { isLdapEnabled } from '@/Ldap/helpers'; -import { isSamlCurrentAuthenticationMethod } from '../sso/ssoHelpers'; -import { UserService } from '../user/user.service'; +import { isSamlCurrentAuthenticationMethod } from '@/sso/ssoHelpers'; +import { UserService } from '@/user/user.service'; +import { License } from '@/License'; +import { Container } from 'typedi'; +import { RESPONSE_ERROR_MESSAGES } from '@/constants'; @RestController() export class PasswordResetController { @@ -103,6 +106,12 @@ export class PasswordResetController { relations: ['authIdentities', 'globalRole'], }); + if (!user?.isOwner && !Container.get(License).isWithinUsersLimit()) { + this.logger.debug( + 'Request to send password reset email failed because the user limit was reached', + ); + throw new UnauthorizedError(RESPONSE_ERROR_MESSAGES.USERS_QUOTA_REACHED); + } if ( isSamlCurrentAuthenticationMethod() && !(user?.globalRole.name === 'owner' || user?.settings?.allowSSOManualLogin === true) @@ -116,7 +125,6 @@ export class PasswordResetController { } const ldapIdentity = user?.authIdentities?.find((i) => i.providerType === 'ldap'); - if (!user?.password || (ldapIdentity && user.disabled)) { this.logger.debug( 'Request to send password reset email failed because no user was found for the provided email', @@ -182,12 +190,21 @@ export class PasswordResetController { // Timestamp is saved in seconds const currentTimestamp = Math.floor(Date.now() / 1000); - const user = await this.userRepository.findOneBy({ - id, - resetPasswordToken, - resetPasswordTokenExpiration: MoreThanOrEqual(currentTimestamp), + const user = await this.userRepository.findOne({ + where: { + id, + resetPasswordToken, + resetPasswordTokenExpiration: MoreThanOrEqual(currentTimestamp), + }, + relations: ['globalRole'], }); - + if (!user?.isOwner && !Container.get(License).isWithinUsersLimit()) { + this.logger.debug( + 'Request to resolve password token failed because the user limit was reached', + { userId: id }, + ); + throw new UnauthorizedError(RESPONSE_ERROR_MESSAGES.USERS_QUOTA_REACHED); + } if (!user) { this.logger.debug( 'Request to resolve password token failed because no user was found for the provided user ID and reset password token', diff --git a/packages/cli/src/controllers/users.controller.ts b/packages/cli/src/controllers/users.controller.ts index e68f869968..d97aa55fd4 100644 --- a/packages/cli/src/controllers/users.controller.ts +++ b/packages/cli/src/controllers/users.controller.ts @@ -17,7 +17,12 @@ import { withFeatureFlags, } from '@/UserManagement/UserManagementHelper'; import { issueCookie } from '@/auth/jwt'; -import { BadRequestError, InternalServerError, NotFoundError } from '@/ResponseHelper'; +import { + BadRequestError, + InternalServerError, + NotFoundError, + UnauthorizedError, +} from '@/ResponseHelper'; import { Response } from 'express'; import type { Config } from '@/config'; import { 
UserRequest, UserSettingsUpdatePayload } from '@/requests'; @@ -32,7 +37,6 @@ import type { import type { ActiveWorkflowRunner } from '@/ActiveWorkflowRunner'; import { AuthIdentity } from '@db/entities/AuthIdentity'; import type { PostHogClient } from '@/posthog'; -import { userManagementEnabledMiddleware } from '../middlewares/userManagementEnabled'; import { isSamlLicensedAndEnabled } from '../sso/saml/samlHelpers'; import type { RoleRepository, @@ -40,8 +44,11 @@ import type { SharedWorkflowRepository, UserRepository, } from '@db/repositories'; -import { UserService } from '../user/user.service'; +import { UserService } from '@/user/user.service'; import { plainToInstance } from 'class-transformer'; +import { License } from '@/License'; +import { Container } from 'typedi'; +import { RESPONSE_ERROR_MESSAGES } from '@/constants'; @Authorized(['global', 'owner']) @RestController('/users') @@ -106,8 +113,10 @@ export class UsersController { /** * Send email invite(s) to one or multiple users and create user shell(s). */ - @Post('/', { middlewares: [userManagementEnabledMiddleware] }) + @Post('/') async sendEmailInvites(req: UserRequest.Invite) { + const isWithinUsersLimit = Container.get(License).isWithinUsersLimit(); + if (isSamlLicensedAndEnabled()) { this.logger.debug( 'SAML is enabled, so users are managed by the Identity Provider and cannot be added through invites', @@ -117,6 +126,13 @@ export class UsersController { ); } + if (!isWithinUsersLimit) { + this.logger.debug( + 'Request to send email invite(s) to user(s) failed because the user limit quota has been reached', + ); + throw new UnauthorizedError(RESPONSE_ERROR_MESSAGES.USERS_QUOTA_REACHED); + } + if (!this.config.getEnv('userManagement.isInstanceOwnerSetUp')) { this.logger.debug( 'Request to send email invite(s) to user(s) failed because the owner account is not set up', @@ -552,6 +568,14 @@ export class UsersController { @Post('/:id/reinvite') async reinviteUser(req: UserRequest.Reinvite) { const { id: idToReinvite } = req.params; + const isWithinUsersLimit = Container.get(License).isWithinUsersLimit(); + + if (!isWithinUsersLimit) { + this.logger.debug( + 'Request to send email invite(s) to user(s) failed because the user limit quota has been reached', + ); + throw new UnauthorizedError(RESPONSE_ERROR_MESSAGES.USERS_QUOTA_REACHED); + } if (!isEmailSetUp()) { this.logger.error('Request to reinvite a user failed because email sending was not set up'); diff --git a/packages/cli/src/credentials/credentials.service.ts b/packages/cli/src/credentials/credentials.service.ts index 2eb0f55961..e00820a958 100644 --- a/packages/cli/src/credentials/credentials.service.ts +++ b/packages/cli/src/credentials/credentials.service.ts @@ -7,7 +7,7 @@ import type { INodeCredentialTestResult, INodeProperties, } from 'n8n-workflow'; -import { deepCopy, LoggerProxy, NodeHelpers } from 'n8n-workflow'; +import { CREDENTIAL_EMPTY_VALUE, deepCopy, LoggerProxy, NodeHelpers } from 'n8n-workflow'; import { Container } from 'typedi'; import type { FindManyOptions, FindOptionsWhere } from 'typeorm'; import { In } from 'typeorm'; @@ -300,7 +300,11 @@ export class CredentialsService { for (const dataKey of Object.keys(copiedData)) { // The frontend only cares that this value isn't falsy. 
if (dataKey === 'oauthTokenData') { - copiedData[dataKey] = CREDENTIAL_BLANKING_VALUE; + if (copiedData[dataKey].toString().length > 0) { + copiedData[dataKey] = CREDENTIAL_BLANKING_VALUE; + } else { + copiedData[dataKey] = CREDENTIAL_EMPTY_VALUE; + } continue; } const prop = properties.find((v) => v.name === dataKey); @@ -308,8 +312,11 @@ export class CredentialsService { continue; } if (prop.typeOptions?.password) { - // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-member-access - copiedData[dataKey] = CREDENTIAL_BLANKING_VALUE; + if (copiedData[dataKey].toString().length > 0) { + copiedData[dataKey] = CREDENTIAL_BLANKING_VALUE; + } else { + copiedData[dataKey] = CREDENTIAL_EMPTY_VALUE; + } } } @@ -321,7 +328,7 @@ export class CredentialsService { // eslint-disable-next-line @typescript-eslint/no-unsafe-argument for (const [key, value] of Object.entries(unmerged)) { // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access - if (value === CREDENTIAL_BLANKING_VALUE) { + if (value === CREDENTIAL_BLANKING_VALUE || value === CREDENTIAL_EMPTY_VALUE) { // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-member-access unmerged[key] = replacement[key]; } else if ( diff --git a/packages/cli/src/databases/entities/User.ts b/packages/cli/src/databases/entities/User.ts index 1aa334d489..1ee1570f8f 100644 --- a/packages/cli/src/databases/entities/User.ts +++ b/packages/cli/src/databases/entities/User.ts @@ -113,4 +113,14 @@ export class User extends AbstractEntity implements IUser { computeIsPending(): void { this.isPending = this.password === null; } + + /** + * Whether the user is instance owner + */ + isOwner: boolean; + + @AfterLoad() + computeIsOwner(): void { + this.isOwner = this.globalRole?.name === 'owner'; + } } diff --git a/packages/cli/src/databases/migrations/mysqldb/1646992772331-CreateUserManagement.ts b/packages/cli/src/databases/migrations/mysqldb/1646992772331-CreateUserManagement.ts index 89e94181d3..c227f457c3 100644 --- a/packages/cli/src/databases/migrations/mysqldb/1646992772331-CreateUserManagement.ts +++ b/packages/cli/src/databases/migrations/mysqldb/1646992772331-CreateUserManagement.ts @@ -154,6 +154,10 @@ export class CreateUserManagement1646992772331 implements ReversibleMigration { await queryRunner.query( `INSERT INTO ${tablePrefix}settings (\`key\`, value, loadOnStartup) VALUES ("userManagement.isInstanceOwnerSetUp", "false", 1), ("userManagement.skipInstanceOwnerSetup", "false", 1)`, ); + + await queryRunner.query( + `INSERT INTO ${tablePrefix}settings (\`key\`, value, loadOnStartup) VALUES ("ui.banners.dismissed", JSON_ARRAY('V1'), 1)`, + ); } async down({ queryRunner, tablePrefix }: MigrationContext) { diff --git a/packages/cli/src/databases/migrations/mysqldb/1671726148420-RemoveWorkflowDataLoadedFlag.ts b/packages/cli/src/databases/migrations/mysqldb/1671726148420-RemoveWorkflowDataLoadedFlag.ts index 6b7b05f0f5..f000a2b7ce 100644 --- a/packages/cli/src/databases/migrations/mysqldb/1671726148420-RemoveWorkflowDataLoadedFlag.ts +++ b/packages/cli/src/databases/migrations/mysqldb/1671726148420-RemoveWorkflowDataLoadedFlag.ts @@ -1,5 +1,5 @@ import type { MigrationContext, ReversibleMigration } from '@db/types'; -import { StatisticsNames } from '@/databases/entities/WorkflowStatistics'; +import { StatisticsNames } from '@db/entities/WorkflowStatistics'; export class RemoveWorkflowDataLoadedFlag1671726148420 implements ReversibleMigration { async up({ 
queryRunner, tablePrefix }: MigrationContext) { diff --git a/packages/cli/src/databases/migrations/mysqldb/1681134145997-RemoveSkipOwnerSetup.ts b/packages/cli/src/databases/migrations/mysqldb/1681134145997-RemoveSkipOwnerSetup.ts new file mode 100644 index 0000000000..c756b5839a --- /dev/null +++ b/packages/cli/src/databases/migrations/mysqldb/1681134145997-RemoveSkipOwnerSetup.ts @@ -0,0 +1,9 @@ +import type { IrreversibleMigration, MigrationContext } from '@db/types'; + +export class RemoveSkipOwnerSetup1681134145997 implements IrreversibleMigration { + async up({ queryRunner, tablePrefix }: MigrationContext) { + await queryRunner.query( + `DELETE FROM ${tablePrefix}settings WHERE \`key\` = 'userManagement.skipInstanceOwnerSetup';`, + ); + } +} diff --git a/packages/cli/src/databases/migrations/mysqldb/1690000000030-SeparateExecutionData.ts b/packages/cli/src/databases/migrations/mysqldb/1690000000030-SeparateExecutionData.ts index 7bf58e9c7d..cb56cf7b63 100644 --- a/packages/cli/src/databases/migrations/mysqldb/1690000000030-SeparateExecutionData.ts +++ b/packages/cli/src/databases/migrations/mysqldb/1690000000030-SeparateExecutionData.ts @@ -6,7 +6,7 @@ export class SeparateExecutionData1690000000030 implements ReversibleMigration { `CREATE TABLE ${tablePrefix}execution_data ( executionId int(11) NOT NULL primary key, workflowData json NOT NULL, - data TEXT NOT NULL, + data MEDIUMTEXT NOT NULL, CONSTRAINT \`${tablePrefix}execution_data_FK\` FOREIGN KEY (\`executionId\`) REFERENCES \`${tablePrefix}execution_entity\` (\`id\`) ON DELETE CASCADE ) ENGINE=InnoDB`, @@ -30,7 +30,7 @@ export class SeparateExecutionData1690000000030 implements ReversibleMigration { await queryRunner.query( `ALTER TABLE ${tablePrefix}execution_entity ADD workflowData json NULL, - ADD data text NULL`, + ADD data MEDIUMTEXT NULL`, ); await queryRunner.query( diff --git a/packages/cli/src/databases/migrations/mysqldb/1690000000031-FixExecutionDataType.ts b/packages/cli/src/databases/migrations/mysqldb/1690000000031-FixExecutionDataType.ts new file mode 100644 index 0000000000..7107eaec13 --- /dev/null +++ b/packages/cli/src/databases/migrations/mysqldb/1690000000031-FixExecutionDataType.ts @@ -0,0 +1,17 @@ +import type { MigrationContext, IrreversibleMigration } from '@db/types'; + +export class FixExecutionDataType1690000000031 implements IrreversibleMigration { + async up({ queryRunner, tablePrefix }: MigrationContext) { + /** + * SeparateExecutionData migration for MySQL/MariaDB accidentally changed the data-type for `data` column to `TEXT`. + * This migration changes it back. + * The previous migration has been patched to avoid converting to `TEXT`, which might fail. + * + * For any users who already ran the previous migration, this migration should fix the column type. 
+ * For any users who run these migrations in the same batch, this migration would be no-op, as the column type is already `MEDIUMTEXT` + */ + await queryRunner.query( + 'ALTER TABLE `' + tablePrefix + 'execution_data` MODIFY COLUMN `data` MEDIUMTEXT', + ); + } +} diff --git a/packages/cli/src/databases/migrations/mysqldb/index.ts b/packages/cli/src/databases/migrations/mysqldb/index.ts index ac84b759e2..18e666a145 100644 --- a/packages/cli/src/databases/migrations/mysqldb/index.ts +++ b/packages/cli/src/databases/migrations/mysqldb/index.ts @@ -40,6 +40,8 @@ import { CreateVariables1677501636753 } from './1677501636753-CreateVariables'; import { AddUserActivatedProperty1681134145996 } from './1681134145996-AddUserActivatedProperty'; import { MigrateIntegerKeysToString1690000000001 } from './1690000000001-MigrateIntegerKeysToString'; import { SeparateExecutionData1690000000030 } from './1690000000030-SeparateExecutionData'; +import { FixExecutionDataType1690000000031 } from './1690000000031-FixExecutionDataType'; +import { RemoveSkipOwnerSetup1681134145997 } from './1681134145997-RemoveSkipOwnerSetup'; export const mysqlMigrations: Migration[] = [ InitialMigration1588157391238, @@ -83,4 +85,6 @@ export const mysqlMigrations: Migration[] = [ AddUserActivatedProperty1681134145996, MigrateIntegerKeysToString1690000000001, SeparateExecutionData1690000000030, + FixExecutionDataType1690000000031, + RemoveSkipOwnerSetup1681134145997, ]; diff --git a/packages/cli/src/databases/migrations/postgresdb/1646992772331-CreateUserManagement.ts b/packages/cli/src/databases/migrations/postgresdb/1646992772331-CreateUserManagement.ts index 982596e192..98547f3564 100644 --- a/packages/cli/src/databases/migrations/postgresdb/1646992772331-CreateUserManagement.ts +++ b/packages/cli/src/databases/migrations/postgresdb/1646992772331-CreateUserManagement.ts @@ -133,6 +133,11 @@ export class CreateUserManagement1646992772331 implements ReversibleMigration { await queryRunner.query( `INSERT INTO ${tablePrefix}settings ("key", "value", "loadOnStartup") VALUES ('userManagement.isInstanceOwnerSetUp', 'false', true), ('userManagement.skipInstanceOwnerSetup', 'false', true)`, ); + + await queryRunner.query( + `INSERT INTO ${tablePrefix}settings ("key", "value", "loadOnStartup") VALUES ($1, $2, $3)`, + ['ui.banners.dismissed', '["V1"]', true], + ); } async down({ queryRunner, tablePrefix }: MigrationContext) { diff --git a/packages/cli/src/databases/migrations/postgresdb/1659902242948-AddJsonKeyPinData.ts b/packages/cli/src/databases/migrations/postgresdb/1659902242948-AddJsonKeyPinData.ts index 57ca41dfb6..fa1face876 100644 --- a/packages/cli/src/databases/migrations/postgresdb/1659902242948-AddJsonKeyPinData.ts +++ b/packages/cli/src/databases/migrations/postgresdb/1659902242948-AddJsonKeyPinData.ts @@ -1,4 +1,4 @@ -import type { MigrationContext, IrreversibleMigration } from '@/databases/types'; +import type { MigrationContext, IrreversibleMigration } from '@db/types'; import { runInBatches } from '@db/utils/migrationHelpers'; import { addJsonKeyToPinDataColumn } from '../sqlite/1659888469333-AddJsonKeyPinData'; diff --git a/packages/cli/src/databases/migrations/postgresdb/1671726148421-RemoveWorkflowDataLoadedFlag.ts b/packages/cli/src/databases/migrations/postgresdb/1671726148421-RemoveWorkflowDataLoadedFlag.ts index c62dfd2c2f..e28e6e67c1 100644 --- a/packages/cli/src/databases/migrations/postgresdb/1671726148421-RemoveWorkflowDataLoadedFlag.ts +++ 
b/packages/cli/src/databases/migrations/postgresdb/1671726148421-RemoveWorkflowDataLoadedFlag.ts @@ -1,5 +1,5 @@ import type { MigrationContext, ReversibleMigration } from '@db/types'; -import { StatisticsNames } from '@/databases/entities/WorkflowStatistics'; +import { StatisticsNames } from '@db/entities/WorkflowStatistics'; export class RemoveWorkflowDataLoadedFlag1671726148421 implements ReversibleMigration { async up({ queryRunner, tablePrefix }: MigrationContext) { diff --git a/packages/cli/src/databases/migrations/postgresdb/1681134145997-RemoveSkipOwnerSetup.ts b/packages/cli/src/databases/migrations/postgresdb/1681134145997-RemoveSkipOwnerSetup.ts new file mode 100644 index 0000000000..958cf30199 --- /dev/null +++ b/packages/cli/src/databases/migrations/postgresdb/1681134145997-RemoveSkipOwnerSetup.ts @@ -0,0 +1,9 @@ +import type { IrreversibleMigration, MigrationContext } from '@db/types'; + +export class RemoveSkipOwnerSetup1681134145997 implements IrreversibleMigration { + async up({ queryRunner, tablePrefix }: MigrationContext) { + await queryRunner.query( + `DELETE FROM ${tablePrefix}settings WHERE key = 'userManagement.skipInstanceOwnerSetup';`, + ); + } +} diff --git a/packages/cli/src/databases/migrations/postgresdb/index.ts b/packages/cli/src/databases/migrations/postgresdb/index.ts index 95c025acf6..460c33213c 100644 --- a/packages/cli/src/databases/migrations/postgresdb/index.ts +++ b/packages/cli/src/databases/migrations/postgresdb/index.ts @@ -38,6 +38,7 @@ import { CreateVariables1677501636754 } from './1677501636754-CreateVariables'; import { AddUserActivatedProperty1681134145996 } from './1681134145996-AddUserActivatedProperty'; import { MigrateIntegerKeysToString1690000000000 } from './1690000000000-MigrateIntegerKeysToString'; import { SeparateExecutionData1690000000020 } from './1690000000020-SeparateExecutionData'; +import { RemoveSkipOwnerSetup1681134145997 } from './1681134145997-RemoveSkipOwnerSetup'; export const postgresMigrations: Migration[] = [ InitialMigration1587669153312, @@ -79,4 +80,5 @@ export const postgresMigrations: Migration[] = [ AddUserActivatedProperty1681134145996, MigrateIntegerKeysToString1690000000000, SeparateExecutionData1690000000020, + RemoveSkipOwnerSetup1681134145997, ]; diff --git a/packages/cli/src/databases/migrations/sqlite/1646992772331-CreateUserManagement.ts b/packages/cli/src/databases/migrations/sqlite/1646992772331-CreateUserManagement.ts index 42ffe1d517..298db01c34 100644 --- a/packages/cli/src/databases/migrations/sqlite/1646992772331-CreateUserManagement.ts +++ b/packages/cli/src/databases/migrations/sqlite/1646992772331-CreateUserManagement.ts @@ -94,6 +94,14 @@ export class CreateUserManagement1646992772331 implements ReversibleMigration { INSERT INTO "${tablePrefix}settings" (key, value, loadOnStartup) values ('userManagement.isInstanceOwnerSetUp', 'false', true), ('userManagement.skipInstanceOwnerSetup', 'false', true) `); + + await queryRunner.query( + ` + INSERT INTO "${tablePrefix}settings" (key, value, loadOnStartup) + VALUES (?, ?, ?) 
+ `, + ['ui.banners.dismissed', '["V1"]', true], + ); } async down({ queryRunner, tablePrefix }: MigrationContext) { diff --git a/packages/cli/src/databases/migrations/sqlite/1652367743993-AddUserSettings.ts b/packages/cli/src/databases/migrations/sqlite/1652367743993-AddUserSettings.ts index 82efcf0043..ee73395478 100644 --- a/packages/cli/src/databases/migrations/sqlite/1652367743993-AddUserSettings.ts +++ b/packages/cli/src/databases/migrations/sqlite/1652367743993-AddUserSettings.ts @@ -1,6 +1,8 @@ import type { MigrationContext, ReversibleMigration } from '@db/types'; export class AddUserSettings1652367743993 implements ReversibleMigration { + transaction = false as const; + async up({ queryRunner, tablePrefix }: MigrationContext) { await queryRunner.query( `CREATE TABLE "temporary_user" ("id" varchar PRIMARY KEY NOT NULL, "email" varchar(255), "firstName" varchar(32), "lastName" varchar(32), "password" varchar, "resetPasswordToken" varchar, "resetPasswordTokenExpiration" integer DEFAULT NULL, "personalizationAnswers" text, "createdAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "updatedAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "globalRoleId" integer NOT NULL, "settings" text, CONSTRAINT "FK_${tablePrefix}f0609be844f9200ff4365b1bb3d" FOREIGN KEY ("globalRoleId") REFERENCES "${tablePrefix}role" ("id") ON DELETE NO ACTION ON UPDATE NO ACTION)`, diff --git a/packages/cli/src/databases/migrations/sqlite/1652905585850-AddAPIKeyColumn.ts b/packages/cli/src/databases/migrations/sqlite/1652905585850-AddAPIKeyColumn.ts index fa319b9808..74440331bb 100644 --- a/packages/cli/src/databases/migrations/sqlite/1652905585850-AddAPIKeyColumn.ts +++ b/packages/cli/src/databases/migrations/sqlite/1652905585850-AddAPIKeyColumn.ts @@ -1,6 +1,8 @@ import type { MigrationContext, ReversibleMigration } from '@db/types'; export class AddAPIKeyColumn1652905585850 implements ReversibleMigration { + transaction = false as const; + async up({ queryRunner, tablePrefix }: MigrationContext) { await queryRunner.query( `CREATE TABLE "temporary_user" ("id" varchar PRIMARY KEY NOT NULL, "email" varchar(255), "firstName" varchar(32), "lastName" varchar(32), "password" varchar, "resetPasswordToken" varchar, "resetPasswordTokenExpiration" integer DEFAULT NULL, "personalizationAnswers" text, "createdAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "updatedAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "globalRoleId" integer NOT NULL, "settings" text, "apiKey" varchar, CONSTRAINT "FK_${tablePrefix}f0609be844f9200ff4365b1bb3d" FOREIGN KEY ("globalRoleId") REFERENCES "${tablePrefix}role" ("id") ON DELETE NO ACTION ON UPDATE NO ACTION)`, diff --git a/packages/cli/src/databases/migrations/sqlite/1671726148419-RemoveWorkflowDataLoadedFlag.ts b/packages/cli/src/databases/migrations/sqlite/1671726148419-RemoveWorkflowDataLoadedFlag.ts index 31b1f95481..5de2f01b68 100644 --- a/packages/cli/src/databases/migrations/sqlite/1671726148419-RemoveWorkflowDataLoadedFlag.ts +++ b/packages/cli/src/databases/migrations/sqlite/1671726148419-RemoveWorkflowDataLoadedFlag.ts @@ -1,5 +1,5 @@ import type { MigrationContext, ReversibleMigration } from '@db/types'; -import { StatisticsNames } from '@/databases/entities/WorkflowStatistics'; +import { StatisticsNames } from '@db/entities/WorkflowStatistics'; export class RemoveWorkflowDataLoadedFlag1671726148419 implements ReversibleMigration { async up({ queryRunner, tablePrefix }: 
MigrationContext) { diff --git a/packages/cli/src/databases/migrations/sqlite/1673268682475-DeleteExecutionsWithWorkflows.ts b/packages/cli/src/databases/migrations/sqlite/1673268682475-DeleteExecutionsWithWorkflows.ts index 250c747ef4..cfd670bc62 100644 --- a/packages/cli/src/databases/migrations/sqlite/1673268682475-DeleteExecutionsWithWorkflows.ts +++ b/packages/cli/src/databases/migrations/sqlite/1673268682475-DeleteExecutionsWithWorkflows.ts @@ -1,6 +1,8 @@ import type { MigrationContext, ReversibleMigration } from '@db/types'; export class DeleteExecutionsWithWorkflows1673268682475 implements ReversibleMigration { + transaction = false as const; + async up({ queryRunner, tablePrefix }: MigrationContext) { const workflowIds = (await queryRunner.query(` SELECT id FROM "${tablePrefix}workflow_entity" diff --git a/packages/cli/src/databases/migrations/sqlite/1681134145997-RemoveSkipOwnerSetup.ts b/packages/cli/src/databases/migrations/sqlite/1681134145997-RemoveSkipOwnerSetup.ts new file mode 100644 index 0000000000..4d8f7d47c6 --- /dev/null +++ b/packages/cli/src/databases/migrations/sqlite/1681134145997-RemoveSkipOwnerSetup.ts @@ -0,0 +1,9 @@ +import type { IrreversibleMigration, MigrationContext } from '@db/types'; + +export class RemoveSkipOwnerSetup1681134145997 implements IrreversibleMigration { + async up({ queryRunner, tablePrefix }: MigrationContext) { + await queryRunner.query( + `DELETE FROM "${tablePrefix}settings" WHERE key = 'userManagement.skipInstanceOwnerSetup';`, + ); + } +} diff --git a/packages/cli/src/databases/migrations/sqlite/1690000000002-MigrateIntegerKeysToString.ts b/packages/cli/src/databases/migrations/sqlite/1690000000002-MigrateIntegerKeysToString.ts index 50cd5c57b2..39d566c89e 100644 --- a/packages/cli/src/databases/migrations/sqlite/1690000000002-MigrateIntegerKeysToString.ts +++ b/packages/cli/src/databases/migrations/sqlite/1690000000002-MigrateIntegerKeysToString.ts @@ -1,18 +1,31 @@ +import { statSync } from 'fs'; +import path from 'path'; +import { UserSettings } from 'n8n-core'; import type { MigrationContext, IrreversibleMigration } from '@db/types'; +import config from '@/config'; +import { copyTable } from '@db/utils/migrationHelpers'; export class MigrateIntegerKeysToString1690000000002 implements IrreversibleMigration { - async up({ queryRunner, tablePrefix }: MigrationContext) { + transaction = false as const; + + async up(context: MigrationContext) { + // eslint-disable-next-line @typescript-eslint/no-use-before-define + await pruneExecutionsData(context); + + const { queryRunner, tablePrefix } = context; + await queryRunner.query(` -CREATE TABLE "${tablePrefix}TMP_workflow_entity" ("id" varchar(36) PRIMARY KEY NOT NULL, "name" varchar(128) NOT NULL, "active" boolean NOT NULL, "nodes" text, "connections" text NOT NULL, "createdAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "updatedAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "settings" text, "staticData" text, "pinData" text, "versionId" varchar(36), "triggerCount" integer NOT NULL DEFAULT 0);`); + CREATE TABLE "${tablePrefix}TMP_workflow_entity" ("id" varchar(36) PRIMARY KEY NOT NULL, "name" varchar(128) NOT NULL, "active" boolean NOT NULL, "nodes" text, "connections" text NOT NULL, "createdAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "updatedAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "settings" text, "staticData" text, "pinData" text, "versionId" varchar(36), "triggerCount" 
integer NOT NULL DEFAULT 0);`); await queryRunner.query( `INSERT INTO "${tablePrefix}TMP_workflow_entity" (id, name, active, nodes, connections, createdAt, updatedAt, settings, staticData, pinData, triggerCount, versionId) SELECT id, name, active, nodes, connections, createdAt, updatedAt, settings, staticData, pinData, triggerCount, versionId FROM "${tablePrefix}workflow_entity";`, ); await queryRunner.query(`DROP TABLE "${tablePrefix}workflow_entity";`); - await queryRunner.query(`ALTER TABLE "${tablePrefix}TMP_workflow_entity" RENAME TO "${tablePrefix}workflow_entity"; -`); + await queryRunner.query( + `ALTER TABLE "${tablePrefix}TMP_workflow_entity" RENAME TO "${tablePrefix}workflow_entity"`, + ); await queryRunner.query(` -CREATE TABLE "${tablePrefix}TMP_tag_entity" ("id" varchar(36) PRIMARY KEY NOT NULL, "name" varchar(24) NOT NULL, "createdAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "updatedAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')));`); + CREATE TABLE "${tablePrefix}TMP_tag_entity" ("id" varchar(36) PRIMARY KEY NOT NULL, "name" varchar(24) NOT NULL, "createdAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "updatedAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')));`); await queryRunner.query( `INSERT INTO "${tablePrefix}TMP_tag_entity" SELECT * FROM "${tablePrefix}tag_entity";`, ); @@ -22,7 +35,7 @@ CREATE TABLE "${tablePrefix}TMP_tag_entity" ("id" varchar(36) PRIMARY KEY NOT NU ); await queryRunner.query(` -CREATE TABLE "${tablePrefix}TMP_workflows_tags" ("workflowId" varchar(36) NOT NULL, "tagId" integer NOT NULL, CONSTRAINT "FK_${tablePrefix}workflows_tags_workflow_entity" FOREIGN KEY ("workflowId") REFERENCES "${tablePrefix}workflow_entity" ("id") ON DELETE CASCADE ON UPDATE NO ACTION, CONSTRAINT "FK_${tablePrefix}workflows_tags_tag_entity" FOREIGN KEY ("tagId") REFERENCES "${tablePrefix}tag_entity" ("id") ON DELETE CASCADE ON UPDATE NO ACTION, PRIMARY KEY ("workflowId", "tagId"));`); + CREATE TABLE "${tablePrefix}TMP_workflows_tags" ("workflowId" varchar(36) NOT NULL, "tagId" integer NOT NULL, CONSTRAINT "FK_${tablePrefix}workflows_tags_workflow_entity" FOREIGN KEY ("workflowId") REFERENCES "${tablePrefix}workflow_entity" ("id") ON DELETE CASCADE ON UPDATE NO ACTION, CONSTRAINT "FK_${tablePrefix}workflows_tags_tag_entity" FOREIGN KEY ("tagId") REFERENCES "${tablePrefix}tag_entity" ("id") ON DELETE CASCADE ON UPDATE NO ACTION, PRIMARY KEY ("workflowId", "tagId"));`); await queryRunner.query( `INSERT INTO "${tablePrefix}TMP_workflows_tags" SELECT * FROM "${tablePrefix}workflows_tags";`, ); @@ -105,9 +118,7 @@ CREATE TABLE "${tablePrefix}TMP_workflows_tags" ("workflowId" varchar(36) NOT NU "data" text NOT NULL, "status" varchar, FOREIGN KEY("workflowId") REFERENCES "${tablePrefix}workflow_entity" ("id") ON DELETE CASCADE );`); - await queryRunner.query( - `INSERT INTO "${tablePrefix}TMP_execution_entity" SELECT * FROM "${tablePrefix}execution_entity";`, - ); + await copyTable({ tablePrefix, queryRunner }, 'execution_entity', 'TMP_execution_entity'); await queryRunner.query(`DROP TABLE "${tablePrefix}execution_entity";`); await queryRunner.query( `ALTER TABLE "${tablePrefix}TMP_execution_entity" RENAME TO "${tablePrefix}execution_entity";`, @@ -175,3 +186,44 @@ CREATE TABLE "${tablePrefix}TMP_workflows_tags" ("workflowId" varchar(36) NOT NU ); } } + +const DESIRED_DATABASE_FILE_SIZE = 1 * 1024 * 1024 * 1024; // 1 GB +const migrationsPruningEnabled = 
process.env.MIGRATIONS_PRUNING_ENABLED === 'true'; + +function getSqliteDbFileSize(): number { + const filename = path.resolve( + UserSettings.getUserN8nFolderPath(), + config.getEnv('database.sqlite.database'), + ); + const { size } = statSync(filename); + return size; +} + +const pruneExecutionsData = async ({ queryRunner, tablePrefix, logger }: MigrationContext) => { + if (migrationsPruningEnabled) { + const dbFileSize = getSqliteDbFileSize(); + if (dbFileSize < DESIRED_DATABASE_FILE_SIZE) { + logger.debug(`DB Size not large enough to prune: ${dbFileSize}`); + return; + } + + console.time('pruningData'); + const counting = (await queryRunner.query( + `select count(id) as rows from "${tablePrefix}execution_entity";`, + )) as Array<{ rows: number }>; + + const averageExecutionSize = dbFileSize / counting[0].rows; + const numberOfExecutionsToKeep = Math.floor(DESIRED_DATABASE_FILE_SIZE / averageExecutionSize); + + const query = `SELECT id FROM "${tablePrefix}execution_entity" ORDER BY id DESC limit ${numberOfExecutionsToKeep}, 1`; + const idToKeep = await queryRunner + .query(query) + .then((rows: Array<{ id: number }>) => rows[0].id); + + const removalQuery = `DELETE FROM "${tablePrefix}execution_entity" WHERE id < ${idToKeep} and status IN ('success')`; + await queryRunner.query(removalQuery); + console.timeEnd('pruningData'); + } else { + logger.debug('Pruning was requested, but was not enabled'); + } +}; diff --git a/packages/cli/src/databases/migrations/sqlite/1690000000010-SeparateExecutionData.ts b/packages/cli/src/databases/migrations/sqlite/1690000000010-SeparateExecutionData.ts index 3d8943a7a6..f5b36b05bf 100644 --- a/packages/cli/src/databases/migrations/sqlite/1690000000010-SeparateExecutionData.ts +++ b/packages/cli/src/databases/migrations/sqlite/1690000000010-SeparateExecutionData.ts @@ -1,4 +1,5 @@ -import type { MigrationContext, ReversibleMigration } from '@/databases/types'; +import type { MigrationContext, ReversibleMigration } from '@db/types'; +import { copyTable } from '@db/utils/migrationHelpers'; export class SeparateExecutionData1690000000010 implements ReversibleMigration { async up({ queryRunner, tablePrefix }: MigrationContext): Promise { @@ -11,13 +12,12 @@ export class SeparateExecutionData1690000000010 implements ReversibleMigration { )`, ); - await queryRunner.query( - `INSERT INTO "${tablePrefix}execution_data" ( - "executionId", - "workflowData", - "data") - SELECT "id", "workflowData", "data" FROM "${tablePrefix}execution_entity" - `, + await copyTable( + { tablePrefix, queryRunner }, + 'execution_entity', + 'execution_data', + ['id', 'workflowData', 'data'], + ['executionId', 'workflowData', 'data'], ); await queryRunner.query( diff --git a/packages/cli/src/databases/migrations/sqlite/index.ts b/packages/cli/src/databases/migrations/sqlite/index.ts index bedd8cd14a..b0a98fa10b 100644 --- a/packages/cli/src/databases/migrations/sqlite/index.ts +++ b/packages/cli/src/databases/migrations/sqlite/index.ts @@ -37,6 +37,7 @@ import { CreateVariables1677501636752 } from './1677501636752-CreateVariables'; import { AddUserActivatedProperty1681134145996 } from './1681134145996-AddUserActivatedProperty'; import { MigrateIntegerKeysToString1690000000002 } from './1690000000002-MigrateIntegerKeysToString'; import { SeparateExecutionData1690000000010 } from './1690000000010-SeparateExecutionData'; +import { RemoveSkipOwnerSetup1681134145997 } from './1681134145997-RemoveSkipOwnerSetup'; const sqliteMigrations: Migration[] = [ InitialMigration1588102412422, @@ -77,6 
+78,7 @@ const sqliteMigrations: Migration[] = [ AddUserActivatedProperty1681134145996, MigrateIntegerKeysToString1690000000002, SeparateExecutionData1690000000010, + RemoveSkipOwnerSetup1681134145997, ]; export { sqliteMigrations }; diff --git a/packages/cli/src/databases/repositories/execution.repository.ts b/packages/cli/src/databases/repositories/execution.repository.ts index 6c69d316d2..5db7130aca 100644 --- a/packages/cli/src/databases/repositories/execution.repository.ts +++ b/packages/cli/src/databases/repositories/execution.repository.ts @@ -240,7 +240,7 @@ export class ExecutionRepository extends Repository { async deleteExecution(executionId: string) { // TODO: Should this be awaited? Should we add a catch in case it fails? - await BinaryDataManager.getInstance().deleteBinaryDataByExecutionId(executionId); + await BinaryDataManager.getInstance().deleteBinaryDataByExecutionIds([executionId]); return this.delete({ id: executionId }); } @@ -392,17 +392,14 @@ export class ExecutionRepository extends Repository { return; } - const idsToDelete = executions.map(({ id }) => id); - + const executionIds = executions.map(({ id }) => id); const binaryDataManager = BinaryDataManager.getInstance(); - await Promise.all( - idsToDelete.map(async (id) => binaryDataManager.deleteBinaryDataByExecutionId(id)), - ); + await binaryDataManager.deleteBinaryDataByExecutionIds(executionIds); do { // Delete in batches to avoid "SQLITE_ERROR: Expression tree is too large (maximum depth 1000)" error - const batch = idsToDelete.splice(0, 500); + const batch = executionIds.splice(0, 500); await this.delete(batch); - } while (idsToDelete.length > 0); + } while (executionIds.length > 0); } } diff --git a/packages/cli/src/databases/repositories/settings.repository.ts b/packages/cli/src/databases/repositories/settings.repository.ts index d0ae091ce6..b7a802c2f4 100644 --- a/packages/cli/src/databases/repositories/settings.repository.ts +++ b/packages/cli/src/databases/repositories/settings.repository.ts @@ -1,10 +1,41 @@ import { Service } from 'typedi'; import { DataSource, Repository } from 'typeorm'; import { Settings } from '../entities/Settings'; +import config from '@/config'; @Service() export class SettingsRepository extends Repository { constructor(dataSource: DataSource) { super(Settings, dataSource.manager); } + + async dismissBanner({ bannerName }: { bannerName: string }): Promise<{ success: boolean }> { + const dismissedBannersSetting = await this.findOneBy({ key: 'ui.banners.dismissed' }); + + if (dismissedBannersSetting) { + try { + const dismissedBanners = JSON.parse(dismissedBannersSetting.value) as string[]; + await this.saveSetting( + 'ui.banners.dismissed', + JSON.stringify([...dismissedBanners, bannerName]), + ); + return { success: true }; + } catch (error) { + return { success: false }; + } + } + return { success: false }; + } + + async saveSetting(key: string, value: string, loadOnStartup = true) { + const setting = await this.findOneBy({ key }); + + if (setting) { + await this.update({ key }, { value, loadOnStartup }); + } else { + await this.save({ key, value, loadOnStartup }); + } + + if (loadOnStartup) config.set('ui.banners.dismissed', value); + } } diff --git a/packages/cli/src/databases/types.ts b/packages/cli/src/databases/types.ts index 19d4813163..ada0268756 100644 --- a/packages/cli/src/databases/types.ts +++ b/packages/cli/src/databases/types.ts @@ -12,15 +12,19 @@ export interface MigrationContext { migrationName: string; } -type MigrationFn = (ctx: MigrationContext) => Promise; 
+export type MigrationFn = (ctx: MigrationContext) => Promise; -export interface ReversibleMigration { +export interface BaseMigration { up: MigrationFn; + down?: MigrationFn | never; + transaction?: false; +} + +export interface ReversibleMigration extends BaseMigration { down: MigrationFn; } -export interface IrreversibleMigration { - up: MigrationFn; +export interface IrreversibleMigration extends BaseMigration { down?: never; } diff --git a/packages/cli/src/databases/utils/migrationHelpers.ts b/packages/cli/src/databases/utils/migrationHelpers.ts index 0221e7345d..bbb626395c 100644 --- a/packages/cli/src/databases/utils/migrationHelpers.ts +++ b/packages/cli/src/databases/utils/migrationHelpers.ts @@ -1,11 +1,10 @@ -/* eslint-disable no-await-in-loop */ import { readFileSync, rmSync } from 'fs'; import { UserSettings } from 'n8n-core'; import type { QueryRunner } from 'typeorm/query-runner/QueryRunner'; import config from '@/config'; import { getLogger } from '@/Logger'; import { inTest } from '@/constants'; -import type { Migration } from '@db/types'; +import type { BaseMigration, Migration, MigrationContext, MigrationFn } from '@db/types'; const logger = getLogger(); @@ -39,50 +38,114 @@ export function loadSurveyFromDisk(): string | null { } } -let logFinishTimeout: NodeJS.Timeout; +let runningMigrations = false; -export function logMigrationStart(migrationName: string, disableLogging = inTest): void { - if (disableLogging) return; +function logMigrationStart(migrationName: string): void { + if (inTest) return; - if (!logFinishTimeout) { + if (!runningMigrations) { logger.warn('Migrations in progress, please do NOT stop the process.'); + runningMigrations = true; } logger.debug(`Starting migration ${migrationName}`); - - clearTimeout(logFinishTimeout); } -export function logMigrationEnd(migrationName: string, disableLogging = inTest): void { - if (disableLogging) return; +function logMigrationEnd(migrationName: string): void { + if (inTest) return; logger.debug(`Finished migration ${migrationName}`); - - logFinishTimeout = setTimeout(() => { - logger.warn('Migrations finished.'); - }, 100); } +const runDisablingForeignKeys = async ( + migration: BaseMigration, + context: MigrationContext, + fn: MigrationFn, +) => { + const { dbType, queryRunner } = context; + if (dbType !== 'sqlite') throw new Error('Disabling transactions only available in sqlite'); + await queryRunner.query('PRAGMA foreign_keys=OFF'); + await queryRunner.startTransaction(); + try { + await fn.call(migration, context); + await queryRunner.commitTransaction(); + } catch (e) { + try { + await queryRunner.rollbackTransaction(); + } catch {} + throw e; + } finally { + await queryRunner.query('PRAGMA foreign_keys=ON'); + } +}; + export const wrapMigration = (migration: Migration) => { const dbType = config.getEnv('database.type'); const dbName = config.getEnv(`database.${dbType === 'mariadb' ? 
'mysqldb' : dbType}.database`); const tablePrefix = config.getEnv('database.tablePrefix'); const migrationName = migration.name; - const context = { tablePrefix, dbType, dbName, migrationName }; + const context: Omit<MigrationContext, 'queryRunner'> = { + tablePrefix, + dbType, + dbName, + migrationName, + logger, + }; const { up, down } = migration.prototype; Object.assign(migration.prototype, { - async up(queryRunner: QueryRunner) { + async up(this: BaseMigration, queryRunner: QueryRunner) { logMigrationStart(migrationName); - await up.call(this, { queryRunner, ...context }); + if (this.transaction === false) { + await runDisablingForeignKeys(this, { queryRunner, ...context }, up); + } else { + await up.call(this, { queryRunner, ...context }); + } logMigrationEnd(migrationName); }, - async down(queryRunner: QueryRunner) { - await down?.call(this, { queryRunner, ...context }); + async down(this: BaseMigration, queryRunner: QueryRunner) { + if (down) { + if (this.transaction === false) { + await runDisablingForeignKeys(this, { queryRunner, ...context }, down); + } else { + await down.call(this, { queryRunner, ...context }); + } + } }, }); }; +export const copyTable = async ( + { tablePrefix, queryRunner }: Pick<MigrationContext, 'tablePrefix' | 'queryRunner'>, + fromTable: string, + toTable: string, + fromFields: string[] = [], + toFields: string[] = [], + batchSize = 10, +) => { + const driver = queryRunner.connection.driver; + fromTable = driver.escape(`${tablePrefix}${fromTable}`); + toTable = driver.escape(`${tablePrefix}${toTable}`); + const fromFieldsStr = fromFields.length + ? fromFields.map((f) => driver.escape(f)).join(', ') + : '*'; + const toFieldsStr = toFields.length + ? `(${toFields.map((f) => driver.escape(f)).join(', ')})` + : ''; + + const total = await queryRunner + .query(`SELECT COUNT(*) as count from ${fromTable}`) + .then((rows: Array<{ count: number }>) => rows[0].count); + + let migrated = 0; + while (migrated < total) { + await queryRunner.query( + `INSERT INTO ${toTable} ${toFieldsStr} SELECT ${fromFieldsStr} FROM ${fromTable} LIMIT ${migrated}, ${batchSize}`, + ); + migrated += batchSize; + } +}; + function batchQuery(query: string, limit: number, offset = 0): string { return ` ${query} diff --git a/packages/cli/src/environments/sourceControl/constants.ts b/packages/cli/src/environments/sourceControl/constants.ts index c3f8c2390d..3ef023f349 100644 --- a/packages/cli/src/environments/sourceControl/constants.ts +++ b/packages/cli/src/environments/sourceControl/constants.ts @@ -2,9 +2,10 @@ export const SOURCE_CONTROL_PREFERENCES_DB_KEY = 'features.sourceControl'; export const SOURCE_CONTROL_GIT_FOLDER = 'git'; export const SOURCE_CONTROL_GIT_KEY_COMMENT = 'n8n deploy key'; export const SOURCE_CONTROL_WORKFLOW_EXPORT_FOLDER = 'workflows'; -export const SOURCE_CONTROL_CREDENTIAL_EXPORT_FOLDER = 'credentials'; -export const SOURCE_CONTROL_VARIABLES_EXPORT_FILE = 'variables.json'; +export const SOURCE_CONTROL_CREDENTIAL_EXPORT_FOLDER = 'credential_stubs'; +export const SOURCE_CONTROL_VARIABLES_EXPORT_FILE = 'variable_stubs.json'; export const SOURCE_CONTROL_TAGS_EXPORT_FILE = 'tags.json'; +export const SOURCE_CONTROL_OWNERS_EXPORT_FILE = 'owners.json'; export const SOURCE_CONTROL_SSH_FOLDER = 'ssh'; export const SOURCE_CONTROL_SSH_KEY_NAME = 'key'; export const SOURCE_CONTROL_DEFAULT_BRANCH = 'main'; diff --git a/packages/cli/src/environments/sourceControl/middleware/sourceControlEnabledMiddleware.ee.ts b/packages/cli/src/environments/sourceControl/middleware/sourceControlEnabledMiddleware.ee.ts index 940d10ad02..271db3805a 100644 ---
a/packages/cli/src/environments/sourceControl/middleware/sourceControlEnabledMiddleware.ee.ts +++ b/packages/cli/src/environments/sourceControl/middleware/sourceControlEnabledMiddleware.ee.ts @@ -1,6 +1,6 @@ import type { RequestHandler } from 'express'; +import { Container } from 'typedi'; import { isSourceControlLicensed } from '../sourceControlHelper.ee'; -import Container from 'typedi'; import { SourceControlPreferencesService } from '../sourceControlPreferences.service.ee'; export const sourceControlLicensedAndEnabledMiddleware: RequestHandler = (req, res, next) => { diff --git a/packages/cli/src/environments/sourceControl/sourceControl.controller.ee.ts b/packages/cli/src/environments/sourceControl/sourceControl.controller.ee.ts index 7907e63127..7c0497d4a6 100644 --- a/packages/cli/src/environments/sourceControl/sourceControl.controller.ee.ts +++ b/packages/cli/src/environments/sourceControl/sourceControl.controller.ee.ts @@ -1,3 +1,6 @@ +import express from 'express'; +import { Service } from 'typedi'; +import type { PullResult, PushResult, StatusResult } from 'simple-git'; import { Authorized, Get, Post, Patch, RestController } from '@/decorators'; import { sourceControlLicensedMiddleware, @@ -7,13 +10,12 @@ import { SourceControlService } from './sourceControl.service.ee'; import { SourceControlRequest } from './types/requests'; import type { SourceControlPreferences } from './types/sourceControlPreferences'; import { BadRequestError } from '@/ResponseHelper'; -import type { PullResult, PushResult, StatusResult } from 'simple-git'; -import express from 'express'; import type { ImportResult } from './types/importResult'; import { SourceControlPreferencesService } from './sourceControlPreferences.service.ee'; import type { SourceControlledFile } from './types/sourceControlledFile'; import { SOURCE_CONTROL_API_ROOT, SOURCE_CONTROL_DEFAULT_BRANCH } from './constants'; +@Service() @RestController(`/${SOURCE_CONTROL_API_ROOT}`) export class SourceControlController { constructor( diff --git a/packages/cli/src/environments/sourceControl/sourceControl.service.ee.ts b/packages/cli/src/environments/sourceControl/sourceControl.service.ee.ts index d1c5a3ccf6..8dea0e0771 100644 --- a/packages/cli/src/environments/sourceControl/sourceControl.service.ee.ts +++ b/packages/cli/src/environments/sourceControl/sourceControl.service.ee.ts @@ -32,6 +32,9 @@ import type { import { SourceControlPreferencesService } from './sourceControlPreferences.service.ee'; import { writeFileSync } from 'fs'; import { SourceControlImportService } from './sourceControlImport.service.ee'; +import type { WorkflowEntity } from '@db/entities/WorkflowEntity'; +import type { CredentialsEntity } from '@db/entities/CredentialsEntity'; + @Service() export class SourceControlService { private sshKeyName: string; @@ -252,6 +255,7 @@ export class SourceControlService { ...status.modified, ]); } + mergedFileNames.add(this.sourceControlExportService.getOwnersPath()); const deletedFiles = new Set(status.deleted); deletedFiles.forEach((e) => mergedFileNames.delete(e)); await this.unstage(); @@ -285,6 +289,20 @@ export class SourceControlService { let conflict = false; let status: SourceControlledFileStatus = 'unknown'; let type: SourceControlledFileType = 'file'; + let updatedAt = ''; + + const allWorkflows: Map = new Map(); + (await Db.collections.Workflow.find({ select: ['id', 'name', 'updatedAt'] })).forEach( + (workflow) => { + allWorkflows.set(workflow.id, workflow); + }, + ); + const allCredentials: Map = new Map(); + (await 
Db.collections.Credentials.find({ select: ['id', 'name', 'updatedAt'] })).forEach( + (credential) => { + allCredentials.set(credential.id, credential); + }, + ); // initialize status from git status result if (statusResult.not_added.find((e) => e === fileName)) status = 'new'; @@ -303,14 +321,14 @@ export class SourceControlService { .replace(/[\/,\\]/, '') .replace('.json', ''); if (location === 'remote') { - const existingWorkflow = await Db.collections.Workflow.find({ - where: { id }, - }); - if (existingWorkflow?.length > 0) { - name = existingWorkflow[0].name; + const existingWorkflow = allWorkflows.get(id); + if (existingWorkflow) { + name = existingWorkflow.name; + updatedAt = existingWorkflow.updatedAt.toISOString(); } } else { name = '(deleted)'; + // todo: once we have audit log, this deletion date could be looked up } } else { const workflow = await this.sourceControlExportService.getWorkflowFromFile(fileName); @@ -326,6 +344,11 @@ export class SourceControlService { id = workflow.id; name = workflow.name; } + const existingWorkflow = allWorkflows.get(id); + if (existingWorkflow) { + name = existingWorkflow.name; + updatedAt = existingWorkflow.updatedAt.toISOString(); + } } } if (fileName.startsWith(SOURCE_CONTROL_CREDENTIAL_EXPORT_FOLDER)) { @@ -336,11 +359,10 @@ export class SourceControlService { .replace(/[\/,\\]/, '') .replace('.json', ''); if (location === 'remote') { - const existingCredential = await Db.collections.Credentials.find({ - where: { id }, - }); - if (existingCredential?.length > 0) { - name = existingCredential[0].name; + const existingCredential = allCredentials.get(id); + if (existingCredential) { + name = existingCredential.name; + updatedAt = existingCredential.updatedAt.toISOString(); } } else { name = '(deleted)'; @@ -359,6 +381,11 @@ export class SourceControlService { id = credential.id; name = credential.name; } + const existingCredential = allCredentials.get(id); + if (existingCredential) { + name = existingCredential.name; + updatedAt = existingCredential.updatedAt.toISOString(); + } } } @@ -369,9 +396,15 @@ export class SourceControlService { } if (fileName.startsWith(SOURCE_CONTROL_TAGS_EXPORT_FILE)) { + const lastUpdatedTag = await Db.collections.Tag.find({ + order: { updatedAt: 'DESC' }, + take: 1, + select: ['updatedAt'], + }); id = 'tags'; name = 'tags'; type = 'tags'; + updatedAt = lastUpdatedTag[0]?.updatedAt.toISOString(); } if (!id) return; @@ -384,6 +417,7 @@ export class SourceControlService { status, location, conflict, + updatedAt, }; } diff --git a/packages/cli/src/environments/sourceControl/sourceControlExport.service.ee.ts b/packages/cli/src/environments/sourceControl/sourceControlExport.service.ee.ts index 1352c8c7b8..12806dd67a 100644 --- a/packages/cli/src/environments/sourceControl/sourceControlExport.service.ee.ts +++ b/packages/cli/src/environments/sourceControl/sourceControlExport.service.ee.ts @@ -3,6 +3,7 @@ import path from 'path'; import { SOURCE_CONTROL_CREDENTIAL_EXPORT_FOLDER, SOURCE_CONTROL_GIT_FOLDER, + SOURCE_CONTROL_OWNERS_EXPORT_FILE, SOURCE_CONTROL_TAGS_EXPORT_FILE, SOURCE_CONTROL_VARIABLES_EXPORT_FILE, SOURCE_CONTROL_WORKFLOW_EXPORT_FOLDER, @@ -17,7 +18,7 @@ import type { IWorkflowToImport } from '@/Interfaces'; import type { ExportableWorkflow } from './types/exportableWorkflow'; import type { ExportableCredential } from './types/exportableCredential'; import type { ExportResult } from './types/exportResult'; -import type { SharedWorkflow } from '@/databases/entities/SharedWorkflow'; +import type { 
SharedWorkflow } from '@db/entities/SharedWorkflow'; import { sourceControlFoldersExistCheck } from './sourceControlHelper.ee'; @Service() @@ -50,6 +51,10 @@ export class SourceControlExportService { return path.join(this.gitFolder, SOURCE_CONTROL_TAGS_EXPORT_FILE); } + getOwnersPath(): string { + return path.join(this.gitFolder, SOURCE_CONTROL_OWNERS_EXPORT_FILE); + } + getVariablesPath(): string { return path.join(this.gitFolder, SOURCE_CONTROL_VARIABLES_EXPORT_FILE); } @@ -160,7 +165,6 @@ export class SourceControlExportService { connections: e.workflow?.connections, settings: e.workflow?.settings, triggerCount: e.workflow?.triggerCount, - owner: e.user.email, versionId: e.workflow?.versionId, }; LoggerProxy.debug(`Writing workflow ${e.workflowId} to ${fileName}`); @@ -186,6 +190,11 @@ export class SourceControlExportService { const removedFiles = await this.rmDeletedWorkflowsFromExportFolder(sharedWorkflows); // write the workflows to the export folder as json files await this.writeExportableWorkflowsToExportFolder(sharedWorkflows); + // write list of owners to file + const ownersFileName = this.getOwnersPath(); + const owners: Record = {}; + sharedWorkflows.forEach((e) => (owners[e.workflowId] = e.user.email)); + await fsWriteFile(ownersFileName, JSON.stringify(owners, null, 2)); return { count: sharedWorkflows.length, folder: this.workflowExportFolder, @@ -280,7 +289,10 @@ export class SourceControlExportService { } else if (typeof data[key] === 'object') { data[key] = this.replaceCredentialData(data[key] as ICredentialDataDecryptedObject); } else if (typeof data[key] === 'string') { - data[key] = (data[key] as string)?.startsWith('={{') ? data[key] : ''; + data[key] = + (data[key] as string)?.startsWith('={{') && (data[key] as string)?.includes('$secret') + ? 
data[key] + : ''; } else if (typeof data[key] === 'number') { // TODO: leaving numbers in for now, but maybe we should remove them continue; diff --git a/packages/cli/src/environments/sourceControl/sourceControlHelper.ee.ts b/packages/cli/src/environments/sourceControl/sourceControlHelper.ee.ts index 656fe4ab74..18b699330f 100644 --- a/packages/cli/src/environments/sourceControl/sourceControlHelper.ee.ts +++ b/packages/cli/src/environments/sourceControl/sourceControlHelper.ee.ts @@ -1,10 +1,10 @@ -import Container from 'typedi'; -import { License } from '../../License'; +import { Container } from 'typedi'; import { generateKeyPairSync } from 'crypto'; import sshpk from 'sshpk'; -import type { KeyPair } from './types/keyPair'; import { constants as fsConstants, mkdirSync, accessSync } from 'fs'; import { LoggerProxy } from 'n8n-workflow'; +import { License } from '@/License'; +import type { KeyPair } from './types/keyPair'; import { SOURCE_CONTROL_GIT_KEY_COMMENT } from './constants'; export function sourceControlFoldersExistCheck(folders: string[]) { diff --git a/packages/cli/src/environments/sourceControl/sourceControlImport.service.ee.ts b/packages/cli/src/environments/sourceControl/sourceControlImport.service.ee.ts index af4d5946d8..f0977df1d6 100644 --- a/packages/cli/src/environments/sourceControl/sourceControlImport.service.ee.ts +++ b/packages/cli/src/environments/sourceControl/sourceControlImport.service.ee.ts @@ -1,8 +1,9 @@ -import Container, { Service } from 'typedi'; +import { Container, Service } from 'typedi'; import path from 'path'; import { SOURCE_CONTROL_CREDENTIAL_EXPORT_FOLDER, SOURCE_CONTROL_GIT_FOLDER, + SOURCE_CONTROL_OWNERS_EXPORT_FILE, SOURCE_CONTROL_TAGS_EXPORT_FILE, SOURCE_CONTROL_VARIABLES_EXPORT_FILE, SOURCE_CONTROL_WORKFLOW_EXPORT_FOLDER, @@ -14,15 +15,12 @@ import { readFile as fsReadFile } from 'fs/promises'; import { Credentials, UserSettings } from 'n8n-core'; import type { IWorkflowToImport } from '@/Interfaces'; import type { ExportableCredential } from './types/exportableCredential'; -import { SharedWorkflow } from '@/databases/entities/SharedWorkflow'; -import { CredentialsEntity } from '@/databases/entities/CredentialsEntity'; -import { Variables } from '@/databases/entities/Variables'; +import { Variables } from '@db/entities/Variables'; import type { ImportResult } from './types/importResult'; import { UM_FIX_INSTRUCTION } from '@/commands/BaseCommand'; -import { SharedCredentials } from '@/databases/entities/SharedCredentials'; -import { WorkflowEntity } from '@/databases/entities/WorkflowEntity'; -import { WorkflowTagMapping } from '@/databases/entities/WorkflowTagMapping'; -import { TagEntity } from '@/databases/entities/TagEntity'; +import { SharedCredentials } from '@db/entities/SharedCredentials'; +import type { WorkflowTagMapping } from '@db/entities/WorkflowTagMapping'; +import type { TagEntity } from '@db/entities/TagEntity'; import { ActiveWorkflowRunner } from '../../ActiveWorkflowRunner'; import type { SourceControllPullOptions } from './types/sourceControlPullWorkFolder'; import { In } from 'typeorm'; @@ -94,56 +92,54 @@ export class SourceControlImportService { const ownerGlobalRole = await this.getOwnerGlobalRole(); const encryptionKey = await UserSettings.getEncryptionKey(); let importCredentialsResult: Array<{ id: string; name: string; type: string }> = []; - await Db.transaction(async (transactionManager) => { - importCredentialsResult = await Promise.all( - credentialFiles.map(async (file) => { - LoggerProxy.debug(`Importing 
credentials file ${file}`); - const credential = jsonParse( - await fsReadFile(file, { encoding: 'utf8' }), - ); - const existingCredential = existingCredentials.find( - (e) => e.id === credential.id && e.type === credential.type, - ); - const sharedOwner = await Db.collections.SharedCredentials.findOne({ - select: ['userId'], - where: { - credentialsId: credential.id, - roleId: In([ownerCredentialRole.id, ownerGlobalRole.id]), - }, - }); + importCredentialsResult = await Promise.all( + credentialFiles.map(async (file) => { + LoggerProxy.debug(`Importing credentials file ${file}`); + const credential = jsonParse( + await fsReadFile(file, { encoding: 'utf8' }), + ); + const existingCredential = existingCredentials.find( + (e) => e.id === credential.id && e.type === credential.type, + ); + const sharedOwner = await Db.collections.SharedCredentials.findOne({ + select: ['userId'], + where: { + credentialsId: credential.id, + roleId: In([ownerCredentialRole.id, ownerGlobalRole.id]), + }, + }); - const { name, type, data, id, nodesAccess } = credential; - const newCredentialObject = new Credentials({ id, name }, type, []); - if (existingCredential?.data) { - newCredentialObject.data = existingCredential.data; - } else { - newCredentialObject.setData(data, encryptionKey); - } - newCredentialObject.nodesAccess = nodesAccess || existingCredential?.nodesAccess || []; + const { name, type, data, id, nodesAccess } = credential; + const newCredentialObject = new Credentials({ id, name }, type, []); + if (existingCredential?.data) { + newCredentialObject.data = existingCredential.data; + } else { + newCredentialObject.setData(data, encryptionKey); + } + newCredentialObject.nodesAccess = nodesAccess || existingCredential?.nodesAccess || []; - LoggerProxy.debug(`Updating credential id ${newCredentialObject.id as string}`); - await transactionManager.upsert(CredentialsEntity, newCredentialObject, ['id']); + LoggerProxy.debug(`Updating credential id ${newCredentialObject.id as string}`); + await Db.collections.Credentials.upsert(newCredentialObject, ['id']); - if (!sharedOwner) { - const newSharedCredential = new SharedCredentials(); - newSharedCredential.credentialsId = newCredentialObject.id as string; - newSharedCredential.userId = userId; - newSharedCredential.roleId = ownerGlobalRole.id; + if (!sharedOwner) { + const newSharedCredential = new SharedCredentials(); + newSharedCredential.credentialsId = newCredentialObject.id as string; + newSharedCredential.userId = userId; + newSharedCredential.roleId = ownerGlobalRole.id; - await transactionManager.upsert(SharedCredentials, { ...newSharedCredential }, [ - 'credentialsId', - 'userId', - ]); - } + await Db.collections.SharedCredentials.upsert({ ...newSharedCredential }, [ + 'credentialsId', + 'userId', + ]); + } - return { - id: newCredentialObject.id as string, - name: newCredentialObject.name, - type: newCredentialObject.type, - }; - }), - ); - }); + return { + id: newCredentialObject.id as string, + name: newCredentialObject.name, + type: newCredentialObject.type, + }; + }), + ); return importCredentialsResult.filter((e) => e !== undefined); } @@ -224,35 +220,31 @@ export class SourceControlImportService { ).map((e) => e.id), ); - await Db.transaction(async (transactionManager) => { - await Promise.all( - mappedTags.tags.map(async (tag) => { - await transactionManager.upsert( - TagEntity, - { - ...tag, - }, - { - skipUpdateIfNoValuesChanged: true, - conflictPaths: { id: true }, - }, - ); - }), - ); - await Promise.all( - mappedTags.mappings.map(async 
(mapping) => { - if (!existingWorkflowIds.has(String(mapping.workflowId))) return; - await transactionManager.upsert( - WorkflowTagMapping, - { tagId: String(mapping.tagId), workflowId: String(mapping.workflowId) }, - { - skipUpdateIfNoValuesChanged: true, - conflictPaths: { tagId: true, workflowId: true }, - }, - ); - }), - ); - }); + await Promise.all( + mappedTags.tags.map(async (tag) => { + await Db.collections.Tag.upsert( + { + ...tag, + }, + { + skipUpdateIfNoValuesChanged: true, + conflictPaths: { id: true }, + }, + ); + }), + ); + await Promise.all( + mappedTags.mappings.map(async (mapping) => { + if (!existingWorkflowIds.has(String(mapping.workflowId))) return; + await Db.collections.WorkflowTagMapping.upsert( + { tagId: String(mapping.tagId), workflowId: String(mapping.workflowId) }, + { + skipUpdateIfNoValuesChanged: true, + conflictPaths: { tagId: true, workflowId: true }, + }, + ); + }), + ); return mappedTags; } return { tags: [], mappings: [] }; @@ -273,74 +265,118 @@ export class SourceControlImportService { const ownerWorkflowRole = await this.getOwnerWorkflowRole(); const workflowRunner = Container.get(ActiveWorkflowRunner); - let importWorkflowsResult = new Array<{ id: string; name: string }>(); - await Db.transaction(async (transactionManager) => { - importWorkflowsResult = await Promise.all( - workflowFiles.map(async (file) => { - LoggerProxy.debug(`Parsing workflow file ${file}`); - const importedWorkflow = jsonParse( - await fsReadFile(file, { encoding: 'utf8' }), + // read owner file if it exists and map workflow ids to owner emails + // then find existing users with those emails or fallback to passed in userId + const ownerRecords: Record = {}; + const ownersFile = await glob(SOURCE_CONTROL_OWNERS_EXPORT_FILE, { + cwd: this.gitFolder, + absolute: true, + }); + if (ownersFile.length > 0) { + LoggerProxy.debug(`Reading workflow owners from file ${ownersFile[0]}`); + const ownerEmails = jsonParse>( + await fsReadFile(ownersFile[0], { encoding: 'utf8' }), + { fallbackValue: {} }, + ); + if (ownerEmails) { + const uniqueOwnerEmails = new Set(Object.values(ownerEmails)); + const existingUsers = await Db.collections.User.find({ + where: { email: In([...uniqueOwnerEmails]) }, + }); + Object.keys(ownerEmails).forEach((workflowId) => { + ownerRecords[workflowId] = + existingUsers.find((e) => e.email === ownerEmails[workflowId])?.id ?? userId; + }); + } + } + + let importWorkflowsResult = new Array<{ id: string; name: string } | undefined>(); + + const allSharedWorkflows = await Db.collections.SharedWorkflow.find({ + select: ['workflowId', 'roleId', 'userId'], + }); + + importWorkflowsResult = await Promise.all( + workflowFiles.map(async (file) => { + LoggerProxy.debug(`Parsing workflow file ${file}`); + const importedWorkflow = jsonParse( + await fsReadFile(file, { encoding: 'utf8' }), + ); + if (!importedWorkflow?.id) { + return; + } + const existingWorkflow = existingWorkflows.find((e) => e.id === importedWorkflow.id); + if (existingWorkflow?.versionId === importedWorkflow.versionId) { + LoggerProxy.debug( + `Skipping import of workflow ${importedWorkflow.id ?? 'n/a'} - versionId is up to date`, ); - const existingWorkflow = existingWorkflows.find((e) => e.id === importedWorkflow.id); - if (existingWorkflow?.versionId === importedWorkflow.versionId) { - LoggerProxy.debug( - `Skipping import of workflow ${ - importedWorkflow.id ?? 'n/a' - } - versionId is up to date`, - ); - return { - id: importedWorkflow.id ?? 
'n/a', - name: 'skipped', - }; - } - LoggerProxy.debug(`Importing workflow ${importedWorkflow.id ?? 'n/a'}`); - importedWorkflow.active = existingWorkflow?.active ?? false; - LoggerProxy.debug(`Updating workflow id ${importedWorkflow.id ?? 'new'}`); - const upsertResult = await transactionManager.upsert( - WorkflowEntity, - { ...importedWorkflow }, - ['id'], - ); - if (upsertResult?.identifiers?.length !== 1) { - throw new Error(`Failed to upsert workflow ${importedWorkflow.id ?? 'new'}`); - } - // due to sequential Ids, this may have changed during the insert - // TODO: once IDs are unique and we removed autoincrement, remove this - const upsertedWorkflowId = upsertResult.identifiers[0].id as string; - await transactionManager.upsert( - SharedWorkflow, + return { + id: importedWorkflow.id ?? 'n/a', + name: 'skipped', + }; + } + LoggerProxy.debug(`Importing workflow ${importedWorkflow.id ?? 'n/a'}`); + importedWorkflow.active = existingWorkflow?.active ?? false; + LoggerProxy.debug(`Updating workflow id ${importedWorkflow.id ?? 'new'}`); + const upsertResult = await Db.collections.Workflow.upsert({ ...importedWorkflow }, ['id']); + if (upsertResult?.identifiers?.length !== 1) { + throw new Error(`Failed to upsert workflow ${importedWorkflow.id ?? 'new'}`); + } + // Update workflow owner to the user who exported the workflow, if that user exists + // in the instance, and the workflow doesn't already have an owner + const workflowOwnerId = ownerRecords[importedWorkflow.id] ?? userId; + const existingSharedWorkflowOwnerByRoleId = allSharedWorkflows.find( + (e) => e.workflowId === importedWorkflow.id && e.roleId === ownerWorkflowRole.id, + ); + const existingSharedWorkflowOwnerByUserId = allSharedWorkflows.find( + (e) => e.workflowId === importedWorkflow.id && e.userId === workflowOwnerId, + ); + if (!existingSharedWorkflowOwnerByUserId && !existingSharedWorkflowOwnerByRoleId) { + // no owner exists yet, so create one + await Db.collections.SharedWorkflow.insert({ + workflowId: importedWorkflow.id, + userId: workflowOwnerId, + roleId: ownerWorkflowRole.id, + }); + } else if (existingSharedWorkflowOwnerByRoleId) { + // skip, because the workflow already has a global owner + } else if (existingSharedWorkflowOwnerByUserId && !existingSharedWorkflowOwnerByRoleId) { + // if the worklflow has a non-global owner that is referenced by the owner file, + // and no existing global owner, update the owner to the user referenced in the owner file + await Db.collections.SharedWorkflow.update( + { + workflowId: importedWorkflow.id, + userId: workflowOwnerId, + }, { - workflowId: upsertedWorkflowId, - userId, roleId: ownerWorkflowRole.id, }, - ['workflowId', 'userId'], ); - - if (existingWorkflow?.active) { - try { - // remove active pre-import workflow - LoggerProxy.debug(`Deactivating workflow id ${existingWorkflow.id}`); - await workflowRunner.remove(existingWorkflow.id); - // try activating the imported workflow - LoggerProxy.debug(`Reactivating workflow id ${existingWorkflow.id}`); - await workflowRunner.add(existingWorkflow.id, 'activate'); - } catch (error) { - LoggerProxy.error( - `Failed to activate workflow ${existingWorkflow.id}`, - error as Error, - ); - } + } + if (existingWorkflow?.active) { + try { + // remove active pre-import workflow + LoggerProxy.debug(`Deactivating workflow id ${existingWorkflow.id}`); + await workflowRunner.remove(existingWorkflow.id); + // try activating the imported workflow + LoggerProxy.debug(`Reactivating workflow id ${existingWorkflow.id}`); + await 
workflowRunner.add(existingWorkflow.id, 'activate'); + } catch (error) { + LoggerProxy.error(`Failed to activate workflow ${existingWorkflow.id}`, error as Error); } + } - return { - id: importedWorkflow.id ?? 'unknown', - name: file, - }; - }), - ); - }); - return importWorkflowsResult; + return { + id: importedWorkflow.id ?? 'unknown', + name: file, + }; + }), + ); + + return importWorkflowsResult.filter((e) => e !== undefined) as Array<{ + id: string; + name: string; + }>; } async importFromWorkFolder(options: SourceControllPullOptions): Promise { diff --git a/packages/cli/src/environments/sourceControl/types/exportableWorkflow.ts b/packages/cli/src/environments/sourceControl/types/exportableWorkflow.ts index ca0f7087f9..15d405fbb7 100644 --- a/packages/cli/src/environments/sourceControl/types/exportableWorkflow.ts +++ b/packages/cli/src/environments/sourceControl/types/exportableWorkflow.ts @@ -8,6 +8,5 @@ export interface ExportableWorkflow { connections: IConnections; settings?: IWorkflowSettings; triggerCount: number; - owner: string; versionId: string; } diff --git a/packages/cli/src/environments/sourceControl/types/importResult.ts b/packages/cli/src/environments/sourceControl/types/importResult.ts index 541e38d0e9..2d90b40904 100644 --- a/packages/cli/src/environments/sourceControl/types/importResult.ts +++ b/packages/cli/src/environments/sourceControl/types/importResult.ts @@ -1,5 +1,5 @@ -import type { TagEntity } from '@/databases/entities/TagEntity'; -import type { WorkflowTagMapping } from '@/databases/entities/WorkflowTagMapping'; +import type { TagEntity } from '@db/entities/TagEntity'; +import type { WorkflowTagMapping } from '@db/entities/WorkflowTagMapping'; export interface ImportResult { workflows: Array<{ diff --git a/packages/cli/src/environments/sourceControl/types/sourceControlledFile.ts b/packages/cli/src/environments/sourceControl/types/sourceControlledFile.ts index 12b99457b7..165621ebc6 100644 --- a/packages/cli/src/environments/sourceControl/types/sourceControlledFile.ts +++ b/packages/cli/src/environments/sourceControl/types/sourceControlledFile.ts @@ -16,4 +16,5 @@ export type SourceControlledFile = { status: SourceControlledFileStatus; location: SourceControlledFileLocation; conflict: boolean; + updatedAt: string; }; diff --git a/packages/cli/src/environments/variables/enviromentHelpers.ts b/packages/cli/src/environments/variables/enviromentHelpers.ts index d7cc122492..7868f22715 100644 --- a/packages/cli/src/environments/variables/enviromentHelpers.ts +++ b/packages/cli/src/environments/variables/enviromentHelpers.ts @@ -1,5 +1,5 @@ +import { Container } from 'typedi'; import { License } from '@/License'; -import Container from 'typedi'; export function isVariablesEnabled(): boolean { const license = Container.get(License); diff --git a/packages/cli/src/environments/variables/variables.service.ee.ts b/packages/cli/src/environments/variables/variables.service.ee.ts index f218329fab..33bbe16d36 100644 --- a/packages/cli/src/environments/variables/variables.service.ee.ts +++ b/packages/cli/src/environments/variables/variables.service.ee.ts @@ -1,10 +1,10 @@ -import type { Variables } from '@/databases/entities/Variables'; +import { Container } from 'typedi'; +import type { Variables } from '@db/entities/Variables'; import { collections } from '@/Db'; import { InternalHooks } from '@/InternalHooks'; -import Container from 'typedi'; +import { generateNanoId } from '@db/utils/generators'; import { canCreateNewVariable } from './enviromentHelpers'; import { 
VariablesService } from './variables.service'; -import { generateNanoId } from '../../databases/utils/generators'; export class VariablesLicenseError extends Error {} export class VariablesValidationError extends Error {} diff --git a/packages/cli/src/environments/variables/variables.service.ts b/packages/cli/src/environments/variables/variables.service.ts index 7cc26ee214..01657b96eb 100644 --- a/packages/cli/src/environments/variables/variables.service.ts +++ b/packages/cli/src/environments/variables/variables.service.ts @@ -1,4 +1,4 @@ -import type { Variables } from '@/databases/entities/Variables'; +import type { Variables } from '@db/entities/Variables'; import { collections } from '@/Db'; export class VariablesService { diff --git a/packages/cli/src/eventbus/MessageEventBus/recoverEvents.ts b/packages/cli/src/eventbus/MessageEventBus/recoverEvents.ts index 90dd30c849..9e155d29f1 100644 --- a/packages/cli/src/eventbus/MessageEventBus/recoverEvents.ts +++ b/packages/cli/src/eventbus/MessageEventBus/recoverEvents.ts @@ -9,7 +9,7 @@ import { eventBus } from './MessageEventBus'; import { Container } from 'typedi'; import { InternalHooks } from '@/InternalHooks'; import { getWorkflowHooksMain } from '@/WorkflowExecuteAdditionalData'; -import { ExecutionRepository } from '@/databases/repositories'; +import { ExecutionRepository } from '@db/repositories'; export async function recoverExecutionDataFromEventLogMessages( executionId: string, diff --git a/packages/cli/src/executions/executions.service.ts b/packages/cli/src/executions/executions.service.ts index f6c8303b59..ac934f09bd 100644 --- a/packages/cli/src/executions/executions.service.ts +++ b/packages/cli/src/executions/executions.service.ts @@ -25,7 +25,7 @@ import * as Db from '@/Db'; import * as GenericHelpers from '@/GenericHelpers'; import { Container } from 'typedi'; import { getStatusUsingPreviousExecutionStatusMethod } from './executionHelpers'; -import { ExecutionRepository } from '@/databases/repositories'; +import { ExecutionRepository } from '@db/repositories'; export interface IGetExecutionsQueryFilter { id?: FindOperator | string; diff --git a/packages/cli/src/license/license.controller.ts b/packages/cli/src/license/license.controller.ts index 04c98cb2bb..46106f7bfb 100644 --- a/packages/cli/src/license/license.controller.ts +++ b/packages/cli/src/license/license.controller.ts @@ -9,7 +9,6 @@ import type { ILicensePostResponse, ILicenseReadResponse } from '@/Interfaces'; import { LicenseService } from './License.service'; import { License } from '@/License'; import type { AuthenticatedRequest, LicenseRequest } from '@/requests'; -import { isInstanceOwner } from '@/PublicApi/v1/handlers/users/users.service.ee'; import { Container } from 'typedi'; import { InternalHooks } from '@/InternalHooks'; @@ -34,7 +33,7 @@ licenseController.use((req, res, next) => { */ licenseController.use((req: AuthenticatedRequest, res, next) => { if (OWNER_ROUTES.includes(req.path) && req.user) { - if (!isInstanceOwner(req.user)) { + if (!req.user.isOwner) { LoggerProxy.info('Non-owner attempted to activate or renew a license', { userId: req.user.id, }); diff --git a/packages/cli/src/middlewares/auth.ts b/packages/cli/src/middlewares/auth.ts index e28272766b..4a4dff63ff 100644 --- a/packages/cli/src/middlewares/auth.ts +++ b/packages/cli/src/middlewares/auth.ts @@ -1,6 +1,5 @@ import type { Application, NextFunction, Request, RequestHandler, Response } from 'express'; import jwt from 'jsonwebtoken'; -import cookieParser from 'cookie-parser'; 
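The middlewares/auth.ts changes starting here drop the basic-auth and external-JWT code paths and always authenticate requests via the JWT carried in the auth cookie, with the middleware renamed to userManagementJwtAuth. A rough sketch of that cookie-based passport-jwt setup in isolation; the secret handling and user lookup are placeholders, not n8n's actual implementation, and req.cookies is assumed to be populated upstream (e.g. by cookie-parser):

import type { Request, RequestHandler } from 'express';
import passport from 'passport';
import { Strategy as JwtStrategy } from 'passport-jwt';

// Matches the cookie name referenced by the integration tests further below.
const AUTH_COOKIE_NAME = 'n8n-auth';

// Extract the JWT from the auth cookie instead of an Authorization header.
const jwtFromRequest = (req: Request) =>
	(req.cookies?.[AUTH_COOKIE_NAME] as string | undefined) ?? null;

export const exampleCookieJwtAuth = (jwtSecret: string): RequestHandler[] => {
	passport.use(
		new JwtStrategy({ jwtFromRequest, secretOrKey: jwtSecret }, (payload, done) => {
			// Placeholder: resolve the user referenced by the token payload here.
			done(null, payload);
		}),
	);
	return [passport.initialize(), passport.authenticate('jwt', { session: false })];
};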
import passport from 'passport'; import { Strategy } from 'passport-jwt'; import { sync as globSync } from 'fast-glob'; @@ -10,8 +9,6 @@ import type { AuthenticatedRequest } from '@/requests'; import config from '@/config'; import { AUTH_COOKIE_NAME, EDITOR_UI_DIST_DIR } from '@/constants'; import { issueCookie, resolveJwtContent } from '@/auth/jwt'; -import { isUserManagementEnabled } from '@/UserManagement/UserManagementHelper'; -import type { UserRepository } from '@db/repositories'; import { canSkipAuth } from '@/decorators/registerController'; const jwtFromRequest = (req: Request) => { @@ -19,7 +16,7 @@ const jwtFromRequest = (req: Request) => { return (req.cookies?.[AUTH_COOKIE_NAME] as string | undefined) ?? null; }; -const jwtAuth = (): RequestHandler => { +const userManagementJwtAuth = (): RequestHandler => { const jwtStrategy = new Strategy( { jwtFromRequest, @@ -79,11 +76,8 @@ export const setupAuthMiddlewares = ( app: Application, ignoredEndpoints: Readonly, restEndpoint: string, - userRepository: UserRepository, ) => { - // needed for testing; not adding overhead since it directly returns if req.cookies exists - app.use(cookieParser()); - app.use(jwtAuth()); + app.use(userManagementJwtAuth()); app.use(async (req: Request, res: Response, next: NextFunction) => { if ( @@ -101,15 +95,6 @@ export const setupAuthMiddlewares = ( return next(); } - // skip authentication if user management is disabled - if (!isUserManagementEnabled()) { - req.user = await userRepository.findOneOrFail({ - relations: ['globalRole'], - where: {}, - }); - return next(); - } - return passportMiddleware(req, res, next); }); diff --git a/packages/cli/src/middlewares/basicAuth.ts b/packages/cli/src/middlewares/basicAuth.ts deleted file mode 100644 index bdf28b5a0e..0000000000 --- a/packages/cli/src/middlewares/basicAuth.ts +++ /dev/null @@ -1,57 +0,0 @@ -import type { Application } from 'express'; -import basicAuth from 'basic-auth'; -// IMPORTANT! Do not switch to anther bcrypt library unless really necessary and -// tested with all possible systems like Windows, Alpine on ARM, FreeBSD, ... -import { compare } from 'bcryptjs'; -import type { Config } from '@/config'; -import { basicAuthAuthorizationError } from '@/ResponseHelper'; - -export const setupBasicAuth = (app: Application, config: Config, authIgnoreRegex: RegExp) => { - const basicAuthUser = config.getEnv('security.basicAuth.user'); - if (basicAuthUser === '') { - throw new Error('Basic auth is activated but no user got defined. Please set one!'); - } - - const basicAuthPassword = config.getEnv('security.basicAuth.password'); - if (basicAuthPassword === '') { - throw new Error('Basic auth is activated but no password got defined. 
Please set one!'); - } - - const basicAuthHashEnabled = config.getEnv('security.basicAuth.hash'); - - let validPassword: null | string = null; - - app.use(async (req, res, next) => { - // Skip basic auth for a few listed endpoints or when instance owner has been setup - if (authIgnoreRegex.exec(req.url) || config.getEnv('userManagement.isInstanceOwnerSetUp')) { - return next(); - } - const realm = 'n8n - Editor UI'; - const basicAuthData = basicAuth(req); - - if (basicAuthData === undefined) { - // Authorization data is missing - return basicAuthAuthorizationError(res, realm, 'Authorization is required!'); - } - - if (basicAuthData.name === basicAuthUser) { - if (basicAuthHashEnabled) { - if (validPassword === null && (await compare(basicAuthData.pass, basicAuthPassword))) { - // Password is valid so save for future requests - validPassword = basicAuthData.pass; - } - - if (validPassword === basicAuthData.pass && validPassword !== null) { - // Provided hash is correct - return next(); - } - } else if (basicAuthData.pass === basicAuthPassword) { - // Provided password is correct - return next(); - } - } - - // Provided authentication data is wrong - return basicAuthAuthorizationError(res, realm, 'Authorization data is wrong!'); - }); -}; diff --git a/packages/cli/src/middlewares/externalJWTAuth.ts b/packages/cli/src/middlewares/externalJWTAuth.ts deleted file mode 100644 index 8d769933b3..0000000000 --- a/packages/cli/src/middlewares/externalJWTAuth.ts +++ /dev/null @@ -1,85 +0,0 @@ -import type { Application } from 'express'; -import jwt from 'jsonwebtoken'; -import jwks from 'jwks-rsa'; -import type { Config } from '@/config'; -import { jwtAuthAuthorizationError } from '@/ResponseHelper'; - -export const setupExternalJWTAuth = (app: Application, config: Config, authIgnoreRegex: RegExp) => { - const jwtAuthHeader = config.getEnv('security.jwtAuth.jwtHeader'); - if (jwtAuthHeader === '') { - throw new Error('JWT auth is activated but no request header was defined. Please set one!'); - } - - const jwksUri = config.getEnv('security.jwtAuth.jwksUri'); - if (jwksUri === '') { - throw new Error('JWT auth is activated but no JWK Set URI was defined. 
Please set one!'); - } - - const jwtHeaderValuePrefix = config.getEnv('security.jwtAuth.jwtHeaderValuePrefix'); - const jwtIssuer = config.getEnv('security.jwtAuth.jwtIssuer'); - const jwtNamespace = config.getEnv('security.jwtAuth.jwtNamespace'); - const jwtAllowedTenantKey = config.getEnv('security.jwtAuth.jwtAllowedTenantKey'); - const jwtAllowedTenant = config.getEnv('security.jwtAuth.jwtAllowedTenant'); - - // eslint-disable-next-line no-inner-declarations - function isTenantAllowed(decodedToken: object): boolean { - if (jwtNamespace === '' || jwtAllowedTenantKey === '' || jwtAllowedTenant === '') { - return true; - } - - for (const [k, v] of Object.entries(decodedToken)) { - if (k === jwtNamespace) { - // eslint-disable-next-line @typescript-eslint/no-unsafe-argument - for (const [kn, kv] of Object.entries(v)) { - if (kn === jwtAllowedTenantKey && kv === jwtAllowedTenant) { - return true; - } - } - } - } - - return false; - } - - // eslint-disable-next-line consistent-return - app.use((req, res, next) => { - if (authIgnoreRegex.exec(req.url)) { - return next(); - } - - let token = req.header(jwtAuthHeader) as string; - if (token === undefined || token === '') { - return jwtAuthAuthorizationError(res, 'Missing token'); - } - - if (jwtHeaderValuePrefix !== '' && token.startsWith(jwtHeaderValuePrefix)) { - token = token.replace(`${jwtHeaderValuePrefix} `, '').trimStart(); - } - - const jwkClient = jwks({ cache: true, jwksUri }); - const getKey: jwt.GetPublicKeyOrSecret = (header, callbackFn) => { - // eslint-disable-next-line @typescript-eslint/no-throw-literal - if (!header.kid) throw jwtAuthAuthorizationError(res, 'No JWT key found'); - jwkClient.getSigningKey(header.kid, (error, key) => { - // eslint-disable-next-line @typescript-eslint/no-throw-literal - if (error) throw jwtAuthAuthorizationError(res, error.message); - callbackFn(null, key?.getPublicKey()); - }); - }; - - const jwtVerifyOptions: jwt.VerifyOptions = { - issuer: jwtIssuer !== '' ? 
jwtIssuer : undefined, - ignoreExpiration: false, - }; - - jwt.verify(token, getKey, jwtVerifyOptions, (error: jwt.VerifyErrors, decoded: object) => { - if (error) { - jwtAuthAuthorizationError(res, 'Invalid token'); - } else if (!isTenantAllowed(decoded)) { - jwtAuthAuthorizationError(res, 'Tenant not allowed'); - } else { - next(); - } - }); - }); -}; diff --git a/packages/cli/src/middlewares/userManagementEnabled.ts b/packages/cli/src/middlewares/userManagementEnabled.ts deleted file mode 100644 index c1f3c58c6f..0000000000 --- a/packages/cli/src/middlewares/userManagementEnabled.ts +++ /dev/null @@ -1,12 +0,0 @@ -import type { RequestHandler } from 'express'; -import { LoggerProxy } from 'n8n-workflow'; -import { isUserManagementEnabled } from '../UserManagement/UserManagementHelper'; - -export const userManagementEnabledMiddleware: RequestHandler = (req, res, next) => { - if (isUserManagementEnabled()) { - next(); - } else { - LoggerProxy.debug('Request failed because user management is disabled'); - res.status(400).json({ status: 'error', message: 'User management is disabled' }); - } -}; diff --git a/packages/cli/src/push/index.ts b/packages/cli/src/push/index.ts index 63615605d6..72289e79e3 100644 --- a/packages/cli/src/push/index.ts +++ b/packages/cli/src/push/index.ts @@ -57,11 +57,7 @@ export const setupPushServer = (restEndpoint: string, server: Server, app: Appli } }; -export const setupPushHandler = ( - restEndpoint: string, - app: Application, - isUserManagementEnabled: boolean, -) => { +export const setupPushHandler = (restEndpoint: string, app: Application) => { const endpoint = `/${restEndpoint}/push`; const pushValidationMiddleware: RequestHandler = async ( @@ -81,22 +77,18 @@ export const setupPushHandler = ( } return; } - - // Handle authentication - if (isUserManagementEnabled) { - try { - // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-member-access - const authCookie: string = req.cookies?.[AUTH_COOKIE_NAME] ?? ''; - await resolveJwt(authCookie); - } catch (error) { - if (ws) { - ws.send(`Unauthorized: ${(error as Error).message}`); - ws.close(401); - } else { - res.status(401).send('Unauthorized'); - } - return; + try { + // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-member-access + const authCookie: string = req.cookies?.[AUTH_COOKIE_NAME] ?? 
''; + await resolveJwt(authCookie); + } catch (error) { + if (ws) { + ws.send(`Unauthorized: ${(error as Error).message}`); + ws.close(401); + } else { + res.status(401).send('Unauthorized'); } + return; } next(); diff --git a/packages/cli/src/requests.ts b/packages/cli/src/requests.ts index 9b4adf94ad..f1c0636557 100644 --- a/packages/cli/src/requests.ts +++ b/packages/cli/src/requests.ts @@ -1,5 +1,6 @@ import type express from 'express'; import type { + Banners, IConnections, ICredentialDataDecryptedObject, ICredentialNodeAccess, @@ -181,22 +182,21 @@ export declare namespace MeRequest { export type SurveyAnswers = AuthenticatedRequest<{}, {}, Record | {}>; } +export interface UserSetupPayload { + email: string; + password: string; + firstName: string; + lastName: string; +} + // ---------------------------------- // /owner // ---------------------------------- export declare namespace OwnerRequest { - type Post = AuthenticatedRequest< - {}, - {}, - Partial<{ - email: string; - password: string; - firstName: string; - lastName: string; - }>, - {} - >; + type Post = AuthenticatedRequest<{}, {}, UserSetupPayload, {}>; + + type DismissBanner = AuthenticatedRequest<{}, {}, Partial<{ bannerName: Banners }>, {}>; } // ---------------------------------- diff --git a/packages/cli/src/sso/saml/routes/saml.controller.ee.ts b/packages/cli/src/sso/saml/routes/saml.controller.ee.ts index d93bfe5fdb..d0d27730f3 100644 --- a/packages/cli/src/sso/saml/routes/saml.controller.ee.ts +++ b/packages/cli/src/sso/saml/routes/saml.controller.ee.ts @@ -1,4 +1,5 @@ import express from 'express'; +import { Container, Service } from 'typedi'; import { getInstanceBaseUrl } from '@/UserManagement/UserManagementHelper'; import { Authorized, Get, NoAuthRequired, Post, RestController } from '@/decorators'; import { SamlUrls } from '../constants'; @@ -23,9 +24,9 @@ import { } from '../serviceProvider.ee'; import { getSamlConnectionTestSuccessView } from '../views/samlConnectionTestSuccess'; import { getSamlConnectionTestFailedView } from '../views/samlConnectionTestFailed'; -import Container from 'typedi'; import { InternalHooks } from '@/InternalHooks'; +@Service() @RestController('/sso/saml') export class SamlController { constructor(private samlService: SamlService) {} diff --git a/packages/cli/src/sso/saml/saml.service.ee.ts b/packages/cli/src/sso/saml/saml.service.ee.ts index cbeb3dee3a..fa63646f9d 100644 --- a/packages/cli/src/sso/saml/saml.service.ee.ts +++ b/packages/cli/src/sso/saml/saml.service.ee.ts @@ -145,8 +145,9 @@ export class SamlService { }> { const attributes = await this.getAttributesFromLoginResponse(req, binding); if (attributes.email) { + const lowerCasedEmail = attributes.email.toLowerCase(); const user = await Db.collections.User.findOne({ - where: { email: attributes.email }, + where: { email: lowerCasedEmail }, relations: ['globalRole', 'authIdentities'], }); if (user) { diff --git a/packages/cli/src/sso/saml/samlHelpers.ts b/packages/cli/src/sso/saml/samlHelpers.ts index 0672965deb..db2ad210e1 100644 --- a/packages/cli/src/sso/saml/samlHelpers.ts +++ b/packages/cli/src/sso/saml/samlHelpers.ts @@ -6,7 +6,7 @@ import { User } from '@db/entities/User'; import { RoleRepository } from '@db/repositories'; import { License } from '@/License'; import { AuthError, InternalServerError } from '@/ResponseHelper'; -import { hashPassword, isUserManagementEnabled } from '@/UserManagement/UserManagementHelper'; +import { hashPassword } from '@/UserManagement/UserManagementHelper'; import type { 
SamlPreferences } from './types/samlPreferences'; import type { SamlUserAttributes } from './types/samlUserAttributes'; import type { FlowResult } from 'samlify/types/src/flow'; @@ -53,8 +53,7 @@ export function setSamlLoginLabel(label: string): void { } export function isSamlLicensed(): boolean { - const license = Container.get(License); - return isUserManagementEnabled() && license.isSamlEnabled(); + return Container.get(License).isSamlEnabled(); } export function isSamlLicensedAndEnabled(): boolean { @@ -98,7 +97,8 @@ export function generatePassword(): string { export async function createUserFromSamlAttributes(attributes: SamlUserAttributes): Promise { const user = new User(); const authIdentity = new AuthIdentity(); - user.email = attributes.email; + const lowerCasedEmail = attributes.email?.toLowerCase() ?? ''; + user.email = lowerCasedEmail; user.firstName = attributes.firstName; user.lastName = attributes.lastName; user.globalRole = await Container.get(RoleRepository).findGlobalMemberRoleOrFail(); diff --git a/packages/cli/src/commands/Interfaces.d.ts b/packages/cli/src/types/commands.types.ts similarity index 70% rename from packages/cli/src/commands/Interfaces.d.ts rename to packages/cli/src/types/commands.types.ts index 765b16d7aa..80e556ee62 100644 --- a/packages/cli/src/commands/Interfaces.d.ts +++ b/packages/cli/src/types/commands.types.ts @@ -1,4 +1,6 @@ -interface IResult { +import type { ExecutionStatus } from 'n8n-workflow'; + +export interface IResult { totalWorkflows: number; slackMessage: string; summary: { @@ -14,14 +16,14 @@ interface IResult { executions: IExecutionResult[]; } -interface IExecutionResult { +export interface IExecutionResult { workflowId: string; workflowName: string; executionTime: number; // Given in seconds with decimals for milliseconds finished: boolean; executionStatus: ExecutionStatus; error?: string; - changes?: string; + changes?: object; coveredNodes: { [nodeType: string]: number; }; @@ -32,24 +34,17 @@ interface IExecutionError { error: string; } -interface IWorkflowExecutionProgress { +export interface IWorkflowExecutionProgress { workflowId: string; status: ExecutionStatus; } -interface INodeSpecialCases { +export interface INodeSpecialCases { [nodeName: string]: INodeSpecialCase; } -interface INodeSpecialCase { +export interface INodeSpecialCase { ignoredProperties?: string[]; capResults?: number; keepOnlyProperties?: string[]; } - -declare module 'json-diff' { - interface IDiffOptions { - keysOnly?: boolean; - } - export function diff(obj1: unknown, obj2: unknown, diffOptions: IDiffOptions): string; -} diff --git a/packages/cli/src/workflows/workflows.controller.ee.ts b/packages/cli/src/workflows/workflows.controller.ee.ts index 14b4d56d96..c50860c359 100644 --- a/packages/cli/src/workflows/workflows.controller.ee.ts +++ b/packages/cli/src/workflows/workflows.controller.ee.ts @@ -87,6 +87,7 @@ EEWorkflowController.put( EEWorkflowController.get( '/:id(\\w+)', + (req, res, next) => (req.params.id === 'new' ? 
next('router') : next()), // skip ee router and use free one for naming ResponseHelper.send(async (req: WorkflowRequest.Get) => { const { id: workflowId } = req.params; diff --git a/packages/cli/src/workflows/workflows.services.ts b/packages/cli/src/workflows/workflows.services.ts index 74d31f1a64..14f2333227 100644 --- a/packages/cli/src/workflows/workflows.services.ts +++ b/packages/cli/src/workflows/workflows.services.ts @@ -12,6 +12,7 @@ import * as ResponseHelper from '@/ResponseHelper'; import * as WorkflowHelpers from '@/WorkflowHelpers'; import config from '@/config'; import type { SharedWorkflow } from '@db/entities/SharedWorkflow'; +import type { RoleNames } from '@db/entities/Role'; import type { User } from '@db/entities/User'; import type { WorkflowEntity } from '@db/entities/WorkflowEntity'; import { validateEntity } from '@/GenericHelpers'; @@ -27,7 +28,6 @@ import { getSharedWorkflowIds } from '@/WorkflowHelpers'; import { isSharingEnabled, whereClause } from '@/UserManagement/UserManagementHelper'; import type { WorkflowForList } from '@/workflows/workflows.types'; import { InternalHooks } from '@/InternalHooks'; -import type { RoleNames } from '../databases/entities/Role'; export type IGetWorkflowsQueryFilter = Pick< FindOptionsWhere, diff --git a/packages/cli/test/integration/audit/credentials.risk.test.ts b/packages/cli/test/integration/audit/credentials.risk.test.ts index 395cfa2275..10d1d9ecbd 100644 --- a/packages/cli/test/integration/audit/credentials.risk.test.ts +++ b/packages/cli/test/integration/audit/credentials.risk.test.ts @@ -5,7 +5,7 @@ import { audit } from '@/audit'; import { CREDENTIALS_REPORT } from '@/audit/constants'; import { getRiskSection } from './utils'; import * as testDb from '../shared/testDb'; -import { generateNanoId } from '@/databases/utils/generators'; +import { generateNanoId } from '@db/utils/generators'; beforeAll(async () => { await testDb.init(); diff --git a/packages/cli/test/integration/audit/database.risk.test.ts b/packages/cli/test/integration/audit/database.risk.test.ts index 0ae783ffa7..a4068f5d77 100644 --- a/packages/cli/test/integration/audit/database.risk.test.ts +++ b/packages/cli/test/integration/audit/database.risk.test.ts @@ -8,7 +8,7 @@ import { } from '@/audit/constants'; import { getRiskSection, saveManualTriggerWorkflow } from './utils'; import * as testDb from '../shared/testDb'; -import { generateNanoId } from '@/databases/utils/generators'; +import { generateNanoId } from '@db/utils/generators'; beforeAll(async () => { await testDb.init(); diff --git a/packages/cli/test/integration/audit/filesystem.risk.test.ts b/packages/cli/test/integration/audit/filesystem.risk.test.ts index 009d1c2dd0..8418dbc102 100644 --- a/packages/cli/test/integration/audit/filesystem.risk.test.ts +++ b/packages/cli/test/integration/audit/filesystem.risk.test.ts @@ -4,7 +4,7 @@ import { audit } from '@/audit'; import { FILESYSTEM_INTERACTION_NODE_TYPES, FILESYSTEM_REPORT } from '@/audit/constants'; import { getRiskSection, saveManualTriggerWorkflow } from './utils'; import * as testDb from '../shared/testDb'; -import { WorkflowEntity } from '@/databases/entities/WorkflowEntity'; +import { WorkflowEntity } from '@db/entities/WorkflowEntity'; beforeAll(async () => { await testDb.init(); diff --git a/packages/cli/test/integration/audit/instance.risk.test.ts b/packages/cli/test/integration/audit/instance.risk.test.ts index 89411c1d0d..aa186c4d7c 100644 --- a/packages/cli/test/integration/audit/instance.risk.test.ts +++ 
b/packages/cli/test/integration/audit/instance.risk.test.ts @@ -12,7 +12,7 @@ import { import * as testDb from '../shared/testDb'; import { toReportTitle } from '@/audit/utils'; import config from '@/config'; -import { generateNanoId } from '@/databases/utils/generators'; +import { generateNanoId } from '@db/utils/generators'; beforeAll(async () => { await testDb.init(); @@ -244,12 +244,9 @@ test('should report security settings', async () => { versionNotificationsEnabled: true, templatesEnabled: true, publicApiEnabled: false, - userManagementEnabled: true, }, auth: { authExcludeEndpoints: 'none', - basicAuthActive: false, - jwtAuthActive: false, }, nodes: { nodesExclude: 'none', nodesInclude: 'none' }, telemetry: { diagnosticsEnabled: true }, diff --git a/packages/cli/test/integration/audit/nodes.risk.test.ts b/packages/cli/test/integration/audit/nodes.risk.test.ts index 8a3643fb7f..884594cfe0 100644 --- a/packages/cli/test/integration/audit/nodes.risk.test.ts +++ b/packages/cli/test/integration/audit/nodes.risk.test.ts @@ -6,10 +6,10 @@ import { OFFICIAL_RISKY_NODE_TYPES, NODES_REPORT } from '@/audit/constants'; import { getRiskSection, MOCK_PACKAGE, saveManualTriggerWorkflow } from './utils'; import * as testDb from '../shared/testDb'; import { toReportTitle } from '@/audit/utils'; -import { mockInstance } from '../shared/utils'; +import { mockInstance } from '../shared/utils/'; import { LoadNodesAndCredentials } from '@/LoadNodesAndCredentials'; import { NodeTypes } from '@/NodeTypes'; -import { WorkflowEntity } from '@/databases/entities/WorkflowEntity'; +import { WorkflowEntity } from '@db/entities/WorkflowEntity'; const nodesAndCredentials = mockInstance(LoadNodesAndCredentials); nodesAndCredentials.getCustomDirectories.mockReturnValue([]); diff --git a/packages/cli/test/integration/auth.api.test.ts b/packages/cli/test/integration/auth.api.test.ts index e81cf06aa3..1484468cf3 100644 --- a/packages/cli/test/integration/auth.api.test.ts +++ b/packages/cli/test/integration/auth.api.test.ts @@ -1,5 +1,6 @@ -import type { Application } from 'express'; import type { SuperAgentTest } from 'supertest'; +import { Container } from 'typedi'; +import { License } from '@/License'; import validator from 'validator'; import config from '@/config'; import * as Db from '@/Db'; @@ -9,37 +10,27 @@ import type { User } from '@db/entities/User'; import { LOGGED_OUT_RESPONSE_BODY } from './shared/constants'; import { randomValidPassword } from './shared/random'; import * as testDb from './shared/testDb'; -import type { AuthAgent } from './shared/types'; -import * as utils from './shared/utils'; +import * as utils from './shared/utils/'; -let app: Application; let globalOwnerRole: Role; let globalMemberRole: Role; let owner: User; -let authAgent: AuthAgent; -let authlessAgent: SuperAgentTest; let authOwnerAgent: SuperAgentTest; const ownerPassword = randomValidPassword(); -beforeAll(async () => { - app = await utils.initTestServer({ endpointGroups: ['auth'] }); - authAgent = utils.createAuthAgent(app); +const testServer = utils.setupTestServer({ endpointGroups: ['auth'] }); +beforeAll(async () => { globalOwnerRole = await testDb.getGlobalOwnerRole(); globalMemberRole = await testDb.getGlobalMemberRole(); }); beforeEach(async () => { await testDb.truncate(['User']); - authlessAgent = utils.createAgent(app); config.set('ldap.disabled', true); await utils.setInstanceOwnerSetUp(true); }); -afterAll(async () => { - await testDb.terminate(); -}); - describe('POST /login', () => { beforeEach(async () => { owner 
= await testDb.createUser({ @@ -49,7 +40,7 @@ describe('POST /login', () => { }); test('should log user in', async () => { - const response = await authlessAgent.post('/login').send({ + const response = await testServer.authlessAgent.post('/login').send({ email: owner.email, password: ownerPassword, }); @@ -84,11 +75,31 @@ describe('POST /login', () => { const authToken = utils.getAuthToken(response); expect(authToken).toBeDefined(); }); + + test('should throw AuthError for non-owner if not within users limit quota', async () => { + jest.spyOn(Container.get(License), 'isWithinUsersLimit').mockReturnValueOnce(false); + const member = await testDb.createUserShell(globalMemberRole); + + const response = await testServer.authAgentFor(member).get('/login'); + expect(response.statusCode).toBe(401); + }); + + test('should not throw AuthError for owner if not within users limit quota', async () => { + jest.spyOn(Container.get(License), 'isWithinUsersLimit').mockReturnValueOnce(false); + const ownerUser = await testDb.createUser({ + password: randomValidPassword(), + globalRole: globalOwnerRole, + isOwner: true, + }); + + const response = await testServer.authAgentFor(ownerUser).get('/login'); + expect(response.statusCode).toBe(200); + }); }); describe('GET /login', () => { test('should return 401 Unauthorized if no cookie', async () => { - const response = await authlessAgent.get('/login'); + const response = await testServer.authlessAgent.get('/login'); expect(response.statusCode).toBe(401); @@ -100,7 +111,7 @@ describe('GET /login', () => { await testDb.createUserShell(globalOwnerRole); await utils.setInstanceOwnerSetUp(false); - const response = await authlessAgent.get('/login'); + const response = await testServer.authlessAgent.get('/login'); expect(response.statusCode).toBe(200); @@ -109,9 +120,9 @@ describe('GET /login', () => { }); test('should return 401 Unauthorized if invalid cookie', async () => { - authlessAgent.jar.setCookie(`${AUTH_COOKIE_NAME}=invalid`); + testServer.authlessAgent.jar.setCookie(`${AUTH_COOKIE_NAME}=invalid`); - const response = await authlessAgent.get('/login'); + const response = await testServer.authlessAgent.get('/login'); expect(response.statusCode).toBe(401); @@ -122,7 +133,7 @@ describe('GET /login', () => { test('should return logged-in owner shell', async () => { const ownerShell = await testDb.createUserShell(globalOwnerRole); - const response = await authAgent(ownerShell).get('/login'); + const response = await testServer.authAgentFor(ownerShell).get('/login'); expect(response.statusCode).toBe(200); @@ -158,7 +169,7 @@ describe('GET /login', () => { test('should return logged-in member shell', async () => { const memberShell = await testDb.createUserShell(globalMemberRole); - const response = await authAgent(memberShell).get('/login'); + const response = await testServer.authAgentFor(memberShell).get('/login'); expect(response.statusCode).toBe(200); @@ -194,7 +205,7 @@ describe('GET /login', () => { test('should return logged-in owner', async () => { const owner = await testDb.createUser({ globalRole: globalOwnerRole }); - const response = await authAgent(owner).get('/login'); + const response = await testServer.authAgentFor(owner).get('/login'); expect(response.statusCode).toBe(200); @@ -230,7 +241,7 @@ describe('GET /login', () => { test('should return logged-in member', async () => { const member = await testDb.createUser({ globalRole: globalMemberRole }); - const response = await authAgent(member).get('/login'); + const response = await 
testServer.authAgentFor(member).get('/login'); expect(response.statusCode).toBe(200); @@ -270,7 +281,7 @@ describe('GET /resolve-signup-token', () => { password: ownerPassword, globalRole: globalOwnerRole, }); - authOwnerAgent = authAgent(owner); + authOwnerAgent = testServer.authAgentFor(owner); }); test('should validate invite token', async () => { @@ -292,6 +303,18 @@ describe('GET /resolve-signup-token', () => { }); }); + + test('should return 403 if user quota reached', async () => { + jest.spyOn(Container.get(License), 'isWithinUsersLimit').mockReturnValueOnce(false); + const memberShell = await testDb.createUserShell(globalMemberRole); + + const response = await authOwnerAgent + .get('/resolve-signup-token') + .query({ inviterId: owner.id }) + .query({ inviteeId: memberShell.id }); + + expect(response.statusCode).toBe(403); + }); + test('should fail with invalid inputs', async () => { const { id: inviteeId } = await testDb.createUser({ globalRole: globalMemberRole }); @@ -327,7 +350,7 @@ describe('POST /logout', () => { test('should log user out', async () => { const owner = await testDb.createUser({ globalRole: globalOwnerRole }); - const response = await authAgent(owner).post('/logout'); + const response = await testServer.authAgentFor(owner).post('/logout'); expect(response.statusCode).toBe(200); expect(response.body).toEqual(LOGGED_OUT_RESPONSE_BODY); diff --git a/packages/cli/test/integration/auth.mw.test.ts b/packages/cli/test/integration/auth.mw.test.ts index 1c4c5b72a6..c3db14926a 100644 --- a/packages/cli/test/integration/auth.mw.test.ts +++ b/packages/cli/test/integration/auth.mw.test.ts @@ -1,47 +1,51 @@ import type { SuperAgentTest } from 'supertest'; -import { - ROUTES_REQUIRING_AUTHENTICATION, - ROUTES_REQUIRING_AUTHORIZATION, -} from './shared/constants'; import * as testDb from './shared/testDb'; -import * as utils from './shared/utils'; +import * as utils from './shared/utils/'; -let authlessAgent: SuperAgentTest; -let authMemberAgent: SuperAgentTest; +describe('Auth Middleware', () => { + const testServer = utils.setupTestServer({ endpointGroups: ['me', 'auth', 'owner', 'users'] }); -beforeAll(async () => { - const app = await utils.initTestServer({ endpointGroups: ['me', 'auth', 'owner', 'users'] }); - const globalMemberRole = await testDb.getGlobalMemberRole(); - const member = await testDb.createUser({ globalRole: globalMemberRole }); + /** Routes requiring a valid `n8n-auth` cookie for a user, either owner or member. */ + const ROUTES_REQUIRING_AUTHENTICATION: Readonly<Array<[string, string]>> = [ + ['PATCH', '/me'], + ['PATCH', '/me/password'], + ['POST', '/me/survey'], + ['POST', '/owner/setup'], + ['GET', '/non-existent'], + ]; - authlessAgent = utils.createAgent(app); - authMemberAgent = utils.createAuthAgent(app)(member); -}); + /** Routes requiring a valid `n8n-auth` cookie for an owner. 
*/ + const ROUTES_REQUIRING_AUTHORIZATION: Readonly<Array<[string, string]>> = [ + ['POST', '/users'], + ['DELETE', '/users/123'], + ['POST', '/users/123/reinvite'], + ['POST', '/owner/setup'], + ]; -afterAll(async () => { - await testDb.terminate(); -}); + describe('Routes requiring Authentication', () => { + ROUTES_REQUIRING_AUTHENTICATION.concat(ROUTES_REQUIRING_AUTHORIZATION).forEach( + ([method, endpoint]) => { + test(`${method} ${endpoint} should return 401 Unauthorized if no cookie`, async () => { + const { statusCode } = await testServer.authlessAgent[method.toLowerCase()](endpoint); + expect(statusCode).toBe(401); + }); + }, + ); + }); -ROUTES_REQUIRING_AUTHENTICATION.concat(ROUTES_REQUIRING_AUTHORIZATION).forEach((route) => { - const [method, endpoint] = getMethodAndEndpoint(route); + describe('Routes requiring Authorization', () => { + let authMemberAgent: SuperAgentTest; + beforeAll(async () => { + const globalMemberRole = await testDb.getGlobalMemberRole(); + const member = await testDb.createUser({ globalRole: globalMemberRole }); + authMemberAgent = testServer.authAgentFor(member); + }); - test(`${route} should return 401 Unauthorized if no cookie`, async () => { - const { statusCode } = await authlessAgent[method](endpoint); - expect(statusCode).toBe(401); + ROUTES_REQUIRING_AUTHORIZATION.forEach(async ([method, endpoint]) => { + test(`${method} ${endpoint} should return 403 Forbidden for member`, async () => { + const { statusCode } = await authMemberAgent[method.toLowerCase()](endpoint); + expect(statusCode).toBe(403); + }); + }); }); }); - -ROUTES_REQUIRING_AUTHORIZATION.forEach(async (route) => { - const [method, endpoint] = getMethodAndEndpoint(route); - - test(`${route} should return 403 Forbidden for member`, async () => { - const { statusCode } = await authMemberAgent[method](endpoint); - expect(statusCode).toBe(403); - }); -}); - -function getMethodAndEndpoint(route: string) { - return route.split(' ').map((segment, index) => { - return index % 2 === 0 ? 
segment.toLowerCase() : segment; - }); -} diff --git a/packages/cli/test/integration/commands/import.cmd.test.ts b/packages/cli/test/integration/commands/import.cmd.test.ts index 746af32a2f..750ed86b44 100644 --- a/packages/cli/test/integration/commands/import.cmd.test.ts +++ b/packages/cli/test/integration/commands/import.cmd.test.ts @@ -1,5 +1,5 @@ import * as testDb from '../shared/testDb'; -import { mockInstance } from '../shared/utils'; +import { mockInstance } from '../shared/utils/'; import { InternalHooks } from '@/InternalHooks'; import { ImportWorkflowsCommand } from '@/commands/import/workflow'; import * as Config from '@oclif/config'; diff --git a/packages/cli/test/integration/commands/reset.cmd.test.ts b/packages/cli/test/integration/commands/reset.cmd.test.ts index 8de3d43721..c1f4b3f7d8 100644 --- a/packages/cli/test/integration/commands/reset.cmd.test.ts +++ b/packages/cli/test/integration/commands/reset.cmd.test.ts @@ -2,7 +2,7 @@ import * as Db from '@/Db'; import { Reset } from '@/commands/user-management/reset'; import type { Role } from '@db/entities/Role'; import * as testDb from '../shared/testDb'; -import { mockInstance } from '../shared/utils'; +import { mockInstance } from '../shared/utils/'; import { InternalHooks } from '@/InternalHooks'; import { LoadNodesAndCredentials } from '@/LoadNodesAndCredentials'; import { NodeTypes } from '@/NodeTypes'; diff --git a/packages/cli/test/integration/credentials.ee.test.ts b/packages/cli/test/integration/credentials.ee.test.ts index 344e01887d..94b2ca663a 100644 --- a/packages/cli/test/integration/credentials.ee.test.ts +++ b/packages/cli/test/integration/credentials.ee.test.ts @@ -11,22 +11,19 @@ import type { Role } from '@db/entities/Role'; import type { User } from '@db/entities/User'; import { randomCredentialPayload } from './shared/random'; import * as testDb from './shared/testDb'; -import type { AuthAgent, SaveCredentialFunction } from './shared/types'; -import * as utils from './shared/utils'; +import type { SaveCredentialFunction } from './shared/types'; +import * as utils from './shared/utils/'; + +const sharingSpy = jest.spyOn(UserManagementHelpers, 'isSharingEnabled').mockReturnValue(true); +const testServer = utils.setupTestServer({ endpointGroups: ['credentials'] }); let globalMemberRole: Role; let owner: User; let member: User; let authOwnerAgent: SuperAgentTest; -let authAgent: AuthAgent; let saveCredential: SaveCredentialFunction; -let sharingSpy: jest.SpyInstance; beforeAll(async () => { - const app = await utils.initTestServer({ endpointGroups: ['credentials'] }); - - await utils.initConfigFile(); - const globalOwnerRole = await testDb.getGlobalOwnerRole(); globalMemberRole = await testDb.getGlobalMemberRole(); const credentialOwnerRole = await testDb.getCredentialOwnerRole(); @@ -34,21 +31,15 @@ beforeAll(async () => { owner = await testDb.createUser({ globalRole: globalOwnerRole }); member = await testDb.createUser({ globalRole: globalMemberRole }); - authAgent = utils.createAuthAgent(app); - authOwnerAgent = authAgent(owner); + authOwnerAgent = testServer.authAgentFor(owner); saveCredential = testDb.affixRoleToSaveCredential(credentialOwnerRole); - sharingSpy = jest.spyOn(UserManagementHelpers, 'isSharingEnabled').mockReturnValue(true); }); beforeEach(async () => { await testDb.truncate(['SharedCredentials', 'Credentials']); }); -afterAll(async () => { - await testDb.terminate(); -}); - // ---------------------------------------- // dynamic router switching // ---------------------------------------- 
@@ -100,8 +91,12 @@ describe('GET /credentials', () => { expect(response.statusCode).toBe(200); expect(response.body.data).toHaveLength(2); // owner retrieved owner cred and member cred - - const [ownerCredential, memberCredential] = response.body.data as CredentialWithSharings[]; + const ownerCredential = response.body.data.find( + (e: CredentialWithSharings) => e.ownedBy?.id === owner.id, + ); + const memberCredential = response.body.data.find( + (e: CredentialWithSharings) => e.ownedBy?.id === member1.id, + ); validateMainCredentialData(ownerCredential); expect(ownerCredential.data).toBeUndefined(); @@ -157,7 +152,7 @@ describe('GET /credentials', () => { await testDb.shareCredentialWithUsers(savedMemberCredential, [member2]); - const response = await authAgent(member1).get('/credentials'); + const response = await testServer.authAgentFor(member1).get('/credentials'); expect(response.statusCode).toBe(200); expect(response.body.data).toHaveLength(1); // member retrieved only member cred @@ -264,7 +259,7 @@ describe('GET /credentials/:id', () => { const [member1, member2, member3] = await testDb.createManyUsers(3, { globalRole: globalMemberRole, }); - const authMemberAgent = authAgent(member1); + const authMemberAgent = testServer.authAgentFor(member1); const savedCredential = await saveCredential(randomCredentialPayload(), { user: member1 }); await testDb.shareCredentialWithUsers(savedCredential, [member2, member3]); @@ -301,7 +296,9 @@ describe('GET /credentials/:id', () => { test('should not retrieve non-owned cred for member', async () => { const savedCredential = await saveCredential(randomCredentialPayload(), { user: owner }); - const response = await authAgent(member).get(`/credentials/${savedCredential.id}`); + const response = await testServer + .authAgentFor(member) + .get(`/credentials/${savedCredential.id}`); expect(response.statusCode).toBe(403); expect(response.body.data).toBeUndefined(); // owner's cred not returned diff --git a/packages/cli/test/integration/credentials.test.ts b/packages/cli/test/integration/credentials.test.ts index 597f4528e0..8b7da715c9 100644 --- a/packages/cli/test/integration/credentials.test.ts +++ b/packages/cli/test/integration/credentials.test.ts @@ -1,4 +1,3 @@ -import type { Application } from 'express'; import type { SuperAgentTest } from 'supertest'; import { UserSettings } from 'n8n-core'; @@ -11,14 +10,13 @@ import type { Role } from '@db/entities/Role'; import type { User } from '@db/entities/User'; import { randomCredentialPayload, randomName, randomString } from './shared/random'; import * as testDb from './shared/testDb'; -import type { AuthAgent, SaveCredentialFunction } from './shared/types'; -import * as utils from './shared/utils'; +import type { SaveCredentialFunction } from './shared/types'; +import * as utils from './shared/utils/'; // mock that credentialsSharing is not enabled -const mockIsCredentialsSharingEnabled = jest.spyOn(UserManagementHelpers, 'isSharingEnabled'); -mockIsCredentialsSharingEnabled.mockReturnValue(false); +jest.spyOn(UserManagementHelpers, 'isSharingEnabled').mockReturnValue(false); +const testServer = utils.setupTestServer({ endpointGroups: ['credentials'] }); -let app: Application; let globalOwnerRole: Role; let globalMemberRole: Role; let owner: User; @@ -26,13 +24,8 @@ let member: User; let authOwnerAgent: SuperAgentTest; let authMemberAgent: SuperAgentTest; let saveCredential: SaveCredentialFunction; -let authAgent: AuthAgent; beforeAll(async () => { - app = await utils.initTestServer({ 
endpointGroups: ['credentials'] }); - - await utils.initConfigFile(); - globalOwnerRole = await testDb.getGlobalOwnerRole(); globalMemberRole = await testDb.getGlobalMemberRole(); const credentialOwnerRole = await testDb.getCredentialOwnerRole(); @@ -42,19 +35,14 @@ beforeAll(async () => { saveCredential = testDb.affixRoleToSaveCredential(credentialOwnerRole); - authAgent = utils.createAuthAgent(app); - authOwnerAgent = authAgent(owner); - authMemberAgent = authAgent(member); + authOwnerAgent = testServer.authAgentFor(owner); + authMemberAgent = testServer.authAgentFor(member); }); beforeEach(async () => { await testDb.truncate(['SharedCredentials', 'Credentials']); }); -afterAll(async () => { - await testDb.terminate(); -}); - // ---------------------------------------- // GET /credentials - fetch all credentials // ---------------------------------------- @@ -88,7 +76,7 @@ describe('GET /credentials', () => { saveCredential(randomCredentialPayload(), { user: member2 }), ]); - const response = await authAgent(member1).get('/credentials'); + const response = await testServer.authAgentFor(member1).get('/credentials'); expect(response.statusCode).toBe(200); expect(response.body.data.length).toBe(1); // member retrieved only own cred diff --git a/packages/cli/test/integration/environments/VersionControl.test.ts b/packages/cli/test/integration/environments/VersionControl.test.ts index 19c510ae10..1b85c92fce 100644 --- a/packages/cli/test/integration/environments/VersionControl.test.ts +++ b/packages/cli/test/integration/environments/VersionControl.test.ts @@ -1,23 +1,18 @@ -import { Container } from 'typedi'; import type { SuperAgentTest } from 'supertest'; -import type { User } from '@db/entities/User'; -import { License } from '@/License'; -import * as testDb from '../shared/testDb'; -import * as utils from '../shared/utils'; import { SOURCE_CONTROL_API_ROOT } from '@/environments/sourceControl/constants'; +import * as testDb from '../shared/testDb'; +import * as utils from '../shared/utils/'; -let owner: User; let authOwnerAgent: SuperAgentTest; -beforeAll(async () => { - Container.get(License).isSourceControlLicensed = () => true; - const app = await utils.initTestServer({ endpointGroups: ['sourceControl'] }); - owner = await testDb.createOwner(); - authOwnerAgent = utils.createAuthAgent(app)(owner); +const testServer = utils.setupTestServer({ + endpointGroups: ['sourceControl'], + enabledFeatures: ['feat:sourceControl'], }); -afterAll(async () => { - await testDb.terminate(); +beforeAll(async () => { + const owner = await testDb.createOwner(); + authOwnerAgent = testServer.authAgentFor(owner); }); describe('GET /sourceControl/preferences', () => { diff --git a/packages/cli/test/integration/eventbus.test.ts b/packages/cli/test/integration/eventbus.test.ts index d6c1c84c8f..8f76fa146c 100644 --- a/packages/cli/test/integration/eventbus.test.ts +++ b/packages/cli/test/integration/eventbus.test.ts @@ -1,11 +1,9 @@ -import type express from 'express'; import config from '@/config'; import axios from 'axios'; import syslog from 'syslog-client'; import { v4 as uuid } from 'uuid'; -import { Container } from 'typedi'; import type { SuperAgentTest } from 'supertest'; -import * as utils from './shared/utils'; +import * as utils from './shared/utils/'; import * as testDb from './shared/testDb'; import type { Role } from '@db/entities/Role'; import type { User } from '@db/entities/User'; @@ -26,7 +24,6 @@ import type { MessageEventBusDestinationWebhook } from '@/eventbus/MessageEventB import type { 
MessageEventBusDestinationSentry } from '@/eventbus/MessageEventBusDestination/MessageEventBusDestinationSentry.ee'; import { EventMessageAudit } from '@/eventbus/EventMessageClasses/EventMessageAudit'; import type { EventNamesTypes } from '@/eventbus/EventMessageClasses'; -import { License } from '@/License'; jest.unmock('@/eventbus/MessageEventBus/MessageEventBus'); jest.mock('axios'); @@ -34,10 +31,8 @@ const mockedAxios = axios as jest.Mocked<typeof axios>; jest.mock('syslog-client'); const mockedSyslog = syslog as jest.Mocked<typeof syslog>; -let app: express.Application; let globalOwnerRole: Role; let owner: User; -let unAuthOwnerAgent: SuperAgentTest; let authOwnerAgent: SuperAgentTest; const testSyslogDestination: MessageEventBusDestinationSyslogOptions = { @@ -80,41 +75,27 @@ async function confirmIdSent(id: string) { expect(sent.find((msg) => msg.id === id)).toBeTruthy(); } -beforeAll(async () => { - Container.get(License).isLogStreamingEnabled = () => true; - app = await utils.initTestServer({ endpointGroups: ['eventBus'] }); +const testServer = utils.setupTestServer({ + endpointGroups: ['eventBus'], + enabledFeatures: ['feat:logStreaming'], +}); +beforeAll(async () => { globalOwnerRole = await testDb.getGlobalOwnerRole(); owner = await testDb.createUser({ globalRole: globalOwnerRole }); - - unAuthOwnerAgent = utils.createAgent(app, { - apiPath: 'internal', - auth: false, - user: owner, - version: 1, - }); - - authOwnerAgent = utils.createAgent(app, { - apiPath: 'internal', - auth: true, - user: owner, - version: 1, - }); + authOwnerAgent = testServer.authAgentFor(owner); mockedSyslog.createClient.mockImplementation(() => new syslog.Client()); - await utils.initConfigFile(); + await utils.initEncryptionKey(); config.set('eventBus.logWriter.logBaseName', 'n8n-test-logwriter'); config.set('eventBus.logWriter.keepLogCount', 1); - config.set('userManagement.disabled', false); - config.set('userManagement.isInstanceOwnerSetUp', true); await eventBus.initialize(); }); afterAll(async () => { jest.mock('@/eventbus/MessageEventBus/MessageEventBus'); - await testDb.terminate(); await eventBus.close(); }); @@ -139,41 +120,47 @@ test('should have logwriter log messages', async () => { }); }); -test('GET /eventbus/destination should fail due to missing authentication', async () => { - const response = await unAuthOwnerAgent.get('/eventbus/destination'); - expect(response.statusCode).toBe(401); +describe('GET /eventbus/destination', () => { + test('should fail due to missing authentication', async () => { + const response = await testServer.authlessAgent.get('/eventbus/destination'); + expect(response.statusCode).toBe(401); + }); + + test('all returned destinations should exist in eventbus', async () => { + const response = await authOwnerAgent.get('/eventbus/destination'); + expect(response.statusCode).toBe(200); + + const data = response.body.data; + expect(data).toBeTruthy(); + expect(Array.isArray(data)).toBeTruthy(); + + for (let index = 0; index < data.length; index++) { + const destination = data[index]; + const foundDestinations = await eventBus.findDestination(destination.id); + expect(Array.isArray(foundDestinations)).toBeTruthy(); + expect(foundDestinations.length).toBe(1); + expect(foundDestinations[0].label).toBe(destination.label); + } + }); }); -test('POST /eventbus/destination create syslog destination', async () => { - const response = await authOwnerAgent.post('/eventbus/destination').send(testSyslogDestination); - expect(response.statusCode).toBe(200); -}); +describe('POST /eventbus/destination', () => 
{ + test('create syslog destination', async () => { + const response = await authOwnerAgent.post('/eventbus/destination').send(testSyslogDestination); + expect(response.statusCode).toBe(200); + }); -test('POST /eventbus/destination create sentry destination', async () => { - const response = await authOwnerAgent.post('/eventbus/destination').send(testSentryDestination); - expect(response.statusCode).toBe(200); -}); + test('create sentry destination', async () => { + const response = await authOwnerAgent.post('/eventbus/destination').send(testSentryDestination); + expect(response.statusCode).toBe(200); + }); -test('POST /eventbus/destination create webhook destination', async () => { - const response = await authOwnerAgent.post('/eventbus/destination').send(testWebhookDestination); - expect(response.statusCode).toBe(200); -}); - -test('GET /eventbus/destination all returned destinations should exist in eventbus', async () => { - const response = await authOwnerAgent.get('/eventbus/destination'); - expect(response.statusCode).toBe(200); - - const data = response.body.data; - expect(data).toBeTruthy(); - expect(Array.isArray(data)).toBeTruthy(); - - for (let index = 0; index < data.length; index++) { - const destination = data[index]; - const foundDestinations = await eventBus.findDestination(destination.id); - expect(Array.isArray(foundDestinations)).toBeTruthy(); - expect(foundDestinations.length).toBe(1); - expect(foundDestinations[0].label).toBe(destination.label); - } + test('create webhook destination', async () => { + const response = await authOwnerAgent + .post('/eventbus/destination') + .send(testWebhookDestination); + expect(response.statusCode).toBe(200); + }); }); // this test (presumably the mocking) is causing the test suite to randomly fail @@ -389,7 +376,7 @@ test('should send message to sentry ', async () => { }); }); -test('DEL /eventbus/destination delete all destinations by id', async () => { +test('DELETE /eventbus/destination delete all destinations by id', async () => { const existingDestinationIds = [...Object.keys(eventBus.destinations)]; await Promise.all( diff --git a/packages/cli/test/integration/ldap/ldap.api.test.ts b/packages/cli/test/integration/ldap/ldap.api.test.ts index 0e9688c639..65d1c080d5 100644 --- a/packages/cli/test/integration/ldap/ldap.api.test.ts +++ b/packages/cli/test/integration/ldap/ldap.api.test.ts @@ -1,7 +1,6 @@ -import type express from 'express'; +import type { SuperAgentTest } from 'supertest'; import type { Entry as LdapUser } from 'ldapts'; import { Not } from 'typeorm'; -import { Container } from 'typedi'; import { jsonParse } from 'n8n-workflow'; import config from '@/config'; import * as Db from '@/Db'; @@ -14,19 +13,17 @@ import { encryptPassword, saveLdapSynchronization } from '@/Ldap/helpers'; import type { LdapConfig } from '@/Ldap/types'; import { sanitizeUser } from '@/UserManagement/UserManagementHelper'; import { getCurrentAuthenticationMethod, setCurrentAuthenticationMethod } from '@/sso/ssoHelpers'; -import { License } from '@/License'; + import { randomEmail, randomName, uniqueId } from './../shared/random'; import * as testDb from './../shared/testDb'; -import type { AuthAgent } from '../shared/types'; -import * as utils from '../shared/utils'; +import * as utils from '../shared/utils/'; jest.mock('@/telemetry'); jest.mock('@/UserManagement/email/NodeMailer'); -let app: express.Application; let globalMemberRole: Role; let owner: User; -let authAgent: AuthAgent; +let authOwnerAgent: SuperAgentTest; const defaultLdapConfig 
= { ...LDAP_DEFAULT_CONFIGURATION, @@ -42,23 +39,24 @@ const defaultLdapConfig = { bindingAdminPassword: 'adminPassword', }; -beforeAll(async () => { - Container.get(License).isLdapEnabled = () => true; - app = await utils.initTestServer({ endpointGroups: ['auth', 'ldap'] }); +const testServer = utils.setupTestServer({ + endpointGroups: ['auth', 'ldap'], + enabledFeatures: ['feat:ldap'], +}); +beforeAll(async () => { const [globalOwnerRole, fetchedGlobalMemberRole] = await testDb.getAllRoles(); globalMemberRole = fetchedGlobalMemberRole; owner = await testDb.createUser({ globalRole: globalOwnerRole }); - - authAgent = utils.createAuthAgent(app); + authOwnerAgent = testServer.authAgentFor(owner); defaultLdapConfig.bindingAdminPassword = await encryptPassword( defaultLdapConfig.bindingAdminPassword, ); - await utils.initConfigFile(); + await utils.initEncryptionKey(); await setCurrentAuthenticationMethod('email'); }); @@ -77,15 +75,10 @@ beforeEach(async () => { jest.mock('@/telemetry'); - config.set('userManagement.disabled', false); config.set('userManagement.isInstanceOwnerSetUp', true); config.set('userManagement.emails.mode', ''); }); -afterAll(async () => { - await testDb.terminate(); -}); - const createLdapConfig = async (attributes: Partial<LdapConfig> = {}): Promise<LdapConfig> => { const { value: ldapConfig } = await Db.collections.Settings.save({ key: LDAP_FEATURE_NAME, @@ -100,21 +93,12 @@ const createLdapConfig = async (attributes: Partial<LdapConfig> = {}): Promise<LdapConfig> => { const member = await testDb.createUser({ globalRole: globalMemberRole }); - - let response = await authAgent(member).get('/ldap/config'); - expect(response.statusCode).toBe(403); - - response = await authAgent(member).put('/ldap/config'); - expect(response.statusCode).toBe(403); - - response = await authAgent(member).post('/ldap/test-connection'); - expect(response.statusCode).toBe(403); - - response = await authAgent(member).post('/ldap/sync'); - expect(response.statusCode).toBe(403); - - response = await authAgent(member).get('/ldap/sync'); - expect(response.statusCode).toBe(403); + const authAgent = testServer.authAgentFor(member); + await authAgent.get('/ldap/config').expect(403); + await authAgent.put('/ldap/config').expect(403); + await authAgent.post('/ldap/test-connection').expect(403); + await authAgent.post('/ldap/sync').expect(403); + await authAgent.get('/ldap/sync').expect(403); }); describe('PUT /ldap/config', () => { @@ -143,7 +127,7 @@ describe('PUT /ldap/config', () => { ]; for (const invalidPayload of invalidPayloads) { - const response = await authAgent(owner).put('/ldap/config').send(invalidPayload); + const response = await authOwnerAgent.put('/ldap/config').send(invalidPayload); expect(response.statusCode).toBe(400); expect(response.body).toHaveProperty('message'); } @@ -156,7 +140,7 @@ describe('PUT /ldap/config', () => { loginLabel: '', }; - const response = await authAgent(owner).put('/ldap/config').send(validPayload); + const response = await authOwnerAgent.put('/ldap/config').send(validPayload); expect(response.statusCode).toBe(200); expect(response.body.data.loginEnabled).toBe(true); @@ -172,9 +156,7 @@ describe('PUT /ldap/config', () => { const configuration = ldapConfig; // disable the login, so the strategy is applied - await authAgent(owner) - .put('/ldap/config') - .send({ ...configuration, loginEnabled: false }); + await authOwnerAgent.put('/ldap/config').send({ ...configuration, loginEnabled: false }); const emailUser = await Db.collections.User.findOneByOrFail({ id: member.id }); const localLdapIdentities = await 
testDb.getLdapIdentities(); @@ -194,11 +176,11 @@ test('GET /ldap/config route should retrieve current configuration', async () => loginLabel: '', }; - let response = await authAgent(owner).put('/ldap/config').send(validPayload); + let response = await authOwnerAgent.put('/ldap/config').send(validPayload); expect(response.statusCode).toBe(200); expect(getCurrentAuthenticationMethod()).toBe('ldap'); - response = await authAgent(owner).get('/ldap/config'); + response = await authOwnerAgent.get('/ldap/config'); expect(response.body.data).toMatchObject(validPayload); }); @@ -207,8 +189,7 @@ describe('POST /ldap/test-connection', () => { test('route should success', async () => { jest.spyOn(LdapService.prototype, 'testConnection').mockResolvedValue(); - const response = await authAgent(owner).post('/ldap/test-connection'); - expect(response.statusCode).toBe(200); + await authOwnerAgent.post('/ldap/test-connection').expect(200); }); test('route should fail', async () => { @@ -216,7 +197,7 @@ describe('POST /ldap/test-connection', () => { jest.spyOn(LdapService.prototype, 'testConnection').mockRejectedValue(new Error(errorMessage)); - const response = await authAgent(owner).post('/ldap/test-connection'); + const response = await authOwnerAgent.post('/ldap/test-connection'); expect(response.statusCode).toBe(400); expect(response.body).toHaveProperty('message'); expect(response.body.message).toStrictEqual(errorMessage); @@ -238,9 +219,7 @@ describe('POST /ldap/sync', () => { const runTest = async (ldapUsers: LdapUser[]) => { jest.spyOn(LdapService.prototype, 'searchWithAdminBinding').mockResolvedValue(ldapUsers); - const response = await authAgent(owner).post('/ldap/sync').send({ type: 'dry' }); - - expect(response.statusCode).toBe(200); + await authOwnerAgent.post('/ldap/sync').send({ type: 'dry' }).expect(200); const synchronization = await Db.collections.AuthProviderSyncHistory.findOneByOrFail({}); @@ -333,9 +312,7 @@ describe('POST /ldap/sync', () => { const runTest = async (ldapUsers: LdapUser[]) => { jest.spyOn(LdapService.prototype, 'searchWithAdminBinding').mockResolvedValue(ldapUsers); - const response = await authAgent(owner).post('/ldap/sync').send({ type: 'live' }); - - expect(response.statusCode).toBe(200); + await authOwnerAgent.post('/ldap/sync').send({ type: 'live' }).expect(200); const synchronization = await Db.collections.AuthProviderSyncHistory.findOneByOrFail({}); @@ -461,9 +438,9 @@ describe('POST /ldap/sync', () => { jest.spyOn(LdapService.prototype, 'searchWithAdminBinding').mockResolvedValue([]); - await authAgent(owner).post('/ldap/sync').send({ type: 'live' }); + await authOwnerAgent.post('/ldap/sync').send({ type: 'live' }); - const response = await authAgent(member).get('/login'); + const response = await testServer.authAgentFor(member).get('/login'); expect(response.body.code).toBe(401); }); }); @@ -484,10 +461,10 @@ test('GET /ldap/sync should return paginated synchronizations', async () => { }); } - let response = await authAgent(owner).get('/ldap/sync?perPage=1&page=0'); + let response = await authOwnerAgent.get('/ldap/sync?perPage=1&page=0'); expect(response.body.data.length).toBe(1); - response = await authAgent(owner).get('/ldap/sync?perPage=1&page=1'); + response = await authOwnerAgent.get('/ldap/sync?perPage=1&page=1'); expect(response.body.data.length).toBe(1); }); @@ -496,13 +473,11 @@ describe('POST /login', () => { const ldapConfig = await createLdapConfig(); LdapManager.updateConfig(ldapConfig); - const authlessAgent = utils.createAgent(app); - 
jest.spyOn(LdapService.prototype, 'searchWithAdminBinding').mockResolvedValue([ldapUser]); jest.spyOn(LdapService.prototype, 'validUser').mockResolvedValue(); - const response = await authlessAgent + const response = await testServer.authlessAgent .post('/login') .send({ email: ldapUser.mail, password: 'password' }); @@ -582,7 +557,7 @@ describe('Instance owner should able to delete LDAP users', () => { const member = await testDb.createLdapUser({ globalRole: globalMemberRole }, uniqueId()); - await authAgent(owner).post(`/users/${member.id}`); + await authOwnerAgent.post(`/users/${member.id}`); }); test('transfer workflows and credentials', async () => { @@ -592,7 +567,7 @@ describe('Instance owner should able to delete LDAP users', () => { const member = await testDb.createLdapUser({ globalRole: globalMemberRole }, uniqueId()); // delete the LDAP member and transfer its workflows/credentials to instance owner - await authAgent(owner).post(`/users/${member.id}?transferId=${owner.id}`); + await authOwnerAgent.post(`/users/${member.id}?transferId=${owner.id}`); }); }); diff --git a/packages/cli/test/integration/license.api.test.ts b/packages/cli/test/integration/license.api.test.ts index 3b45f507f4..66b6ed32b7 100644 --- a/packages/cli/test/integration/license.api.test.ts +++ b/packages/cli/test/integration/license.api.test.ts @@ -4,7 +4,7 @@ import type { User } from '@db/entities/User'; import type { ILicensePostResponse, ILicenseReadResponse } from '@/Interfaces'; import { License } from '@/License'; import * as testDb from './shared/testDb'; -import * as utils from './shared/utils'; +import * as utils from './shared/utils/'; const MOCK_SERVER_URL = 'https://server.com/v1'; const MOCK_RENEW_OFFSET = 259200; @@ -14,17 +14,16 @@ let member: User; let authOwnerAgent: SuperAgentTest; let authMemberAgent: SuperAgentTest; -beforeAll(async () => { - const app = await utils.initTestServer({ endpointGroups: ['license'] }); +const testServer = utils.setupTestServer({ endpointGroups: ['license'] }); +beforeAll(async () => { const globalOwnerRole = await testDb.getGlobalOwnerRole(); const globalMemberRole = await testDb.getGlobalMemberRole(); owner = await testDb.createUserShell(globalOwnerRole); member = await testDb.createUserShell(globalMemberRole); - const authAgent = utils.createAuthAgent(app); - authOwnerAgent = authAgent(owner); - authMemberAgent = authAgent(member); + authOwnerAgent = testServer.authAgentFor(owner); + authMemberAgent = testServer.authAgentFor(member); config.set('license.serverUrl', MOCK_SERVER_URL); config.set('license.autoRenewEnabled', true); @@ -35,10 +34,6 @@ afterEach(async () => { await testDb.truncate(['Settings']); }); -afterAll(async () => { - await testDb.terminate(); -}); - describe('GET /license', () => { test('should return license information to the instance owner', async () => { // No license defined so we just expect the result to be the defaults diff --git a/packages/cli/test/integration/me.api.test.ts b/packages/cli/test/integration/me.api.test.ts index 7c5004a36e..b330a9d9cb 100644 --- a/packages/cli/test/integration/me.api.test.ts +++ b/packages/cli/test/integration/me.api.test.ts @@ -1,4 +1,3 @@ -import type { Application } from 'express'; import type { SuperAgentTest } from 'supertest'; import { IsNull } from 'typeorm'; import validator from 'validator'; @@ -14,31 +13,22 @@ import { randomValidPassword, } from './shared/random'; import * as testDb from './shared/testDb'; -import type { AuthAgent } from './shared/types'; -import * as utils from 
'./shared/utils'; +import * as utils from './shared/utils/'; + +const testServer = utils.setupTestServer({ endpointGroups: ['me'] }); -let app: Application; let globalOwnerRole: Role; let globalMemberRole: Role; -let authAgent: AuthAgent; beforeAll(async () => { - app = await utils.initTestServer({ endpointGroups: ['me'] }); - globalOwnerRole = await testDb.getGlobalOwnerRole(); globalMemberRole = await testDb.getGlobalMemberRole(); - - authAgent = utils.createAuthAgent(app); }); beforeEach(async () => { await testDb.truncate(['User']); }); -afterAll(async () => { - await testDb.terminate(); -}); - describe('Owner shell', () => { let ownerShell: User; let authOwnerShellAgent: SuperAgentTest; @@ -46,7 +36,7 @@ describe('Owner shell', () => { beforeEach(async () => { ownerShell = await testDb.createUserShell(globalOwnerRole); await testDb.addApiKey(ownerShell); - authOwnerShellAgent = authAgent(ownerShell); + authOwnerShellAgent = testServer.authAgentFor(ownerShell); }); test('PATCH /me should succeed with valid inputs', async () => { @@ -108,22 +98,20 @@ describe('Owner shell', () => { const validPayloads = [validPasswordPayload, ...INVALID_PASSWORD_PAYLOADS]; - await Promise.all( - validPayloads.map(async (payload) => { - const response = await authOwnerShellAgent.patch('/me/password').send(payload); - expect([400, 500].includes(response.statusCode)).toBe(true); + for (const payload of validPayloads) { + const response = await authOwnerShellAgent.patch('/me/password').send(payload); + expect([400, 500].includes(response.statusCode)).toBe(true); - const storedMember = await Db.collections.User.findOneByOrFail({}); + const storedMember = await Db.collections.User.findOneByOrFail({}); - if (payload.newPassword) { - expect(storedMember.password).not.toBe(payload.newPassword); - } + if (payload.newPassword) { + expect(storedMember.password).not.toBe(payload.newPassword); + } - if (payload.currentPassword) { - expect(storedMember.password).not.toBe(payload.currentPassword); - } - }), - ); + if (payload.currentPassword) { + expect(storedMember.password).not.toBe(payload.currentPassword); + } + } const storedOwnerShell = await Db.collections.User.findOneByOrFail({}); expect(storedOwnerShell.password).toBeNull(); @@ -191,7 +179,7 @@ describe('Member', () => { globalRole: globalMemberRole, apiKey: randomApiKey(), }); - authMemberAgent = authAgent(member); + authMemberAgent = testServer.authAgentFor(member); await utils.setInstanceOwnerSetUp(true); }); @@ -295,7 +283,7 @@ describe('Member', () => { }); test('POST /me/api-key should create an api key', async () => { - const response = await authAgent(member).post('/me/api-key'); + const response = await testServer.authAgentFor(member).post('/me/api-key'); expect(response.statusCode).toBe(200); expect(response.body.data.apiKey).toBeDefined(); @@ -307,14 +295,14 @@ describe('Member', () => { }); test('GET /me/api-key should fetch the api key', async () => { - const response = await authAgent(member).get('/me/api-key'); + const response = await testServer.authAgentFor(member).get('/me/api-key'); expect(response.statusCode).toBe(200); expect(response.body.data.apiKey).toEqual(member.apiKey); }); test('DELETE /me/api-key should fetch the api key', async () => { - const response = await authAgent(member).delete('/me/api-key'); + const response = await testServer.authAgentFor(member).delete('/me/api-key'); expect(response.statusCode).toBe(200); @@ -331,7 +319,7 @@ describe('Owner', () => { test('PATCH /me should succeed with valid inputs', async () => { 
const owner = await testDb.createUser({ globalRole: globalOwnerRole }); - const authOwnerAgent = authAgent(owner); + const authOwnerAgent = testServer.authAgentFor(owner); for (const validPayload of VALID_PATCH_ME_PAYLOADS) { const response = await authOwnerAgent.patch('/me').send(validPayload); diff --git a/packages/cli/test/integration/nodes.api.test.ts b/packages/cli/test/integration/nodes.api.test.ts index c22b43494c..c30035520f 100644 --- a/packages/cli/test/integration/nodes.api.test.ts +++ b/packages/cli/test/integration/nodes.api.test.ts @@ -16,7 +16,7 @@ import type { InstalledNodes } from '@db/entities/InstalledNodes'; import { NodeTypes } from '@/NodeTypes'; import { Push } from '@/push'; import { COMMUNITY_PACKAGE_VERSION } from './shared/constants'; -import * as utils from './shared/utils'; +import * as utils from './shared/utils/'; import * as testDb from './shared/testDb'; const mockLoadNodesAndCredentials = utils.mockInstance(LoadNodesAndCredentials); @@ -44,17 +44,15 @@ jest.mock('@/CommunityNodes/packageModel', () => { const mockedEmptyPackage = mocked(utils.emptyPackage); +const testServer = utils.setupTestServer({ endpointGroups: ['nodes'] }); + let ownerShell: User; let authOwnerShellAgent: SuperAgentTest; beforeAll(async () => { - const app = await utils.initTestServer({ endpointGroups: ['nodes'] }); - const globalOwnerRole = await testDb.getGlobalOwnerRole(); ownerShell = await testDb.createUserShell(globalOwnerRole); - authOwnerShellAgent = utils.createAuthAgent(app)(ownerShell); - - await utils.initConfigFile(); + authOwnerShellAgent = testServer.authAgentFor(ownerShell); }); beforeEach(async () => { @@ -64,10 +62,6 @@ beforeEach(async () => { mocked(findInstalledPackage).mockReset(); }); -afterAll(async () => { - await testDb.terminate(); -}); - describe('GET /nodes', () => { test('should respond 200 if no nodes are installed', async () => { const { diff --git a/packages/cli/test/integration/owner.api.test.ts b/packages/cli/test/integration/owner.api.test.ts index 8b59df68d7..e8227c9d5f 100644 --- a/packages/cli/test/integration/owner.api.test.ts +++ b/packages/cli/test/integration/owner.api.test.ts @@ -1,4 +1,3 @@ -import type { Application } from 'express'; import validator from 'validator'; import type { SuperAgentTest } from 'supertest'; @@ -13,32 +12,28 @@ import { randomValidPassword, } from './shared/random'; import * as testDb from './shared/testDb'; -import * as utils from './shared/utils'; +import * as utils from './shared/utils/'; + +const testServer = utils.setupTestServer({ endpointGroups: ['owner'] }); -let app: Application; let globalOwnerRole: Role; let ownerShell: User; let authOwnerShellAgent: SuperAgentTest; beforeAll(async () => { - app = await utils.initTestServer({ endpointGroups: ['owner'] }); globalOwnerRole = await testDb.getGlobalOwnerRole(); }); beforeEach(async () => { - config.set('userManagement.isInstanceOwnerSetUp', false); ownerShell = await testDb.createUserShell(globalOwnerRole); - authOwnerShellAgent = utils.createAuthAgent(app)(ownerShell); + authOwnerShellAgent = testServer.authAgentFor(ownerShell); + config.set('userManagement.isInstanceOwnerSetUp', false); }); afterEach(async () => { await testDb.truncate(['User']); }); -afterAll(async () => { - await testDb.terminate(); -}); - describe('POST /owner/setup', () => { test('should create owner and enable isInstanceOwnerSetUp', async () => { const newOwnerData = { @@ -159,29 +154,9 @@ describe('POST /owner/setup', () => { ]; test('should fail with invalid inputs', async () => { 
- const authOwnerAgent = authOwnerShellAgent; - - await Promise.all( - INVALID_POST_OWNER_PAYLOADS.map(async (invalidPayload) => { - const response = await authOwnerAgent.post('/owner/setup').send(invalidPayload); - expect(response.statusCode).toBe(400); - }), - ); - }); -}); - -describe('POST /owner/skip-setup', () => { - test('should persist skipping setup to the DB', async () => { - const response = await authOwnerShellAgent.post('/owner/skip-setup').send(); - - expect(response.statusCode).toBe(200); - - const skipConfig = config.getEnv('userManagement.skipInstanceOwnerSetup'); - expect(skipConfig).toBe(true); - - const { value } = await Db.collections.Settings.findOneByOrFail({ - key: 'userManagement.skipInstanceOwnerSetup', - }); - expect(value).toBe('true'); + for (const invalidPayload of INVALID_POST_OWNER_PAYLOADS) { + const response = await authOwnerShellAgent.post('/owner/setup').send(invalidPayload); + expect(response.statusCode).toBe(400); + } }); }); diff --git a/packages/cli/test/integration/passwordReset.api.test.ts b/packages/cli/test/integration/passwordReset.api.test.ts index 85e3cb2d93..0e9e345cc6 100644 --- a/packages/cli/test/integration/passwordReset.api.test.ts +++ b/packages/cli/test/integration/passwordReset.api.test.ts @@ -1,4 +1,3 @@ -import type { SuperAgentTest } from 'supertest'; import { v4 as uuid } from 'uuid'; import { compare } from 'bcryptjs'; @@ -6,7 +5,7 @@ import * as Db from '@/Db'; import config from '@/config'; import type { Role } from '@db/entities/Role'; import type { User } from '@db/entities/User'; -import * as utils from './shared/utils'; +import * as utils from './shared/utils/'; import { randomEmail, randomInvalidPassword, @@ -22,30 +21,21 @@ jest.mock('@/UserManagement/email/NodeMailer'); let globalOwnerRole: Role; let globalMemberRole: Role; let owner: User; -let authlessAgent: SuperAgentTest; + const externalHooks = utils.mockInstance(ExternalHooks); +const testServer = utils.setupTestServer({ endpointGroups: ['passwordReset'] }); beforeAll(async () => { - const app = await utils.initTestServer({ endpointGroups: ['passwordReset'] }); - globalOwnerRole = await testDb.getGlobalOwnerRole(); globalMemberRole = await testDb.getGlobalMemberRole(); - - authlessAgent = utils.createAgent(app); }); beforeEach(async () => { await testDb.truncate(['User']); owner = await testDb.createUser({ globalRole: globalOwnerRole }); - - config.set('userManagement.isInstanceOwnerSetUp', true); externalHooks.run.mockReset(); }); -afterAll(async () => { - await testDb.terminate(); -}); - describe('POST /forgot-password', () => { test('should send password reset email', async () => { const member = await testDb.createUser({ @@ -57,7 +47,7 @@ describe('POST /forgot-password', () => { await Promise.all( [{ email: owner.email }, { email: member.email.toUpperCase() }].map(async (payload) => { - const response = await authlessAgent.post('/forgot-password').send(payload); + const response = await testServer.authlessAgent.post('/forgot-password').send(payload); expect(response.statusCode).toBe(200); expect(response.body).toEqual({}); @@ -72,7 +62,10 @@ describe('POST /forgot-password', () => { test('should fail if emailing is not set up', async () => { config.set('userManagement.emails.mode', ''); - await authlessAgent.post('/forgot-password').send({ email: owner.email }).expect(500); + await testServer.authlessAgent + .post('/forgot-password') + .send({ email: owner.email }) + .expect(500); const storedOwner = await Db.collections.User.findOneByOrFail({ email: 
owner.email }); expect(storedOwner.resetPasswordToken).toBeNull(); @@ -86,7 +79,10 @@ describe('POST /forgot-password', () => { globalRole: globalMemberRole, }); - await authlessAgent.post('/forgot-password').send({ email: member.email }).expect(403); + await testServer.authlessAgent + .post('/forgot-password') + .send({ email: member.email }) + .expect(403); const storedOwner = await Db.collections.User.findOneByOrFail({ email: member.email }); expect(storedOwner.resetPasswordToken).toBeNull(); @@ -97,7 +93,9 @@ describe('POST /forgot-password', () => { await setCurrentAuthenticationMethod('saml'); config.set('userManagement.emails.mode', 'smtp'); - const response = await authlessAgent.post('/forgot-password').send({ email: owner.email }); + const response = await testServer.authlessAgent + .post('/forgot-password') + .send({ email: owner.email }); expect(response.statusCode).toBe(200); expect(response.body).toEqual({}); @@ -118,21 +116,21 @@ describe('POST /forgot-password', () => { [{ email: randomName() }], ]; - await Promise.all( - invalidPayloads.map(async (invalidPayload) => { - const response = await authlessAgent.post('/forgot-password').send(invalidPayload); - expect(response.statusCode).toBe(400); + for (const invalidPayload of invalidPayloads) { + const response = await testServer.authlessAgent.post('/forgot-password').send(invalidPayload); + expect(response.statusCode).toBe(400); - const storedOwner = await Db.collections.User.findOneByOrFail({ email: owner.email }); - expect(storedOwner.resetPasswordToken).toBeNull(); - }), - ); + const storedOwner = await Db.collections.User.findOneByOrFail({ email: owner.email }); + expect(storedOwner.resetPasswordToken).toBeNull(); + } }); test('should fail if user is not found', async () => { config.set('userManagement.emails.mode', 'smtp'); - const response = await authlessAgent.post('/forgot-password').send({ email: randomEmail() }); + const response = await testServer.authlessAgent + .post('/forgot-password') + .send({ email: randomEmail() }); expect(response.statusCode).toBe(200); // expect 200 to remain vague }); @@ -152,7 +150,7 @@ describe('GET /resolve-password-token', () => { resetPasswordTokenExpiration, }); - const response = await authlessAgent + const response = await testServer.authlessAgent .get('/resolve-password-token') .query({ userId: owner.id, token: resetPasswordToken }); @@ -160,9 +158,13 @@ describe('GET /resolve-password-token', () => { }); test('should fail with invalid inputs', async () => { - const first = await authlessAgent.get('/resolve-password-token').query({ token: uuid() }); + const first = await testServer.authlessAgent + .get('/resolve-password-token') + .query({ token: uuid() }); - const second = await authlessAgent.get('/resolve-password-token').query({ userId: owner.id }); + const second = await testServer.authlessAgent + .get('/resolve-password-token') + .query({ userId: owner.id }); for (const response of [first, second]) { expect(response.statusCode).toBe(400); @@ -170,7 +172,7 @@ describe('GET /resolve-password-token', () => { }); test('should fail if user is not found', async () => { - const response = await authlessAgent + const response = await testServer.authlessAgent .get('/resolve-password-token') .query({ userId: owner.id, token: uuid() }); @@ -186,7 +188,7 @@ describe('GET /resolve-password-token', () => { resetPasswordTokenExpiration, }); - const response = await authlessAgent + const response = await testServer.authlessAgent .get('/resolve-password-token') .query({ userId: owner.id, 
token: resetPasswordToken }); @@ -206,7 +208,7 @@ describe('POST /change-password', () => { resetPasswordTokenExpiration, }); - const response = await authlessAgent.post('/change-password').send({ + const response = await testServer.authlessAgent.post('/change-password').send({ token: resetPasswordToken, userId: owner.id, password: passwordToStore, @@ -258,15 +260,15 @@ describe('POST /change-password', () => { }, ]; - await Promise.all( - invalidPayloads.map(async (invalidPayload) => { - const response = await authlessAgent.post('/change-password').query(invalidPayload); - expect(response.statusCode).toBe(400); + for (const invalidPayload of invalidPayloads) { + const response = await testServer.authlessAgent + .post('/change-password') + .query(invalidPayload); + expect(response.statusCode).toBe(400); - const { password: storedPassword } = await Db.collections.User.findOneByOrFail({}); - expect(owner.password).toBe(storedPassword); - }), - ); + const { password: storedPassword } = await Db.collections.User.findOneByOrFail({}); + expect(owner.password).toBe(storedPassword); + } }); test('should fail when token has expired', async () => { @@ -277,7 +279,7 @@ describe('POST /change-password', () => { resetPasswordTokenExpiration, }); - const response = await authlessAgent.post('/change-password').send({ + const response = await testServer.authlessAgent.post('/change-password').send({ token: resetPasswordToken, userId: owner.id, password: passwordToStore, diff --git a/packages/cli/test/integration/publicApi/credentials.test.ts b/packages/cli/test/integration/publicApi/credentials.test.ts index f6466ab6e5..30ea0dbf2d 100644 --- a/packages/cli/test/integration/publicApi/credentials.test.ts +++ b/packages/cli/test/integration/publicApi/credentials.test.ts @@ -4,8 +4,9 @@ import * as Db from '@/Db'; import type { Role } from '@db/entities/Role'; import type { User } from '@db/entities/User'; import { RESPONSE_ERROR_MESSAGES } from '@/constants'; + import { randomApiKey, randomName, randomString } from '../shared/random'; -import * as utils from '../shared/utils'; +import * as utils from '../shared/utils/'; import type { CredentialPayload, SaveCredentialFunction } from '../shared/types'; import * as testDb from '../shared/testDb'; @@ -18,14 +19,11 @@ let authMemberAgent: SuperAgentTest; let saveCredential: SaveCredentialFunction; -beforeAll(async () => { - const app = await utils.initTestServer({ - endpointGroups: ['publicApi'], - applyAuth: false, - enablePublicAPI: true, - }); +const testServer = utils.setupTestServer({ endpointGroups: ['publicApi'] }); - await utils.initConfigFile(); +beforeAll(async () => { + // TODO: mock encryption key + await utils.initEncryptionKey(); const [globalOwnerRole, fetchedGlobalMemberRole, _, fetchedCredentialOwnerRole] = await testDb.getAllRoles(); @@ -36,18 +34,8 @@ beforeAll(async () => { owner = await testDb.addApiKey(await testDb.createUserShell(globalOwnerRole)); member = await testDb.createUser({ globalRole: globalMemberRole, apiKey: randomApiKey() }); - authOwnerAgent = utils.createAgent(app, { - apiPath: 'public', - version: 1, - auth: true, - user: owner, - }); - authMemberAgent = utils.createAgent(app, { - apiPath: 'public', - version: 1, - auth: true, - user: member, - }); + authOwnerAgent = testServer.publicApiAgentFor(owner); + authMemberAgent = testServer.publicApiAgentFor(member); saveCredential = testDb.affixRoleToSaveCredential(credentialOwnerRole); @@ -58,10 +46,6 @@ beforeEach(async () => { await testDb.truncate(['SharedCredentials', 
'Credentials']); }); -afterAll(async () => { - await testDb.terminate(); -}); - describe('POST /credentials', () => { test('should create credentials', async () => { const payload = { diff --git a/packages/cli/test/integration/publicApi/executions.test.ts b/packages/cli/test/integration/publicApi/executions.test.ts index 1165069666..2f5216a8b9 100644 --- a/packages/cli/test/integration/publicApi/executions.test.ts +++ b/packages/cli/test/integration/publicApi/executions.test.ts @@ -1,14 +1,11 @@ -import type { Application } from 'express'; import type { SuperAgentTest } from 'supertest'; -import config from '@/config'; import type { User } from '@db/entities/User'; import type { ActiveWorkflowRunner } from '@/ActiveWorkflowRunner'; import { randomApiKey } from '../shared/random'; -import * as utils from '../shared/utils'; +import * as utils from '../shared/utils/'; import * as testDb from '../shared/testDb'; -let app: Application; let owner: User; let user1: User; let user2: User; @@ -17,19 +14,16 @@ let authUser1Agent: SuperAgentTest; let authUser2Agent: SuperAgentTest; let workflowRunner: ActiveWorkflowRunner; -beforeAll(async () => { - app = await utils.initTestServer({ - endpointGroups: ['publicApi'], - applyAuth: false, - enablePublicAPI: true, - }); +const testServer = utils.setupTestServer({ endpointGroups: ['publicApi'] }); +beforeAll(async () => { const globalOwnerRole = await testDb.getGlobalOwnerRole(); const globalUserRole = await testDb.getGlobalMemberRole(); owner = await testDb.createUser({ globalRole: globalOwnerRole, apiKey: randomApiKey() }); user1 = await testDb.createUser({ globalRole: globalUserRole, apiKey: randomApiKey() }); user2 = await testDb.createUser({ globalRole: globalUserRole, apiKey: randomApiKey() }); + // TODO: mock BinaryDataManager instead await utils.initBinaryManager(); await utils.initNodeTypes(); @@ -46,39 +40,15 @@ beforeEach(async () => { 'Settings', ]); - authOwnerAgent = utils.createAgent(app, { - apiPath: 'public', - auth: true, - user: owner, - version: 1, - }); - - authUser1Agent = utils.createAgent(app, { - apiPath: 'public', - auth: true, - user: user1, - version: 1, - }); - - authUser2Agent = utils.createAgent(app, { - apiPath: 'public', - auth: true, - user: user2, - version: 1, - }); - - config.set('userManagement.disabled', false); - config.set('userManagement.isInstanceOwnerSetUp', true); + authOwnerAgent = testServer.publicApiAgentFor(owner); + authUser1Agent = testServer.publicApiAgentFor(user1); + authUser2Agent = testServer.publicApiAgentFor(user2); }); afterEach(async () => { await workflowRunner?.removeAll(); }); -afterAll(async () => { - await testDb.terminate(); -}); - const testWithAPIKey = (method: 'get' | 'post' | 'put' | 'delete', url: string, apiKey: string | null) => async () => { void authOwnerAgent.set({ 'X-N8N-API-KEY': apiKey }); diff --git a/packages/cli/test/integration/publicApi/users.ee.test.ts b/packages/cli/test/integration/publicApi/users.ee.test.ts index 069cf90e21..95b48c243e 100644 --- a/packages/cli/test/integration/publicApi/users.ee.test.ts +++ b/packages/cli/test/integration/publicApi/users.ee.test.ts @@ -1,124 +1,141 @@ -import type express from 'express'; +import type { SuperAgentTest } from 'supertest'; import validator from 'validator'; import { v4 as uuid } from 'uuid'; -import config from '@/config'; -import type { Role } from '@/databases/entities/Role'; -import { randomApiKey } from '../shared/random'; - -import * as utils from '../shared/utils'; -import * as testDb from '../shared/testDb'; - 
+import type { Role } from '@db/entities/Role'; import { License } from '@/License'; -let app: express.Application; +import { randomApiKey } from '../shared/random'; +import * as utils from '../shared/utils/'; +import * as testDb from '../shared/testDb'; + +utils.mockInstance(License, { + getUsersLimit: jest.fn().mockReturnValue(-1), +}); + +const testServer = utils.setupTestServer({ endpointGroups: ['publicApi'] }); + let globalOwnerRole: Role; let globalMemberRole: Role; -const licenseLike = { - getUsersLimit: jest.fn().mockReturnValue(-1), -}; - -utils.mockInstance(License, licenseLike); - beforeAll(async () => { - app = await utils.initTestServer({ - endpointGroups: ['publicApi'], - applyAuth: false, - enablePublicAPI: true, - }); - - await testDb.init(); - - const [fetchedGlobalOwnerRole, fetchedGlobalMemberRole] = await testDb.getAllRoles(); - - globalOwnerRole = fetchedGlobalOwnerRole; - globalMemberRole = fetchedGlobalMemberRole; + [globalOwnerRole, globalMemberRole] = await testDb.getAllRoles(); }); beforeEach(async () => { - // do not combine calls - shared tables must be cleared first and separately - await testDb.truncate(['SharedCredentials', 'SharedWorkflow']); - await testDb.truncate(['User', 'Workflow', 'Credentials']); - - config.set('userManagement.disabled', false); - config.set('userManagement.isInstanceOwnerSetUp', true); - config.set('userManagement.emails.mode', 'smtp'); -}); - -afterAll(async () => { - await testDb.terminate(); + await testDb.truncate(['SharedCredentials', 'SharedWorkflow', 'Workflow', 'Credentials', 'User']); }); describe('With license unlimited quota:users', () => { - test('GET /users should fail due to missing API Key', async () => { - const owner = await testDb.createUser({ globalRole: globalOwnerRole }); - - const authOwnerAgent = utils.createAgent(app, { - apiPath: 'public', - version: 1, - auth: true, - user: owner, + describe('GET /users', () => { + test('should fail due to missing API Key', async () => { + const owner = await testDb.createUser({ globalRole: globalOwnerRole }); + const authOwnerAgent = testServer.publicApiAgentFor(owner); + await authOwnerAgent.get('/users').expect(401); }); - await testDb.createUser(); + test('should fail due to invalid API Key', async () => { + const owner = await testDb.createUser({ + globalRole: globalOwnerRole, + apiKey: randomApiKey(), + }); + owner.apiKey = 'invalid-key'; + const authOwnerAgent = testServer.publicApiAgentFor(owner); + await authOwnerAgent.get('/users').expect(401); + }); - const response = await authOwnerAgent.get('/users'); + test('should fail due to member trying to access owner only endpoint', async () => { + const member = await testDb.createUser({ apiKey: randomApiKey() }); + const authMemberAgent = testServer.publicApiAgentFor(member); + await authMemberAgent.get('/users').expect(403); + }); - expect(response.statusCode).toBe(401); + test('should return all users', async () => { + const owner = await testDb.createUser({ + globalRole: globalOwnerRole, + apiKey: randomApiKey(), + }); + + const authOwnerAgent = testServer.publicApiAgentFor(owner); + + await testDb.createUser(); + + const response = await authOwnerAgent.get('/users').expect(200); + expect(response.body.data.length).toBe(2); + expect(response.body.nextCursor).toBeNull(); + + for (const user of response.body.data) { + const { + id, + email, + firstName, + lastName, + personalizationAnswers, + globalRole, + password, + resetPasswordToken, + isPending, + createdAt, + updatedAt, + } = user; + + 
expect(validator.isUUID(id)).toBe(true); + expect(email).toBeDefined(); + expect(firstName).toBeDefined(); + expect(lastName).toBeDefined(); + expect(personalizationAnswers).toBeUndefined(); + expect(password).toBeUndefined(); + expect(resetPasswordToken).toBeUndefined(); + expect(isPending).toBe(false); + expect(globalRole).toBeUndefined(); + expect(createdAt).toBeDefined(); + expect(updatedAt).toBeDefined(); + } + }); }); - test('GET /users should fail due to invalid API Key', async () => { - const owner = await testDb.createUser({ globalRole: globalOwnerRole, apiKey: randomApiKey() }); - - owner.apiKey = null; - - const authOwnerAgent = utils.createAgent(app, { - apiPath: 'public', - version: 1, - auth: true, - user: owner, + describe('GET /users/:id', () => { + test('should fail due to missing API Key', async () => { + const owner = await testDb.createUser({ globalRole: globalOwnerRole }); + const authOwnerAgent = testServer.publicApiAgentFor(owner); + await authOwnerAgent.get(`/users/${owner.id}`).expect(401); }); - const response = await authOwnerAgent.get('/users'); - - expect(response.statusCode).toBe(401); - }); - - test('GET /users should fail due to member trying to access owner only endpoint', async () => { - const member = await testDb.createUser({ apiKey: randomApiKey() }); - - const authOwnerAgent = utils.createAgent(app, { - apiPath: 'public', - version: 1, - auth: true, - user: member, + test('should fail due to invalid API Key', async () => { + const owner = await testDb.createUser({ + globalRole: globalOwnerRole, + apiKey: randomApiKey(), + }); + owner.apiKey = 'invalid-key'; + const authOwnerAgent = testServer.publicApiAgentFor(owner); + await authOwnerAgent.get(`/users/${owner.id}`).expect(401); }); - const response = await authOwnerAgent.get('/users'); - - expect(response.statusCode).toBe(403); - }); - - test('GET /users should return all users', async () => { - const owner = await testDb.createUser({ globalRole: globalOwnerRole, apiKey: randomApiKey() }); - - const authOwnerAgent = utils.createAgent(app, { - apiPath: 'public', - version: 1, - auth: true, - user: owner, + test('should fail due to member trying to access owner only endpoint', async () => { + const member = await testDb.createUser({ apiKey: randomApiKey() }); + const authMemberAgent = testServer.publicApiAgentFor(member); + await authMemberAgent.get(`/users/${member.id}`).expect(403); + }); + test('should return 404 for non-existing id ', async () => { + const owner = await testDb.createUser({ + globalRole: globalOwnerRole, + apiKey: randomApiKey(), + }); + const authOwnerAgent = testServer.publicApiAgentFor(owner); + await authOwnerAgent.get(`/users/${uuid()}`).expect(404); }); - await testDb.createUser(); + test('should return a pending user', async () => { + const owner = await testDb.createUser({ + globalRole: globalOwnerRole, + apiKey: randomApiKey(), + }); - const response = await authOwnerAgent.get('/users'); + const { id: memberId } = await testDb.createUserShell(globalMemberRole); - expect(response.statusCode).toBe(200); - expect(response.body.data.length).toBe(2); - expect(response.body.nextCursor).toBeNull(); + const authOwnerAgent = testServer.publicApiAgentFor(owner); + const response = await authOwnerAgent.get(`/users/${memberId}`).expect(200); - for (const user of response.body.data) { const { id, email, @@ -131,7 +148,55 @@ describe('With license unlimited quota:users', () => { isPending, createdAt, updatedAt, - } = user; + } = response.body; + + expect(validator.isUUID(id)).toBe(true); 
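+        // fields destructured from response.body: the assertions below confirm that sensitive values (password, resetPasswordToken, personalizationAnswers) are never returned by the public API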
+ expect(email).toBeDefined(); + expect(firstName).toBeDefined(); + expect(lastName).toBeDefined(); + expect(personalizationAnswers).toBeUndefined(); + expect(password).toBeUndefined(); + expect(resetPasswordToken).toBeUndefined(); + expect(globalRole).toBeUndefined(); + expect(createdAt).toBeDefined(); + expect(isPending).toBeDefined(); + expect(isPending).toBeTruthy(); + expect(updatedAt).toBeDefined(); + }); + }); + + describe('GET /users/:email', () => { + test('with non-existing email should return 404', async () => { + const owner = await testDb.createUser({ + globalRole: globalOwnerRole, + apiKey: randomApiKey(), + }); + const authOwnerAgent = testServer.publicApiAgentFor(owner); + await authOwnerAgent.get('/users/jhondoe@gmail.com').expect(404); + }); + + test('should return a user', async () => { + const owner = await testDb.createUser({ + globalRole: globalOwnerRole, + apiKey: randomApiKey(), + }); + + const authOwnerAgent = testServer.publicApiAgentFor(owner); + const response = await authOwnerAgent.get(`/users/${owner.email}`).expect(200); + + const { + id, + email, + firstName, + lastName, + personalizationAnswers, + globalRole, + password, + resetPasswordToken, + isPending, + createdAt, + updatedAt, + } = response.body; expect(validator.isUUID(id)).toBe(true); expect(email).toBeDefined(); @@ -144,200 +209,28 @@ describe('With license unlimited quota:users', () => { expect(globalRole).toBeUndefined(); expect(createdAt).toBeDefined(); expect(updatedAt).toBeDefined(); - } - }); - - test('GET /users/:identifier should fail due to missing API Key', async () => { - const owner = await testDb.createUser({ globalRole: globalOwnerRole }); - - const authOwnerAgent = utils.createAgent(app, { - apiPath: 'public', - version: 1, - auth: true, - user: owner, }); - - await testDb.createUser(); - - const response = await authOwnerAgent.get(`/users/${owner.id}`); - - expect(response.statusCode).toBe(401); - }); - - test('GET /users/:identifier should fail due to invalid API Key', async () => { - const owner = await testDb.createUser({ globalRole: globalOwnerRole, apiKey: randomApiKey() }); - - owner.apiKey = null; - - const authOwnerAgent = utils.createAgent(app, { - apiPath: 'public', - version: 1, - auth: true, - user: owner, - }); - - const response = await authOwnerAgent.get(`/users/${owner.id}`); - - expect(response.statusCode).toBe(401); - }); - - test('GET /users/:identifier should fail due to member trying to access owner only endpoint', async () => { - const member = await testDb.createUser({ apiKey: randomApiKey() }); - - const authOwnerAgent = utils.createAgent(app, { - apiPath: 'public', - version: 1, - auth: true, - user: member, - }); - - const response = await authOwnerAgent.get(`/users/${member.id}`); - - expect(response.statusCode).toBe(403); - }); - - test('GET /users/:email with non-existing email should return 404', async () => { - const owner = await testDb.createUser({ globalRole: globalOwnerRole, apiKey: randomApiKey() }); - - const authOwnerAgent = utils.createAgent(app, { - apiPath: 'public', - version: 1, - auth: true, - user: owner, - }); - - const response = await authOwnerAgent.get('/users/jhondoe@gmail.com'); - - expect(response.statusCode).toBe(404); - }); - - test('GET /users/:id with non-existing id should return 404', async () => { - const owner = await testDb.createUser({ globalRole: globalOwnerRole, apiKey: randomApiKey() }); - - const authOwnerAgent = utils.createAgent(app, { - apiPath: 'public', - version: 1, - auth: true, - user: owner, - }); - - const 
response = await authOwnerAgent.get(`/users/${uuid()}`); - - expect(response.statusCode).toBe(404); - }); - - test('GET /users/:email should return a user', async () => { - const owner = await testDb.createUser({ globalRole: globalOwnerRole, apiKey: randomApiKey() }); - - const authOwnerAgent = utils.createAgent(app, { - apiPath: 'public', - version: 1, - auth: true, - user: owner, - }); - - const response = await authOwnerAgent.get(`/users/${owner.email}`); - - expect(response.statusCode).toBe(200); - - const { - id, - email, - firstName, - lastName, - personalizationAnswers, - globalRole, - password, - resetPasswordToken, - isPending, - createdAt, - updatedAt, - } = response.body; - - expect(validator.isUUID(id)).toBe(true); - expect(email).toBeDefined(); - expect(firstName).toBeDefined(); - expect(lastName).toBeDefined(); - expect(personalizationAnswers).toBeUndefined(); - expect(password).toBeUndefined(); - expect(resetPasswordToken).toBeUndefined(); - expect(isPending).toBe(false); - expect(globalRole).toBeUndefined(); - expect(createdAt).toBeDefined(); - expect(updatedAt).toBeDefined(); - }); - - test('GET /users/:id should return a pending user', async () => { - const owner = await testDb.createUser({ globalRole: globalOwnerRole, apiKey: randomApiKey() }); - - const { id: memberId } = await testDb.createUserShell(globalMemberRole); - - const authOwnerAgent = utils.createAgent(app, { - apiPath: 'public', - version: 1, - auth: true, - user: owner, - }); - - const response = await authOwnerAgent.get(`/users/${memberId}`); - - expect(response.statusCode).toBe(200); - - const { - id, - email, - firstName, - lastName, - personalizationAnswers, - globalRole, - password, - resetPasswordToken, - isPending, - createdAt, - updatedAt, - } = response.body; - - expect(validator.isUUID(id)).toBe(true); - expect(email).toBeDefined(); - expect(firstName).toBeDefined(); - expect(lastName).toBeDefined(); - expect(personalizationAnswers).toBeUndefined(); - expect(password).toBeUndefined(); - expect(resetPasswordToken).toBeUndefined(); - expect(globalRole).toBeUndefined(); - expect(createdAt).toBeDefined(); - expect(isPending).toBeDefined(); - expect(isPending).toBeTruthy(); - expect(updatedAt).toBeDefined(); }); }); describe('With license without quota:users', () => { + let authOwnerAgent: SuperAgentTest; + beforeEach(async () => { utils.mockInstance(License, { getUsersLimit: jest.fn().mockReturnValue(null) }); + + const owner = await testDb.createUser({ + globalRole: globalOwnerRole, + apiKey: randomApiKey(), + }); + authOwnerAgent = testServer.publicApiAgentFor(owner); }); test('GET /users should fail due to invalid license', async () => { - const member = await testDb.createUser({ apiKey: randomApiKey() }); - const authOwnerAgent = utils.createAgent(app, { - apiPath: 'public', - version: 1, - auth: true, - user: member, - }); - const response = await authOwnerAgent.get('/users'); - expect(response.statusCode).toBe(403); + await authOwnerAgent.get('/users').expect(403); }); test('GET /users/:id should fail due to invalid license', async () => { - const member = await testDb.createUser({ apiKey: randomApiKey() }); - const authOwnerAgent = utils.createAgent(app, { - apiPath: 'public', - version: 1, - auth: true, - user: member, - }); - const response = await authOwnerAgent.get(`/users/${member.id}`); - expect(response.statusCode).toBe(403); + await authOwnerAgent.get(`/users/${uuid()}`).expect(403); }); }); diff --git a/packages/cli/test/integration/publicApi/workflows.test.ts 
b/packages/cli/test/integration/publicApi/workflows.test.ts index 89b0a5a8b2..cc8e9d234a 100644 --- a/packages/cli/test/integration/publicApi/workflows.test.ts +++ b/packages/cli/test/integration/publicApi/workflows.test.ts @@ -1,18 +1,14 @@ -import type { Application } from 'express'; import type { SuperAgentTest } from 'supertest'; import * as Db from '@/Db'; -import config from '@/config'; import type { Role } from '@db/entities/Role'; import type { TagEntity } from '@db/entities/TagEntity'; import type { User } from '@db/entities/User'; import type { ActiveWorkflowRunner } from '@/ActiveWorkflowRunner'; import { randomApiKey } from '../shared/random'; -import * as utils from '../shared/utils'; +import * as utils from '../shared/utils/'; import * as testDb from '../shared/testDb'; -// import { generateNanoId } from '@/databases/utils/generators'; -let app: Application; let workflowOwnerRole: Role; let owner: User; let member: User; @@ -20,13 +16,9 @@ let authOwnerAgent: SuperAgentTest; let authMemberAgent: SuperAgentTest; let workflowRunner: ActiveWorkflowRunner; -beforeAll(async () => { - app = await utils.initTestServer({ - endpointGroups: ['publicApi'], - applyAuth: false, - enablePublicAPI: true, - }); +const testServer = utils.setupTestServer({ endpointGroups: ['publicApi'] }); +beforeAll(async () => { const [globalOwnerRole, globalMemberRole, fetchedWorkflowOwnerRole] = await testDb.getAllRoles(); workflowOwnerRole = fetchedWorkflowOwnerRole; @@ -41,7 +33,7 @@ beforeAll(async () => { apiKey: randomApiKey(), }); - await utils.initConfigFile(); + await utils.initEncryptionKey(); await utils.initNodeTypes(); workflowRunner = await utils.initActiveWorkflowRunner(); }); @@ -49,32 +41,14 @@ beforeAll(async () => { beforeEach(async () => { await testDb.truncate(['SharedCredentials', 'SharedWorkflow', 'Tag', 'Workflow', 'Credentials']); - authOwnerAgent = utils.createAgent(app, { - apiPath: 'public', - auth: true, - user: owner, - version: 1, - }); - - authMemberAgent = utils.createAgent(app, { - apiPath: 'public', - auth: true, - user: member, - version: 1, - }); - - config.set('userManagement.disabled', false); - config.set('userManagement.isInstanceOwnerSetUp', true); + authOwnerAgent = testServer.publicApiAgentFor(owner); + authMemberAgent = testServer.publicApiAgentFor(member); }); afterEach(async () => { await workflowRunner?.removeAll(); }); -afterAll(async () => { - await testDb.terminate(); -}); - const testWithAPIKey = (method: 'get' | 'post' | 'put' | 'delete', url: string, apiKey: string | null) => async () => { void authOwnerAgent.set({ 'X-N8N-API-KEY': apiKey }); diff --git a/packages/cli/test/integration/saml/saml.api.test.ts b/packages/cli/test/integration/saml/saml.api.test.ts index a9bda3c53f..302070df16 100644 --- a/packages/cli/test/integration/saml/saml.api.test.ts +++ b/packages/cli/test/integration/saml/saml.api.test.ts @@ -1,22 +1,21 @@ import { Container } from 'typedi'; import type { SuperAgentTest } from 'supertest'; +import type { AuthenticationMethod } from 'n8n-workflow'; import type { User } from '@db/entities/User'; import { setSamlLoginEnabled } from '@/sso/saml/samlHelpers'; import { getCurrentAuthenticationMethod, setCurrentAuthenticationMethod } from '@/sso/ssoHelpers'; import { SamlUrls } from '@/sso/saml/constants'; -import { License } from '@/License'; -import { randomEmail, randomName, randomValidPassword } from '../shared/random'; -import * as testDb from '../shared/testDb'; -import * as utils from '../shared/utils'; -import { sampleConfig } from 
'./sampleMetadata'; import { InternalHooks } from '@/InternalHooks'; import { SamlService } from '@/sso/saml/saml.service.ee'; import type { SamlUserAttributes } from '@/sso/saml/types/samlUserAttributes'; -import type { AuthenticationMethod } from 'n8n-workflow'; + +import { randomEmail, randomName, randomValidPassword } from '../shared/random'; +import * as testDb from '../shared/testDb'; +import * as utils from '../shared/utils/'; +import { sampleConfig } from './sampleMetadata'; let someUser: User; let owner: User; -let noAuthMemberAgent: SuperAgentTest; let authMemberAgent: SuperAgentTest; let authOwnerAgent: SuperAgentTest; @@ -24,18 +23,16 @@ async function enableSaml(enable: boolean) { await setSamlLoginEnabled(enable); } -beforeAll(async () => { - Container.get(License).isSamlEnabled = () => true; - const app = await utils.initTestServer({ endpointGroups: ['me', 'saml'] }); - owner = await testDb.createOwner(); - someUser = await testDb.createUser(); - authOwnerAgent = utils.createAuthAgent(app)(owner); - authMemberAgent = utils.createAgent(app, { auth: true, user: someUser }); - noAuthMemberAgent = utils.createAgent(app, { auth: false, user: someUser }); +const testServer = utils.setupTestServer({ + endpointGroups: ['me', 'saml'], + enabledFeatures: ['feat:saml'], }); -afterAll(async () => { - await testDb.terminate(); +beforeAll(async () => { + owner = await testDb.createOwner(); + someUser = await testDb.createUser(); + authOwnerAgent = testServer.authAgentFor(owner); + authMemberAgent = testServer.authAgentFor(someUser); }); describe('Instance owner', () => { @@ -181,7 +178,7 @@ describe('Check endpoint permissions', () => { }); test(`should be able to access GET ${SamlUrls.initSSO}`, async () => { - const response = await authOwnerAgent.get(`/sso/saml${SamlUrls.initSSO}`).expect(200); + await authOwnerAgent.get(`/sso/saml${SamlUrls.initSSO}`).expect(200); }); test(`should be able to access GET ${SamlUrls.configTest}`, async () => { @@ -220,7 +217,7 @@ describe('Check endpoint permissions', () => { }); test(`should be able to access GET ${SamlUrls.initSSO}`, async () => { - const response = await authMemberAgent.get(`/sso/saml${SamlUrls.initSSO}`).expect(200); + await authMemberAgent.get(`/sso/saml${SamlUrls.initSSO}`).expect(200); }); test(`should NOT be able to access GET ${SamlUrls.configTest}`, async () => { @@ -229,41 +226,43 @@ describe('Check endpoint permissions', () => { }); describe('Non-Authenticated User', () => { test(`should be able to access ${SamlUrls.metadata}`, async () => { - await noAuthMemberAgent.get(`/sso/saml${SamlUrls.metadata}`).expect(200); + await testServer.authlessAgent.get(`/sso/saml${SamlUrls.metadata}`).expect(200); }); test(`should NOT be able to access GET ${SamlUrls.config}`, async () => { - await noAuthMemberAgent.get(`/sso/saml${SamlUrls.config}`).expect(401); + await testServer.authlessAgent.get(`/sso/saml${SamlUrls.config}`).expect(401); }); test(`should NOT be able to access POST ${SamlUrls.config}`, async () => { - await noAuthMemberAgent.post(`/sso/saml${SamlUrls.config}`).expect(401); + await testServer.authlessAgent.post(`/sso/saml${SamlUrls.config}`).expect(401); }); test(`should NOT be able to access POST ${SamlUrls.configToggleEnabled}`, async () => { - await noAuthMemberAgent.post(`/sso/saml${SamlUrls.configToggleEnabled}`).expect(401); + await testServer.authlessAgent.post(`/sso/saml${SamlUrls.configToggleEnabled}`).expect(401); }); test(`should be able to access GET ${SamlUrls.acs}`, async () => { // Note that 401 here is 
coming from the missing SAML object, // not from not being able to access the endpoint, so this is expected! - const response = await noAuthMemberAgent.get(`/sso/saml${SamlUrls.acs}`).expect(401); + const response = await testServer.authlessAgent.get(`/sso/saml${SamlUrls.acs}`).expect(401); expect(response.text).toContain('SAML Authentication failed'); }); test(`should be able to access POST ${SamlUrls.acs}`, async () => { // Note that 401 here is coming from the missing SAML object, // not from not being able to access the endpoint, so this is expected! - const response = await noAuthMemberAgent.post(`/sso/saml${SamlUrls.acs}`).expect(401); + const response = await testServer.authlessAgent.post(`/sso/saml${SamlUrls.acs}`).expect(401); expect(response.text).toContain('SAML Authentication failed'); }); test(`should be able to access GET ${SamlUrls.initSSO}`, async () => { - const response = await noAuthMemberAgent.get(`/sso/saml${SamlUrls.initSSO}`).expect(200); + const response = await testServer.authlessAgent + .get(`/sso/saml${SamlUrls.initSSO}`) + .expect(200); }); test(`should NOT be able to access GET ${SamlUrls.configTest}`, async () => { - await noAuthMemberAgent.get(`/sso/saml${SamlUrls.configTest}`).expect(401); + await testServer.authlessAgent.get(`/sso/saml${SamlUrls.configTest}`).expect(401); }); }); }); diff --git a/packages/cli/test/integration/shared/constants.ts b/packages/cli/test/integration/shared/constants.ts index d259011696..affb51990b 100644 --- a/packages/cli/test/integration/shared/constants.ts +++ b/packages/cli/test/integration/shared/constants.ts @@ -24,29 +24,6 @@ export const LOGGED_OUT_RESPONSE_BODY = { }, }; -/** - * Routes requiring a valid `n8n-auth` cookie for a user, either owner or member. - */ -export const ROUTES_REQUIRING_AUTHENTICATION: Readonly = [ - 'PATCH /me', - 'PATCH /me/password', - 'POST /me/survey', - 'POST /owner/setup', - 'GET /non-existent', -]; - -/** - * Routes requiring a valid `n8n-auth` cookie for an owner. - */ -export const ROUTES_REQUIRING_AUTHORIZATION: Readonly = [ - 'POST /users', - 'DELETE /users/123', - 'POST /users/123/reinvite', - 'GET /owner/pre-setup', - 'POST /owner/setup', - 'POST /owner/skip-setup', -]; - export const COMMUNITY_PACKAGE_VERSION = { CURRENT: '0.1.0', UPDATED: '0.2.0', diff --git a/packages/cli/test/integration/shared/testDb.ts b/packages/cli/test/integration/shared/testDb.ts index c9df9026b8..92fb56ea32 100644 --- a/packages/cli/test/integration/shared/testDb.ts +++ b/packages/cli/test/integration/shared/testDb.ts @@ -32,8 +32,8 @@ import type { InstalledPackagePayload, PostgresSchemaSection, } from './types'; -import type { ExecutionData } from '@/databases/entities/ExecutionData'; -import { generateNanoId } from '@/databases/utils/generators'; +import type { ExecutionData } from '@db/entities/ExecutionData'; +import { generateNanoId } from '@db/utils/generators'; export type TestDBType = 'postgres' | 'mysql'; @@ -181,6 +181,7 @@ export async function createUser(attributes: Partial = {}): Promise firstName: firstName ?? randomName(), lastName: lastName ?? randomName(), globalRoleId: (globalRole ?? 
(await getGlobalMemberRole())).id, + globalRole, ...rest, }; diff --git a/packages/cli/test/integration/shared/types.d.ts b/packages/cli/test/integration/shared/types.ts similarity index 67% rename from packages/cli/test/integration/shared/types.d.ts rename to packages/cli/test/integration/shared/types.ts index bfbf1fd767..8e25c62a5b 100644 --- a/packages/cli/test/integration/shared/types.d.ts +++ b/packages/cli/test/integration/shared/types.ts @@ -1,17 +1,15 @@ +import type { Application } from 'express'; import type { ICredentialDataDecryptedObject, ICredentialNodeAccess } from 'n8n-workflow'; import type { SuperAgentTest } from 'supertest'; +import type { Server } from 'http'; import type { CredentialsEntity } from '@db/entities/CredentialsEntity'; import type { User } from '@db/entities/User'; -import type { ICredentialsDb, IDatabaseCollections } from '@/Interfaces'; +import type { BooleanLicenseFeature, ICredentialsDb, IDatabaseCollections } from '@/Interfaces'; export type CollectionName = keyof IDatabaseCollections; -export type ApiPath = 'internal' | 'public'; - -export type AuthAgent = (user: User) => SuperAgentTest; - -type EndpointGroup = +export type EndpointGroup = | 'me' | 'users' | 'auth' @@ -28,6 +26,20 @@ type EndpointGroup = | 'license' | 'variables'; +export interface SetupProps { + applyAuth?: boolean; + endpointGroups?: EndpointGroup[]; + enabledFeatures?: BooleanLicenseFeature[]; +} + +export interface TestServer { + app: Application; + httpServer: Server; + authAgentFor: (user: User) => SuperAgentTest; + publicApiAgentFor: (user: User) => SuperAgentTest; + authlessAgent: SuperAgentTest; +} + export type CredentialPayload = { name: string; type: string; diff --git a/packages/cli/test/integration/shared/utils/communityNodes.ts b/packages/cli/test/integration/shared/utils/communityNodes.ts new file mode 100644 index 0000000000..cff99bb90a --- /dev/null +++ b/packages/cli/test/integration/shared/utils/communityNodes.ts @@ -0,0 +1,29 @@ +import { NODE_PACKAGE_PREFIX } from '@/constants'; +import { InstalledPackages } from '@db/entities/InstalledPackages'; + +import { randomName } from '../random'; +import { COMMUNITY_NODE_VERSION, COMMUNITY_PACKAGE_VERSION } from '../constants'; +import type { InstalledNodePayload, InstalledPackagePayload } from '../types'; + +export function installedPackagePayload(): InstalledPackagePayload { + return { + packageName: NODE_PACKAGE_PREFIX + randomName(), + installedVersion: COMMUNITY_PACKAGE_VERSION.CURRENT, + }; +} + +export function installedNodePayload(packageName: string): InstalledNodePayload { + const nodeName = randomName(); + return { + name: nodeName, + type: nodeName, + latestVersion: COMMUNITY_NODE_VERSION.CURRENT, + package: packageName, + }; +} + +export const emptyPackage = async () => { + const installedPackage = new InstalledPackages(); + installedPackage.installedNodes = []; + return installedPackage; +}; diff --git a/packages/cli/test/integration/shared/utils.ts b/packages/cli/test/integration/shared/utils/index.ts similarity index 55% rename from packages/cli/test/integration/shared/utils.ts rename to packages/cli/test/integration/shared/utils/index.ts index ef6dbf2e96..01ecb0154b 100644 --- a/packages/cli/test/integration/shared/utils.ts +++ b/packages/cli/test/integration/shared/utils/index.ts @@ -1,10 +1,7 @@ import { Container } from 'typedi'; import { randomBytes } from 'crypto'; import { existsSync } from 'fs'; - -import bodyParser from 'body-parser'; import { CronJob } from 'cron'; -import express from 
'express'; import set from 'lodash/set'; import { BinaryDataManager, UserSettings } from 'n8n-core'; import type { @@ -18,275 +15,20 @@ import type { TriggerTime, } from 'n8n-workflow'; import { deepCopy } from 'n8n-workflow'; -import { LoggerProxy, NodeHelpers, toCronExpression } from 'n8n-workflow'; -import type superagent from 'superagent'; -import request from 'supertest'; -import { URL } from 'url'; -import { mock } from 'jest-mock-extended'; -import type { DeepPartial } from 'ts-essentials'; +import { NodeHelpers, toCronExpression } from 'n8n-workflow'; +import type request from 'supertest'; +import { v4 as uuid } from 'uuid'; + import config from '@/config'; import * as Db from '@/Db'; import { WorkflowEntity } from '@db/entities/WorkflowEntity'; -import { ExternalHooks } from '@/ExternalHooks'; import { ActiveWorkflowRunner } from '@/ActiveWorkflowRunner'; -import { workflowsController } from '@/workflows/workflows.controller'; -import { AUTH_COOKIE_NAME, NODE_PACKAGE_PREFIX } from '@/constants'; -import { credentialsController } from '@/credentials/credentials.controller'; -import { InstalledPackages } from '@db/entities/InstalledPackages'; -import type { User } from '@db/entities/User'; -import { getLogger } from '@/Logger'; -import { loadPublicApiVersions } from '@/PublicApi/'; -import { issueJWT } from '@/auth/jwt'; -import { UserManagementMailer } from '@/UserManagement/email/UserManagementMailer'; -import { - AUTHLESS_ENDPOINTS, - COMMUNITY_NODE_VERSION, - COMMUNITY_PACKAGE_VERSION, - PUBLIC_API_REST_PATH_SEGMENT, - REST_PATH_SEGMENT, -} from './constants'; -import { randomName } from './random'; -import type { - ApiPath, - EndpointGroup, - InstalledNodePayload, - InstalledPackagePayload, -} from './types'; -import { licenseController } from '@/license/license.controller'; -import { registerController } from '@/decorators'; -import { - AuthController, - LdapController, - MeController, - NodesController, - OwnerController, - PasswordResetController, - UsersController, -} from '@/controllers'; -import { setupAuthMiddlewares } from '@/middlewares'; -import * as testDb from '../shared/testDb'; +import { AUTH_COOKIE_NAME } from '@/constants'; -import { v4 as uuid } from 'uuid'; -import { InternalHooks } from '@/InternalHooks'; import { LoadNodesAndCredentials } from '@/LoadNodesAndCredentials'; -import { PostHogClient } from '@/posthog'; -import { variablesController } from '@/environments/variables/variables.controller'; -import { LdapManager } from '@/Ldap/LdapManager.ee'; -import { handleLdapInit } from '@/Ldap/helpers'; -import { Push } from '@/push'; -import { setSamlLoginEnabled } from '@/sso/saml/samlHelpers'; -import { SamlService } from '@/sso/saml/saml.service.ee'; -import { SamlController } from '@/sso/saml/routes/saml.controller.ee'; -import { EventBusController } from '@/eventbus/eventBus.controller'; -import { License } from '@/License'; -import { SourceControlService } from '@/environments/sourceControl/sourceControl.service.ee'; -import { SourceControlController } from '@/environments/sourceControl/sourceControl.controller.ee'; -import { SourceControlPreferencesService } from '@/environments/sourceControl/sourceControlPreferences.service.ee'; -export const mockInstance = ( - ctor: new (...args: any[]) => T, - data: DeepPartial | undefined = undefined, -) => { - const instance = mock(data); - Container.set(ctor, instance); - return instance; -}; - -/** - * Initialize a test server. 
- */ -export async function initTestServer({ - applyAuth = true, - endpointGroups, - enablePublicAPI = false, -}: { - applyAuth?: boolean; - endpointGroups?: EndpointGroup[]; - enablePublicAPI?: boolean; -}) { - await testDb.init(); - const testServer = { - app: express(), - restEndpoint: REST_PATH_SEGMENT, - publicApiEndpoint: PUBLIC_API_REST_PATH_SEGMENT, - externalHooks: {}, - }; - - const logger = getLogger(); - LoggerProxy.init(logger); - - // Mock all telemetry. - mockInstance(InternalHooks); - mockInstance(PostHogClient); - - testServer.app.use(bodyParser.json()); - testServer.app.use(bodyParser.urlencoded({ extended: true })); - - config.set('userManagement.jwtSecret', 'My JWT secret'); - config.set('userManagement.isInstanceOwnerSetUp', false); - - if (applyAuth) { - setupAuthMiddlewares( - testServer.app, - AUTHLESS_ENDPOINTS, - REST_PATH_SEGMENT, - Db.collections.User, - ); - } - - if (!endpointGroups) return testServer.app; - - if ( - endpointGroups.includes('credentials') || - endpointGroups.includes('me') || - endpointGroups.includes('users') || - endpointGroups.includes('passwordReset') - ) { - testServer.externalHooks = Container.get(ExternalHooks); - } - - const [routerEndpoints, functionEndpoints] = classifyEndpointGroups(endpointGroups); - - if (routerEndpoints.length) { - const map: Record = { - credentials: { controller: credentialsController, path: 'credentials' }, - workflows: { controller: workflowsController, path: 'workflows' }, - license: { controller: licenseController, path: 'license' }, - variables: { controller: variablesController, path: 'variables' }, - }; - - if (enablePublicAPI) { - const { apiRouters } = await loadPublicApiVersions(testServer.publicApiEndpoint); - map.publicApi = apiRouters; - } - - for (const group of routerEndpoints) { - if (group === 'publicApi') { - testServer.app.use(...(map[group] as express.Router[])); - } else { - testServer.app.use(`/${testServer.restEndpoint}/${map[group].path}`, map[group].controller); - } - } - } - - if (functionEndpoints.length) { - const externalHooks = Container.get(ExternalHooks); - const internalHooks = Container.get(InternalHooks); - const mailer = Container.get(UserManagementMailer); - const repositories = Db.collections; - - for (const group of functionEndpoints) { - switch (group) { - case 'eventBus': - registerController(testServer.app, config, new EventBusController()); - break; - case 'auth': - registerController( - testServer.app, - config, - new AuthController({ config, logger, internalHooks, repositories }), - ); - break; - case 'ldap': - Container.get(License).isLdapEnabled = () => true; - await handleLdapInit(); - const { service, sync } = LdapManager.getInstance(); - registerController( - testServer.app, - config, - new LdapController(service, sync, internalHooks), - ); - break; - case 'saml': - await setSamlLoginEnabled(true); - const samlService = Container.get(SamlService); - registerController(testServer.app, config, new SamlController(samlService)); - break; - case 'sourceControl': - const sourceControlService = Container.get(SourceControlService); - const sourceControlPreferencesService = Container.get(SourceControlPreferencesService); - registerController( - testServer.app, - config, - new SourceControlController(sourceControlService, sourceControlPreferencesService), - ); - break; - case 'nodes': - registerController( - testServer.app, - config, - new NodesController( - config, - Container.get(LoadNodesAndCredentials), - Container.get(Push), - internalHooks, - ), - ); - case 'me': - 
registerController( - testServer.app, - config, - new MeController({ logger, externalHooks, internalHooks, repositories }), - ); - break; - case 'passwordReset': - registerController( - testServer.app, - config, - new PasswordResetController({ - config, - logger, - externalHooks, - internalHooks, - mailer, - repositories, - }), - ); - break; - case 'owner': - registerController( - testServer.app, - config, - new OwnerController({ config, logger, internalHooks, repositories }), - ); - break; - case 'users': - registerController( - testServer.app, - config, - new UsersController({ - config, - mailer, - externalHooks, - internalHooks, - repositories, - activeWorkflowRunner: Container.get(ActiveWorkflowRunner), - logger, - }), - ); - } - } - } - - return testServer.app; -} - -/** - * Classify endpoint groups into `routerEndpoints` (newest, using `express.Router`), - * and `functionEndpoints` (legacy, namespaced inside a function). - */ -const classifyEndpointGroups = (endpointGroups: EndpointGroup[]) => { - const routerEndpoints: EndpointGroup[] = []; - const functionEndpoints: EndpointGroup[] = []; - - const ROUTER_GROUP = ['credentials', 'workflows', 'publicApi', 'license', 'variables']; - - endpointGroups.forEach((group) => - (ROUTER_GROUP.includes(group) ? routerEndpoints : functionEndpoints).push(group), - ); - - return [routerEndpoints, functionEndpoints]; -}; +export { mockInstance } from './mocking'; +export { setupTestServer } from './testServer'; // ---------------------------------- // initializers @@ -656,7 +398,8 @@ export async function initBinaryManager() { /** * Initialize a user settings config file if non-existent. */ -export async function initConfigFile() { +// TODO: this should be mocked +export async function initEncryptionKey() { const settingsPath = UserSettings.getUserSettingsPath(); if (!existsSync(settingsPath)) { @@ -665,62 +408,6 @@ export async function initConfigFile() { } } -// ---------------------------------- -// request agent -// ---------------------------------- - -/** - * Create a request agent, optionally with an auth cookie. - */ -export function createAgent( - app: express.Application, - options?: { auth: boolean; user: User; apiPath?: ApiPath; version?: string | number }, -) { - const agent = request.agent(app); - - if (options?.apiPath === undefined || options?.apiPath === 'internal') { - void agent.use(prefix(REST_PATH_SEGMENT)); - if (options?.auth && options?.user) { - const { token } = issueJWT(options.user); - agent.jar.setCookie(`${AUTH_COOKIE_NAME}=${token}`); - } - } - - if (options?.apiPath === 'public') { - void agent.use(prefix(`${PUBLIC_API_REST_PATH_SEGMENT}/v${options?.version}`)); - - if (options?.auth && options?.user.apiKey) { - void agent.set({ 'X-N8N-API-KEY': options.user.apiKey }); - } - } - - return agent; -} - -export function createAuthAgent(app: express.Application) { - return (user: User) => createAgent(app, { auth: true, user }); -} - -/** - * Plugin to prefix a path segment into a request URL pathname. 
- * - * Example: http://127.0.0.1:62100/me/password → http://127.0.0.1:62100/rest/me/password - */ -export function prefix(pathSegment: string) { - return async function (request: superagent.SuperAgentRequest) { - const url = new URL(request.url); - - // enforce consistency at call sites - if (url.pathname[0] !== '/') { - throw new Error('Pathname must start with a forward slash'); - } - - url.pathname = pathSegment + url.pathname; - request.url = url.toString(); - return request; - }; -} - /** * Extract the value (token) of the auth cookie in a response. */ @@ -765,29 +452,7 @@ export const setInstanceOwnerSetUp = async (value: boolean) => { // community nodes // ---------------------------------- -export function installedPackagePayload(): InstalledPackagePayload { - return { - packageName: NODE_PACKAGE_PREFIX + randomName(), - installedVersion: COMMUNITY_PACKAGE_VERSION.CURRENT, - }; -} - -export function installedNodePayload(packageName: string): InstalledNodePayload { - const nodeName = randomName(); - return { - name: nodeName, - type: nodeName, - latestVersion: COMMUNITY_NODE_VERSION.CURRENT, - package: packageName, - }; -} - -export const emptyPackage = async () => { - const installedPackage = new InstalledPackages(); - installedPackage.installedNodes = []; - - return installedPackage; -}; +export * from './communityNodes'; // ---------------------------------- // workflow diff --git a/packages/cli/test/integration/shared/utils/mocking.ts b/packages/cli/test/integration/shared/utils/mocking.ts new file mode 100644 index 0000000000..a7fc5d183e --- /dev/null +++ b/packages/cli/test/integration/shared/utils/mocking.ts @@ -0,0 +1,12 @@ +import { Container } from 'typedi'; +import { mock } from 'jest-mock-extended'; +import type { DeepPartial } from 'ts-essentials'; + +export const mockInstance = <T>( + ctor: new (...args: unknown[]) => T, + data: DeepPartial<T> | undefined = undefined, +) => { + const instance = mock<T>(data); + Container.set(ctor, instance); + return instance; +}; diff --git a/packages/cli/test/integration/shared/utils/testServer.ts b/packages/cli/test/integration/shared/utils/testServer.ts new file mode 100644 index 0000000000..8f77ee75df --- /dev/null +++ b/packages/cli/test/integration/shared/utils/testServer.ts @@ -0,0 +1,275 @@ +import { Container } from 'typedi'; +import cookieParser from 'cookie-parser'; +import bodyParser from 'body-parser'; +import express from 'express'; +import { LoggerProxy } from 'n8n-workflow'; +import type superagent from 'superagent'; +import request from 'supertest'; +import { URL } from 'url'; + +import config from '@/config'; +import * as Db from '@/Db'; +import { ExternalHooks } from '@/ExternalHooks'; +import { ActiveWorkflowRunner } from '@/ActiveWorkflowRunner'; +import { workflowsController } from '@/workflows/workflows.controller'; +import { AUTH_COOKIE_NAME } from '@/constants'; +import { credentialsController } from '@/credentials/credentials.controller'; +import type { User } from '@db/entities/User'; +import { getLogger } from '@/Logger'; +import { loadPublicApiVersions } from '@/PublicApi/'; +import { issueJWT } from '@/auth/jwt'; +import { UserManagementMailer } from '@/UserManagement/email/UserManagementMailer'; +import { licenseController } from '@/license/license.controller'; +import { registerController } from '@/decorators'; +import { + AuthController, + LdapController, + MeController, + NodesController, + OwnerController, + PasswordResetController, + UsersController, +} from '@/controllers'; +import { setupAuthMiddlewares }
from '@/middlewares'; + +import { InternalHooks } from '@/InternalHooks'; +import { LoadNodesAndCredentials } from '@/LoadNodesAndCredentials'; +import { PostHogClient } from '@/posthog'; +import { variablesController } from '@/environments/variables/variables.controller'; +import { LdapManager } from '@/Ldap/LdapManager.ee'; +import { handleLdapInit } from '@/Ldap/helpers'; +import { Push } from '@/push'; +import { setSamlLoginEnabled } from '@/sso/saml/samlHelpers'; +import { SamlController } from '@/sso/saml/routes/saml.controller.ee'; +import { EventBusController } from '@/eventbus/eventBus.controller'; +import { License } from '@/License'; +import { SourceControlController } from '@/environments/sourceControl/sourceControl.controller.ee'; + +import * as testDb from '../../shared/testDb'; +import { AUTHLESS_ENDPOINTS, PUBLIC_API_REST_PATH_SEGMENT, REST_PATH_SEGMENT } from '../constants'; +import type { EndpointGroup, SetupProps, TestServer } from '../types'; +import { mockInstance } from './mocking'; + +/** + * Plugin to prefix a path segment into a request URL pathname. + * + * Example: http://127.0.0.1:62100/me/password → http://127.0.0.1:62100/rest/me/password + */ +function prefix(pathSegment: string) { + return async function (request: superagent.SuperAgentRequest) { + const url = new URL(request.url); + + // enforce consistency at call sites + if (url.pathname[0] !== '/') { + throw new Error('Pathname must start with a forward slash'); + } + + url.pathname = pathSegment + url.pathname; + request.url = url.toString(); + return request; + }; +} + +/** + * Classify endpoint groups into `routerEndpoints` (newest, using `express.Router`), + * and `functionEndpoints` (legacy, namespaced inside a function). + */ +const classifyEndpointGroups = (endpointGroups: EndpointGroup[]) => { + const routerEndpoints: EndpointGroup[] = []; + const functionEndpoints: EndpointGroup[] = []; + + const ROUTER_GROUP = ['credentials', 'workflows', 'publicApi', 'license', 'variables']; + + endpointGroups.forEach((group) => + (ROUTER_GROUP.includes(group) ? routerEndpoints : functionEndpoints).push(group), + ); + + return [routerEndpoints, functionEndpoints]; +}; + +function createAgent(app: express.Application, options?: { auth: boolean; user: User }) { + const agent = request.agent(app); + void agent.use(prefix(REST_PATH_SEGMENT)); + if (options?.auth && options?.user) { + try { + const { token } = issueJWT(options.user); + agent.jar.setCookie(`${AUTH_COOKIE_NAME}=${token}`); + } catch {} + } + return agent; +} + +function publicApiAgent( + app: express.Application, + { user, version = 1 }: { user: User; version?: number }, +) { + const agent = request.agent(app); + void agent.use(prefix(`${PUBLIC_API_REST_PATH_SEGMENT}/v${version}`)); + if (user.apiKey) { + void agent.set({ 'X-N8N-API-KEY': user.apiKey }); + } + return agent; +} + +export const setupTestServer = ({ + endpointGroups, + applyAuth = true, + enabledFeatures, +}: SetupProps): TestServer => { + const app = express(); + const testServer: TestServer = { + app, + httpServer: app.listen(0), + authAgentFor: (user: User) => createAgent(app, { auth: true, user }), + authlessAgent: createAgent(app), + publicApiAgentFor: (user) => publicApiAgent(app, { user }), + }; + + beforeAll(async () => { + await testDb.init(); + + const logger = getLogger(); + LoggerProxy.init(logger); + + // Mock all telemetry. 
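+    // mockInstance registers jest-mock-extended stubs in the DI container, so InternalHooks and PostHogClient emit no real telemetry during tests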
+ mockInstance(InternalHooks); + mockInstance(PostHogClient); + + app.use(bodyParser.json()); + app.use(bodyParser.urlencoded({ extended: true })); + app.use(cookieParser()); + + config.set('userManagement.jwtSecret', 'My JWT secret'); + config.set('userManagement.isInstanceOwnerSetUp', true); + + if (enabledFeatures) { + Container.get(License).isFeatureEnabled = (feature) => enabledFeatures.includes(feature); + } + + const enablePublicAPI = endpointGroups?.includes('publicApi'); + if (applyAuth && !enablePublicAPI) { + setupAuthMiddlewares(app, AUTHLESS_ENDPOINTS, REST_PATH_SEGMENT); + } + + if (!endpointGroups) return; + + const [routerEndpoints, functionEndpoints] = classifyEndpointGroups(endpointGroups); + + if (routerEndpoints.length) { + const map: Record = { + credentials: { controller: credentialsController, path: 'credentials' }, + workflows: { controller: workflowsController, path: 'workflows' }, + license: { controller: licenseController, path: 'license' }, + variables: { controller: variablesController, path: 'variables' }, + }; + + if (enablePublicAPI) { + const { apiRouters } = await loadPublicApiVersions(PUBLIC_API_REST_PATH_SEGMENT); + map.publicApi = apiRouters; + } + + for (const group of routerEndpoints) { + if (group === 'publicApi') { + app.use(...(map[group] as express.Router[])); + } else { + app.use(`/${REST_PATH_SEGMENT}/${map[group].path}`, map[group].controller); + } + } + } + + if (functionEndpoints.length) { + const externalHooks = Container.get(ExternalHooks); + const internalHooks = Container.get(InternalHooks); + const mailer = Container.get(UserManagementMailer); + const repositories = Db.collections; + + for (const group of functionEndpoints) { + switch (group) { + case 'eventBus': + registerController(app, config, new EventBusController()); + break; + case 'auth': + registerController( + app, + config, + new AuthController({ config, logger, internalHooks, repositories }), + ); + break; + case 'ldap': + Container.get(License).isLdapEnabled = () => true; + await handleLdapInit(); + const { service, sync } = LdapManager.getInstance(); + registerController(app, config, new LdapController(service, sync, internalHooks)); + break; + case 'saml': + await setSamlLoginEnabled(true); + registerController(app, config, Container.get(SamlController)); + break; + case 'sourceControl': + registerController(app, config, Container.get(SourceControlController)); + break; + case 'nodes': + registerController( + app, + config, + new NodesController( + config, + Container.get(LoadNodesAndCredentials), + Container.get(Push), + internalHooks, + ), + ); + case 'me': + registerController( + app, + config, + new MeController({ logger, externalHooks, internalHooks, repositories }), + ); + break; + case 'passwordReset': + registerController( + app, + config, + new PasswordResetController({ + config, + logger, + externalHooks, + internalHooks, + mailer, + repositories, + }), + ); + break; + case 'owner': + registerController( + app, + config, + new OwnerController({ config, logger, internalHooks, repositories }), + ); + break; + case 'users': + registerController( + app, + config, + new UsersController({ + config, + mailer, + externalHooks, + internalHooks, + repositories, + activeWorkflowRunner: Container.get(ActiveWorkflowRunner), + logger, + }), + ); + } + } + } + }); + + afterAll(async () => { + await testDb.terminate(); + testServer.httpServer.close(); + }); + + return testServer; +}; diff --git a/packages/cli/test/integration/users.api.test.ts 
b/packages/cli/test/integration/users.api.test.ts index b600fd838c..f0baf4e26d 100644 --- a/packages/cli/test/integration/users.api.test.ts +++ b/packages/cli/test/integration/users.api.test.ts @@ -21,7 +21,7 @@ import { randomValidPassword, } from './shared/random'; import * as testDb from './shared/testDb'; -import * as utils from './shared/utils'; +import * as utils from './shared/utils/'; jest.mock('@/UserManagement/email/NodeMailer'); @@ -29,13 +29,11 @@ let globalMemberRole: Role; let workflowOwnerRole: Role; let credentialOwnerRole: Role; let owner: User; -let authlessAgent: SuperAgentTest; let authOwnerAgent: SuperAgentTest; -let authAgentFor: (user: User) => SuperAgentTest; + +const testServer = utils.setupTestServer({ endpointGroups: ['users'] }); beforeAll(async () => { - const app = await utils.initTestServer({ endpointGroups: ['users'] }); - const [ globalOwnerRole, fetchedGlobalMemberRole, @@ -49,9 +47,7 @@ beforeAll(async () => { owner = await testDb.createUser({ globalRole: globalOwnerRole }); - authlessAgent = utils.createAgent(app); - authAgentFor = utils.createAuthAgent(app); - authOwnerAgent = authAgentFor(owner); + authOwnerAgent = testServer.authAgentFor(owner); }); beforeEach(async () => { @@ -60,16 +56,10 @@ beforeEach(async () => { jest.mock('@/config'); - config.set('userManagement.disabled', false); - config.set('userManagement.isInstanceOwnerSetUp', true); config.set('userManagement.emails.mode', 'smtp'); config.set('userManagement.emails.smtp.host', ''); }); -afterAll(async () => { - await testDb.terminate(); -}); - describe('GET /users', () => { test('should return all users (for owner)', async () => { await testDb.createUser({ globalRole: globalMemberRole }); @@ -108,7 +98,7 @@ describe('GET /users', () => { test('should return all users (for member)', async () => { const member = await testDb.createUser({ globalRole: globalMemberRole }); - const response = await authAgentFor(member).get('/users'); + const response = await testServer.authAgentFor(member).get('/users'); expect(response.statusCode).toBe(200); expect(response.body.data.length).toBe(2); @@ -253,7 +243,9 @@ describe('POST /users/:id', () => { password: randomValidPassword(), }; - const response = await authlessAgent.post(`/users/${memberShell.id}`).send(memberData); + const response = await testServer.authlessAgent + .post(`/users/${memberShell.id}`) + .send(memberData); const { id, @@ -325,20 +317,20 @@ describe('POST /users/:id', () => { }, ]; - await Promise.all( - invalidPayloads.map(async (invalidPayload) => { - const response = await authlessAgent.post(`/users/${memberShell.id}`).send(invalidPayload); - expect(response.statusCode).toBe(400); + for (const invalidPayload of invalidPayloads) { + const response = await testServer.authlessAgent + .post(`/users/${memberShell.id}`) + .send(invalidPayload); + expect(response.statusCode).toBe(400); - const storedUser = await Db.collections.User.findOneOrFail({ - where: { email: memberShellEmail }, - }); + const storedUser = await Db.collections.User.findOneOrFail({ + where: { email: memberShellEmail }, + }); - expect(storedUser.firstName).toBeNull(); - expect(storedUser.lastName).toBeNull(); - expect(storedUser.password).toBeNull(); - }), - ); + expect(storedUser.firstName).toBeNull(); + expect(storedUser.lastName).toBeNull(); + expect(storedUser.password).toBeNull(); + } }); test('should fail with already accepted invite', async () => { @@ -351,7 +343,7 @@ describe('POST /users/:id', () => { password: randomValidPassword(), }; - const response = 
await authlessAgent.post(`/users/${member.id}`).send(newMemberData); + const response = await testServer.authlessAgent.post(`/users/${member.id}`).send(newMemberData); expect(response.statusCode).toBe(400); @@ -379,15 +371,6 @@ describe('POST /users', () => { expect(response.body.data[0].user.inviteAcceptUrl).toBeDefined(); }); - test('should fail if user management is disabled', async () => { - config.set('userManagement.disabled', true); - config.set('userManagement.isInstanceOwnerSetUp', false); - - const response = await authOwnerAgent.post('/users').send([{ email: randomEmail() }]); - - expect(response.statusCode).toBe(400); - }); - test('should email invites and create user shells but ignore existing', async () => { const member = await testDb.createUser({ globalRole: globalMemberRole }); const memberShell = await testDb.createUserShell(globalMemberRole); diff --git a/packages/cli/test/integration/variables.test.ts b/packages/cli/test/integration/variables.test.ts index abe70572a8..3fdd1b4109 100644 --- a/packages/cli/test/integration/variables.test.ts +++ b/packages/cli/test/integration/variables.test.ts @@ -1,33 +1,28 @@ -import type { Application } from 'express'; - -import type { User } from '@/databases/entities/User'; -import * as testDb from './shared/testDb'; -import * as utils from './shared/utils'; - -import type { AuthAgent } from './shared/types'; +import type { SuperAgentTest } from 'supertest'; +import type { Variables } from '@db/entities/Variables'; import { License } from '@/License'; +import * as testDb from './shared/testDb'; +import * as utils from './shared/utils/'; + +let authOwnerAgent: SuperAgentTest; +let authMemberAgent: SuperAgentTest; -// mock that credentialsSharing is not enabled -let app: Application; -let ownerUser: User; -let memberUser: User; -let authAgent: AuthAgent; -let variablesSpy: jest.SpyInstance; const licenseLike = { isVariablesEnabled: jest.fn().mockReturnValue(true), getVariablesLimit: jest.fn().mockReturnValue(-1), + isWithinUsersLimit: jest.fn().mockReturnValue(true), }; -beforeAll(async () => { - app = await utils.initTestServer({ endpointGroups: ['variables'] }); +const testServer = utils.setupTestServer({ endpointGroups: ['variables'] }); - await utils.initConfigFile(); +beforeAll(async () => { + await utils.initEncryptionKey(); utils.mockInstance(License, licenseLike); - ownerUser = await testDb.createOwner(); - memberUser = await testDb.createUser(); - - authAgent = utils.createAuthAgent(app); + const owner = await testDb.createOwner(); + authOwnerAgent = testServer.authAgentFor(owner); + const member = await testDb.createUser(); + authMemberAgent = testServer.authAgentFor(member); }); beforeEach(async () => { @@ -36,343 +31,306 @@ beforeEach(async () => { licenseLike.getVariablesLimit.mockReturnValue(-1); }); -afterAll(async () => { - await testDb.terminate(); -}); - // ---------------------------------------- // GET /variables - fetch all variables // ---------------------------------------- +describe('GET /variables', () => { + beforeEach(async () => { + await Promise.all([ + testDb.createVariable('test1', 'value1'), + testDb.createVariable('test2', 'value2'), + ]); + }); -test('GET /variables should return all variables for an owner', async () => { - await Promise.all([ - testDb.createVariable('test1', 'value1'), - testDb.createVariable('test2', 'value2'), - ]); + test('should return all variables for an owner', async () => { + const response = await authOwnerAgent.get('/variables'); + expect(response.statusCode).toBe(200); + 
expect(response.body.data.length).toBe(2); + }); - const response = await authAgent(ownerUser).get('/variables'); - expect(response.statusCode).toBe(200); - expect(response.body.data.length).toBe(2); -}); - -test('GET /variables should return all variables for a member', async () => { - await Promise.all([ - testDb.createVariable('test1', 'value1'), - testDb.createVariable('test2', 'value2'), - ]); - - const response = await authAgent(memberUser).get('/variables'); - expect(response.statusCode).toBe(200); - expect(response.body.data.length).toBe(2); + test('should return all variables for a member', async () => { + const response = await authMemberAgent.get('/variables'); + expect(response.statusCode).toBe(200); + expect(response.body.data.length).toBe(2); + }); }); // ---------------------------------------- // GET /variables/:id - get a single variable // ---------------------------------------- +describe('GET /variables/:id', () => { + let var1: Variables, var2: Variables; + beforeEach(async () => { + [var1, var2] = await Promise.all([ + testDb.createVariable('test1', 'value1'), + testDb.createVariable('test2', 'value2'), + ]); + }); -test('GET /variables/:id should return a single variable for an owner', async () => { - const [var1, var2] = await Promise.all([ - testDb.createVariable('test1', 'value1'), - testDb.createVariable('test2', 'value2'), - ]); + test('should return a single variable for an owner', async () => { + const response1 = await authOwnerAgent.get(`/variables/${var1.id}`); + expect(response1.statusCode).toBe(200); + expect(response1.body.data.key).toBe('test1'); - const response1 = await authAgent(ownerUser).get(`/variables/${var1.id}`); - expect(response1.statusCode).toBe(200); - expect(response1.body.data.key).toBe('test1'); + const response2 = await authOwnerAgent.get(`/variables/${var2.id}`); + expect(response2.statusCode).toBe(200); + expect(response2.body.data.key).toBe('test2'); + }); - const response2 = await authAgent(ownerUser).get(`/variables/${var2.id}`); - expect(response2.statusCode).toBe(200); - expect(response2.body.data.key).toBe('test2'); -}); + test('should return a single variable for a member', async () => { + const response1 = await authMemberAgent.get(`/variables/${var1.id}`); + expect(response1.statusCode).toBe(200); + expect(response1.body.data.key).toBe('test1'); -test('GET /variables/:id should return a single variable for a member', async () => { - const [var1, var2] = await Promise.all([ - testDb.createVariable('test1', 'value1'), - testDb.createVariable('test2', 'value2'), - ]); - - const response1 = await authAgent(memberUser).get(`/variables/${var1.id}`); - expect(response1.statusCode).toBe(200); - expect(response1.body.data.key).toBe('test1'); - - const response2 = await authAgent(memberUser).get(`/variables/${var2.id}`); - expect(response2.statusCode).toBe(200); - expect(response2.body.data.key).toBe('test2'); + const response2 = await authMemberAgent.get(`/variables/${var2.id}`); + expect(response2.statusCode).toBe(200); + expect(response2.body.data.key).toBe('test2'); + }); }); // ---------------------------------------- // POST /variables - create a new variable // ---------------------------------------- - -test('POST /variables should create a new credential and return it for an owner', async () => { - const toCreate = { - key: 'create1', - value: 'createvalue1', - }; - const response = await authAgent(ownerUser).post('/variables').send(toCreate); - expect(response.statusCode).toBe(200); - 
expect(response.body.data.key).toBe(toCreate.key); - expect(response.body.data.value).toBe(toCreate.value); - - const [byId, byKey] = await Promise.all([ - testDb.getVariableById(response.body.data.id), - testDb.getVariableByKey(toCreate.key), - ]); - - expect(byId).not.toBeNull(); - expect(byId!.key).toBe(toCreate.key); - expect(byId!.value).toBe(toCreate.value); - - expect(byKey).not.toBeNull(); - expect(byKey!.id).toBe(response.body.data.id); - expect(byKey!.value).toBe(toCreate.value); -}); - -test('POST /variables should not create a new credential and return it for a member', async () => { - const toCreate = { - key: 'create1', - value: 'createvalue1', - }; - const response = await authAgent(memberUser).post('/variables').send(toCreate); - expect(response.statusCode).toBe(401); - expect(response.body.data?.key).not.toBe(toCreate.key); - expect(response.body.data?.value).not.toBe(toCreate.value); - - const byKey = await testDb.getVariableByKey(toCreate.key); - expect(byKey).toBeNull(); -}); - -test("POST /variables should not create a new credential and return it if the instance doesn't have a license", async () => { - licenseLike.isVariablesEnabled.mockReturnValue(false); - const toCreate = { - key: 'create1', - value: 'createvalue1', - }; - const response = await authAgent(ownerUser).post('/variables').send(toCreate); - expect(response.statusCode).toBe(400); - expect(response.body.data?.key).not.toBe(toCreate.key); - expect(response.body.data?.value).not.toBe(toCreate.value); - - const byKey = await testDb.getVariableByKey(toCreate.key); - expect(byKey).toBeNull(); -}); - -test('POST /variables should fail to create a new credential and if one with the same key exists', async () => { - const toCreate = { - key: 'create1', - value: 'createvalue1', - }; - await testDb.createVariable(toCreate.key, toCreate.value); - const response = await authAgent(ownerUser).post('/variables').send(toCreate); - expect(response.statusCode).toBe(500); - expect(response.body.data?.key).not.toBe(toCreate.key); - expect(response.body.data?.value).not.toBe(toCreate.value); -}); - -test('POST /variables should not fail if variable limit not reached', async () => { - licenseLike.getVariablesLimit.mockReturnValue(5); - let i = 1; - let toCreate = { +describe('POST /variables', () => { + const generatePayload = (i = 1) => ({ key: `create${i}`, value: `createvalue${i}`, - }; - while (i < 3) { + }); + const toCreate = generatePayload(); + + test('should create a new credential and return it for an owner', async () => { + const response = await authOwnerAgent.post('/variables').send(toCreate); + expect(response.statusCode).toBe(200); + expect(response.body.data.key).toBe(toCreate.key); + expect(response.body.data.value).toBe(toCreate.value); + + const [byId, byKey] = await Promise.all([ + testDb.getVariableById(response.body.data.id), + testDb.getVariableByKey(toCreate.key), + ]); + + expect(byId).not.toBeNull(); + expect(byId!.key).toBe(toCreate.key); + expect(byId!.value).toBe(toCreate.value); + + expect(byKey).not.toBeNull(); + expect(byKey!.id).toBe(response.body.data.id); + expect(byKey!.value).toBe(toCreate.value); + }); + + test('should not create a new credential and return it for a member', async () => { + const response = await authMemberAgent.post('/variables').send(toCreate); + expect(response.statusCode).toBe(401); + expect(response.body.data?.key).not.toBe(toCreate.key); + expect(response.body.data?.value).not.toBe(toCreate.value); + + const byKey = await testDb.getVariableByKey(toCreate.key); + 
expect(byKey).toBeNull(); + }); + + test("POST /variables should not create a new credential and return it if the instance doesn't have a license", async () => { + licenseLike.isVariablesEnabled.mockReturnValue(false); + const response = await authOwnerAgent.post('/variables').send(toCreate); + expect(response.statusCode).toBe(400); + expect(response.body.data?.key).not.toBe(toCreate.key); + expect(response.body.data?.value).not.toBe(toCreate.value); + + const byKey = await testDb.getVariableByKey(toCreate.key); + expect(byKey).toBeNull(); + }); + + test('should fail to create a new credential and if one with the same key exists', async () => { await testDb.createVariable(toCreate.key, toCreate.value); - i++; - toCreate = { - key: `create${i}`, - value: `createvalue${i}`, + const response = await authOwnerAgent.post('/variables').send(toCreate); + expect(response.statusCode).toBe(500); + expect(response.body.data?.key).not.toBe(toCreate.key); + expect(response.body.data?.value).not.toBe(toCreate.value); + }); + + test('should not fail if variable limit not reached', async () => { + licenseLike.getVariablesLimit.mockReturnValue(5); + let i = 1; + let toCreate = generatePayload(i); + while (i < 3) { + await testDb.createVariable(toCreate.key, toCreate.value); + i++; + toCreate = generatePayload(i); + } + const response = await authOwnerAgent.post('/variables').send(toCreate); + expect(response.statusCode).toBe(200); + expect(response.body.data?.key).toBe(toCreate.key); + expect(response.body.data?.value).toBe(toCreate.value); + }); + + test('should fail if variable limit reached', async () => { + licenseLike.getVariablesLimit.mockReturnValue(5); + let i = 1; + let toCreate = generatePayload(i); + while (i < 6) { + await testDb.createVariable(toCreate.key, toCreate.value); + i++; + toCreate = generatePayload(i); + } + const response = await authOwnerAgent.post('/variables').send(toCreate); + expect(response.statusCode).toBe(400); + expect(response.body.data?.key).not.toBe(toCreate.key); + expect(response.body.data?.value).not.toBe(toCreate.value); + }); + + test('should fail if key too long', async () => { + const toCreate = { + // 51 'a's + key: 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', + value: 'value', }; - } - const response = await authAgent(ownerUser).post('/variables').send(toCreate); - expect(response.statusCode).toBe(200); - expect(response.body.data?.key).toBe(toCreate.key); - expect(response.body.data?.value).toBe(toCreate.value); -}); + const response = await authOwnerAgent.post('/variables').send(toCreate); + expect(response.statusCode).toBe(400); + expect(response.body.data?.key).not.toBe(toCreate.key); + expect(response.body.data?.value).not.toBe(toCreate.value); + }); -test('POST /variables should fail if variable limit reached', async () => { - licenseLike.getVariablesLimit.mockReturnValue(5); - let i = 1; - let toCreate = { - key: `create${i}`, - value: `createvalue${i}`, - }; - while (i < 6) { - await testDb.createVariable(toCreate.key, toCreate.value); - i++; - toCreate = { - key: `create${i}`, - value: `createvalue${i}`, + test('should fail if value too long', async () => { + const toCreate = { + key: 'key', + // 256 'a's + value: + 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', }; - } - const response = await 
authAgent(ownerUser).post('/variables').send(toCreate); - expect(response.statusCode).toBe(400); - expect(response.body.data?.key).not.toBe(toCreate.key); - expect(response.body.data?.value).not.toBe(toCreate.value); -}); + const response = await authOwnerAgent.post('/variables').send(toCreate); + expect(response.statusCode).toBe(400); + expect(response.body.data?.key).not.toBe(toCreate.key); + expect(response.body.data?.value).not.toBe(toCreate.value); + }); -test('POST /variables should fail if key too long', async () => { - const toCreate = { - // 51 'a's - key: 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', - value: 'value', - }; - const response = await authAgent(ownerUser).post('/variables').send(toCreate); - expect(response.statusCode).toBe(400); - expect(response.body.data?.key).not.toBe(toCreate.key); - expect(response.body.data?.value).not.toBe(toCreate.value); -}); - -test('POST /variables should fail if value too long', async () => { - const toCreate = { - key: 'key', - // 256 'a's - value: - 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', - }; - const response = await authAgent(ownerUser).post('/variables').send(toCreate); - expect(response.statusCode).toBe(400); - expect(response.body.data?.key).not.toBe(toCreate.key); - expect(response.body.data?.value).not.toBe(toCreate.value); -}); - -test("POST /variables should fail if key contain's prohibited characters", async () => { - const toCreate = { - // 51 'a's - key: 'te$t', - value: 'value', - }; - const response = await authAgent(ownerUser).post('/variables').send(toCreate); - expect(response.statusCode).toBe(400); - expect(response.body.data?.key).not.toBe(toCreate.key); - expect(response.body.data?.value).not.toBe(toCreate.value); + test("should fail if key contain's prohibited characters", async () => { + const toCreate = { + // 51 'a's + key: 'te$t', + value: 'value', + }; + const response = await authOwnerAgent.post('/variables').send(toCreate); + expect(response.statusCode).toBe(400); + expect(response.body.data?.key).not.toBe(toCreate.key); + expect(response.body.data?.value).not.toBe(toCreate.value); + }); }); // ---------------------------------------- // PATCH /variables/:id - change a variable // ---------------------------------------- - -test('PATCH /variables/:id should modify existing credential if use is an owner', async () => { - const variable = await testDb.createVariable('test1', 'value1'); +describe('PATCH /variables/:id', () => { const toModify = { key: 'create1', value: 'createvalue1', }; - const response = await authAgent(ownerUser).patch(`/variables/${variable.id}`).send(toModify); - expect(response.statusCode).toBe(200); - expect(response.body.data.key).toBe(toModify.key); - expect(response.body.data.value).toBe(toModify.value); - const [byId, byKey] = await Promise.all([ - testDb.getVariableById(response.body.data.id), - testDb.getVariableByKey(toModify.key), - ]); + test('should modify existing credential if use is an owner', async () => { + const variable = await testDb.createVariable('test1', 'value1'); + const response = await authOwnerAgent.patch(`/variables/${variable.id}`).send(toModify); + expect(response.statusCode).toBe(200); + expect(response.body.data.key).toBe(toModify.key); + expect(response.body.data.value).toBe(toModify.value); - expect(byId).not.toBeNull(); - 
expect(byId!.key).toBe(toModify.key); - expect(byId!.value).toBe(toModify.value); + const [byId, byKey] = await Promise.all([ + testDb.getVariableById(response.body.data.id), + testDb.getVariableByKey(toModify.key), + ]); - expect(byKey).not.toBeNull(); - expect(byKey!.id).toBe(response.body.data.id); - expect(byKey!.value).toBe(toModify.value); -}); + expect(byId).not.toBeNull(); + expect(byId!.key).toBe(toModify.key); + expect(byId!.value).toBe(toModify.value); -test('PATCH /variables/:id should modify existing credential if use is an owner', async () => { - const variable = await testDb.createVariable('test1', 'value1'); - const toModify = { - key: 'create1', - value: 'createvalue1', - }; - const response = await authAgent(ownerUser).patch(`/variables/${variable.id}`).send(toModify); - expect(response.statusCode).toBe(200); - expect(response.body.data.key).toBe(toModify.key); - expect(response.body.data.value).toBe(toModify.value); + expect(byKey).not.toBeNull(); + expect(byKey!.id).toBe(response.body.data.id); + expect(byKey!.value).toBe(toModify.value); + }); - const [byId, byKey] = await Promise.all([ - testDb.getVariableById(response.body.data.id), - testDb.getVariableByKey(toModify.key), - ]); + test('should modify existing credential if use is an owner', async () => { + const variable = await testDb.createVariable('test1', 'value1'); + const response = await authOwnerAgent.patch(`/variables/${variable.id}`).send(toModify); + expect(response.statusCode).toBe(200); + expect(response.body.data.key).toBe(toModify.key); + expect(response.body.data.value).toBe(toModify.value); - expect(byId).not.toBeNull(); - expect(byId!.key).toBe(toModify.key); - expect(byId!.value).toBe(toModify.value); + const [byId, byKey] = await Promise.all([ + testDb.getVariableById(response.body.data.id), + testDb.getVariableByKey(toModify.key), + ]); - expect(byKey).not.toBeNull(); - expect(byKey!.id).toBe(response.body.data.id); - expect(byKey!.value).toBe(toModify.value); -}); + expect(byId).not.toBeNull(); + expect(byId!.key).toBe(toModify.key); + expect(byId!.value).toBe(toModify.value); -test('PATCH /variables/:id should not modify existing credential if use is a member', async () => { - const variable = await testDb.createVariable('test1', 'value1'); - const toModify = { - key: 'create1', - value: 'createvalue1', - }; - const response = await authAgent(memberUser).patch(`/variables/${variable.id}`).send(toModify); - expect(response.statusCode).toBe(401); - expect(response.body.data?.key).not.toBe(toModify.key); - expect(response.body.data?.value).not.toBe(toModify.value); + expect(byKey).not.toBeNull(); + expect(byKey!.id).toBe(response.body.data.id); + expect(byKey!.value).toBe(toModify.value); + }); - const byId = await testDb.getVariableById(variable.id); - expect(byId).not.toBeNull(); - expect(byId!.key).not.toBe(toModify.key); - expect(byId!.value).not.toBe(toModify.value); -}); + test('should not modify existing credential if use is a member', async () => { + const variable = await testDb.createVariable('test1', 'value1'); + const response = await authMemberAgent.patch(`/variables/${variable.id}`).send(toModify); + expect(response.statusCode).toBe(401); + expect(response.body.data?.key).not.toBe(toModify.key); + expect(response.body.data?.value).not.toBe(toModify.value); -test('PATCH /variables/:id should not modify existing credential if one with the same key exists', async () => { - const toModify = { - key: 'create1', - value: 'createvalue1', - }; - const [var1, var2] = await Promise.all([ - 
testDb.createVariable('test1', 'value1'), - testDb.createVariable(toModify.key, toModify.value), - ]); - const response = await authAgent(ownerUser).patch(`/variables/${var1.id}`).send(toModify); - expect(response.statusCode).toBe(500); - expect(response.body.data?.key).not.toBe(toModify.key); - expect(response.body.data?.value).not.toBe(toModify.value); + const byId = await testDb.getVariableById(variable.id); + expect(byId).not.toBeNull(); + expect(byId!.key).not.toBe(toModify.key); + expect(byId!.value).not.toBe(toModify.value); + }); - const byId = await testDb.getVariableById(var1.id); - expect(byId).not.toBeNull(); - expect(byId!.key).toBe(var1.key); - expect(byId!.value).toBe(var1.value); + test('should not modify existing credential if one with the same key exists', async () => { + const [var1, var2] = await Promise.all([ + testDb.createVariable('test1', 'value1'), + testDb.createVariable(toModify.key, toModify.value), + ]); + const response = await authOwnerAgent.patch(`/variables/${var1.id}`).send(toModify); + expect(response.statusCode).toBe(500); + expect(response.body.data?.key).not.toBe(toModify.key); + expect(response.body.data?.value).not.toBe(toModify.value); + + const byId = await testDb.getVariableById(var1.id); + expect(byId).not.toBeNull(); + expect(byId!.key).toBe(var1.key); + expect(byId!.value).toBe(var1.value); + }); }); // ---------------------------------------- // DELETE /variables/:id - change a variable // ---------------------------------------- +describe('DELETE /variables/:id', () => { + test('should delete a single credential for an owner', async () => { + const [var1, var2, var3] = await Promise.all([ + testDb.createVariable('test1', 'value1'), + testDb.createVariable('test2', 'value2'), + testDb.createVariable('test3', 'value3'), + ]); -test('DELETE /variables/:id should delete a single credential for an owner', async () => { - const [var1, var2, var3] = await Promise.all([ - testDb.createVariable('test1', 'value1'), - testDb.createVariable('test2', 'value2'), - testDb.createVariable('test3', 'value3'), - ]); + const delResponse = await authOwnerAgent.delete(`/variables/${var1.id}`); + expect(delResponse.statusCode).toBe(200); - const delResponse = await authAgent(ownerUser).delete(`/variables/${var1.id}`); - expect(delResponse.statusCode).toBe(200); + const byId = await testDb.getVariableById(var1.id); + expect(byId).toBeNull(); - const byId = await testDb.getVariableById(var1.id); - expect(byId).toBeNull(); + const getResponse = await authOwnerAgent.get('/variables'); + expect(getResponse.body.data.length).toBe(2); + }); - const getResponse = await authAgent(ownerUser).get('/variables'); - expect(getResponse.body.data.length).toBe(2); -}); - -test('DELETE /variables/:id should not delete a single credential for a member', async () => { - const [var1, var2, var3] = await Promise.all([ - testDb.createVariable('test1', 'value1'), - testDb.createVariable('test2', 'value2'), - testDb.createVariable('test3', 'value3'), - ]); - - const delResponse = await authAgent(memberUser).delete(`/variables/${var1.id}`); - expect(delResponse.statusCode).toBe(401); - - const byId = await testDb.getVariableById(var1.id); - expect(byId).not.toBeNull(); - - const getResponse = await authAgent(memberUser).get('/variables'); - expect(getResponse.body.data.length).toBe(3); + test('should not delete a single credential for a member', async () => { + const [var1, var2, var3] = await Promise.all([ + testDb.createVariable('test1', 'value1'), + testDb.createVariable('test2', 
'value2'), + testDb.createVariable('test3', 'value3'), + ]); + + const delResponse = await authMemberAgent.delete(`/variables/${var1.id}`); + expect(delResponse.statusCode).toBe(401); + + const byId = await testDb.getVariableById(var1.id); + expect(byId).not.toBeNull(); + + const getResponse = await authMemberAgent.get('/variables'); + expect(getResponse.body.data.length).toBe(3); + }); }); diff --git a/packages/cli/test/integration/workflows.controller.ee.test.ts b/packages/cli/test/integration/workflows.controller.ee.test.ts index f2949c3ed5..e52826bf4c 100644 --- a/packages/cli/test/integration/workflows.controller.ee.test.ts +++ b/packages/cli/test/integration/workflows.controller.ee.test.ts @@ -1,19 +1,17 @@ -import { Container } from 'typedi'; import type { SuperAgentTest } from 'supertest'; import { v4 as uuid } from 'uuid'; import type { INode } from 'n8n-workflow'; import * as UserManagementHelpers from '@/UserManagement/UserManagementHelper'; import type { User } from '@db/entities/User'; +import { getSharedWorkflowIds } from '@/WorkflowHelpers'; -import * as utils from './shared/utils'; +import * as utils from './shared/utils/'; import * as testDb from './shared/testDb'; import { createWorkflow, getGlobalMemberRole, getGlobalOwnerRole } from './shared/testDb'; import type { SaveCredentialFunction } from './shared/types'; -import { makeWorkflow } from './shared/utils'; +import { makeWorkflow } from './shared/utils/'; import { randomCredentialPayload } from './shared/random'; -import { License } from '@/License'; -import { getSharedWorkflowIds } from '../../src/WorkflowHelpers'; let owner: User; let member: User; @@ -22,12 +20,14 @@ let authOwnerAgent: SuperAgentTest; let authMemberAgent: SuperAgentTest; let authAnotherMemberAgent: SuperAgentTest; let saveCredential: SaveCredentialFunction; -let sharingSpy: jest.SpyInstance; + +const sharingSpy = jest.spyOn(UserManagementHelpers, 'isSharingEnabled').mockReturnValue(true); +const testServer = utils.setupTestServer({ + endpointGroups: ['workflows'], + enabledFeatures: ['feat:sharing'], +}); beforeAll(async () => { - Container.get(License).isSharingEnabled = () => true; - const app = await utils.initTestServer({ endpointGroups: ['workflows'] }); - const globalOwnerRole = await testDb.getGlobalOwnerRole(); const globalMemberRole = await testDb.getGlobalMemberRole(); const credentialOwnerRole = await testDb.getCredentialOwnerRole(); @@ -36,13 +36,11 @@ beforeAll(async () => { member = await testDb.createUser({ globalRole: globalMemberRole }); anotherMember = await testDb.createUser({ globalRole: globalMemberRole }); - const authAgent = utils.createAuthAgent(app); - authOwnerAgent = authAgent(owner); - authMemberAgent = authAgent(member); - authAnotherMemberAgent = authAgent(anotherMember); + authOwnerAgent = testServer.authAgentFor(owner); + authMemberAgent = testServer.authAgentFor(member); + authAnotherMemberAgent = testServer.authAgentFor(anotherMember); saveCredential = testDb.affixRoleToSaveCredential(credentialOwnerRole); - sharingSpy = jest.spyOn(UserManagementHelpers, 'isSharingEnabled').mockReturnValue(true); await utils.initNodeTypes(); }); @@ -51,10 +49,6 @@ beforeEach(async () => { await testDb.truncate(['Workflow', 'SharedWorkflow']); }); -afterAll(async () => { - await testDb.terminate(); -}); - describe('router should switch based on flag', () => { let savedWorkflowId: string; @@ -196,6 +190,23 @@ describe('GET /workflows', () => { }); }); +describe('GET /workflows/new', () => { + [true, false].forEach((sharingEnabled) 
=> { + test(`should return an auto-incremented name, even when sharing is ${ + sharingEnabled ? 'enabled' : 'disabled' + }`, async () => { + sharingSpy.mockReturnValueOnce(sharingEnabled); + + await createWorkflow({ name: 'My workflow' }, owner); + await createWorkflow({ name: 'My workflow 7' }, owner); + + const response = await authOwnerAgent.get('/workflows/new'); + expect(response.statusCode).toBe(200); + expect(response.body.data.name).toEqual('My workflow 8'); + }); + }); +}); + describe('GET /workflows/:id', () => { test('GET should fail with invalid id due to route rule', async () => { const response = await authOwnerAgent.get('/workflows/potatoes'); diff --git a/packages/cli/test/integration/workflows.controller.test.ts b/packages/cli/test/integration/workflows.controller.test.ts index 62fe7d5e74..dfc3689760 100644 --- a/packages/cli/test/integration/workflows.controller.test.ts +++ b/packages/cli/test/integration/workflows.controller.test.ts @@ -1,57 +1,41 @@ import type { SuperAgentTest } from 'supertest'; import type { IPinData } from 'n8n-workflow'; - -import type { User } from '@db/entities/User'; import * as UserManagementHelpers from '@/UserManagement/UserManagementHelper'; -import * as utils from './shared/utils'; +import * as utils from './shared/utils/'; import * as testDb from './shared/testDb'; -import { makeWorkflow, MOCK_PINDATA } from './shared/utils'; -import * as Db from '@/Db'; +import { makeWorkflow, MOCK_PINDATA } from './shared/utils/'; -let ownerShell: User; let authOwnerAgent: SuperAgentTest; -beforeAll(async () => { - const app = await utils.initTestServer({ endpointGroups: ['workflows'] }); - const globalOwnerRole = await testDb.getGlobalOwnerRole(); - ownerShell = await testDb.createUserShell(globalOwnerRole); - authOwnerAgent = utils.createAgent(app, { auth: true, user: ownerShell }); +jest.spyOn(UserManagementHelpers, 'isSharingEnabled').mockReturnValue(false); +const testServer = utils.setupTestServer({ endpointGroups: ['workflows'] }); - // mock whether sharing is enabled or not - jest.spyOn(UserManagementHelpers, 'isSharingEnabled').mockReturnValue(false); +beforeAll(async () => { + const globalOwnerRole = await testDb.getGlobalOwnerRole(); + const ownerShell = await testDb.createUserShell(globalOwnerRole); + authOwnerAgent = testServer.authAgentFor(ownerShell); }); beforeEach(async () => { await testDb.truncate(['Workflow', 'SharedWorkflow']); }); -afterAll(async () => { - await testDb.terminate(); -}); - describe('POST /workflows', () => { - test('should store pin data for node in workflow', async () => { - const workflow = makeWorkflow({ withPinData: true }); - + const testWithPinData = async (withPinData: boolean) => { + const workflow = makeWorkflow({ withPinData }); const response = await authOwnerAgent.post('/workflows').send(workflow); - expect(response.statusCode).toBe(200); + return (response.body.data as { pinData: IPinData }).pinData; + }; - const { pinData } = response.body.data as { pinData: IPinData }; - + test('should store pin data for node in workflow', async () => { + const pinData = await testWithPinData(true); expect(pinData).toMatchObject(MOCK_PINDATA); }); test('should set pin data to null if no pin data', async () => { - const workflow = makeWorkflow({ withPinData: false }); - - const response = await authOwnerAgent.post('/workflows').send(workflow); - - expect(response.statusCode).toBe(200); - - const { pinData } = response.body.data as { pinData: IPinData }; - + const pinData = await testWithPinData(false); 
expect(pinData).toBeNull(); }); }); @@ -59,19 +43,13 @@ describe('POST /workflows', () => { describe('GET /workflows/:id', () => { test('should return pin data', async () => { const workflow = makeWorkflow({ withPinData: true }); - const workflowCreationResponse = await authOwnerAgent.post('/workflows').send(workflow); const { id } = workflowCreationResponse.body.data as { id: string }; - - const sw = await Db.collections.SharedWorkflow.find(); - const workflowRetrievalResponse = await authOwnerAgent.get(`/workflows/${id}`); expect(workflowRetrievalResponse.statusCode).toBe(200); - const { pinData } = workflowRetrievalResponse.body.data as { pinData: IPinData }; - expect(pinData).toMatchObject(MOCK_PINDATA); }); }); diff --git a/packages/cli/test/unit/ActiveExecutions.test.ts b/packages/cli/test/unit/ActiveExecutions.test.ts index 79b926441b..a05bb7f19e 100644 --- a/packages/cli/test/unit/ActiveExecutions.test.ts +++ b/packages/cli/test/unit/ActiveExecutions.test.ts @@ -1,11 +1,11 @@ import { ActiveExecutions } from '@/ActiveExecutions'; import PCancelable from 'p-cancelable'; import { v4 as uuid } from 'uuid'; +import { Container } from 'typedi'; import type { IExecuteResponsePromiseData, IRun } from 'n8n-workflow'; import { createDeferredPromise } from 'n8n-workflow'; import type { IWorkflowExecutionDataProcess } from '@/Interfaces'; -import { ExecutionRepository } from '@/databases/repositories'; -import Container from 'typedi'; +import { ExecutionRepository } from '@db/repositories'; const FAKE_EXECUTION_ID = '15'; const FAKE_SECOND_EXECUTION_ID = '20'; diff --git a/packages/cli/test/unit/ActiveWorkflowRunner.test.ts b/packages/cli/test/unit/ActiveWorkflowRunner.test.ts index c8694ceb88..055795936b 100644 --- a/packages/cli/test/unit/ActiveWorkflowRunner.test.ts +++ b/packages/cli/test/unit/ActiveWorkflowRunner.test.ts @@ -20,7 +20,7 @@ import { mock } from 'jest-mock-extended'; import type { ExternalHooks } from '@/ExternalHooks'; import { Container } from 'typedi'; import { LoadNodesAndCredentials } from '@/LoadNodesAndCredentials'; -import { mockInstance } from '../integration/shared/utils'; +import { mockInstance } from '../integration/shared/utils/'; import { Push } from '@/push'; import { ActiveExecutions } from '@/ActiveExecutions'; import { NodeTypes } from '@/NodeTypes'; diff --git a/packages/cli/test/unit/CommunityNodeHelpers.test.ts b/packages/cli/test/unit/CommunityNodeHelpers.test.ts index 951a9f4b34..24cb663b60 100644 --- a/packages/cli/test/unit/CommunityNodeHelpers.test.ts +++ b/packages/cli/test/unit/CommunityNodeHelpers.test.ts @@ -22,7 +22,7 @@ import { InstalledPackages } from '@db/entities/InstalledPackages'; import { InstalledNodes } from '@db/entities/InstalledNodes'; import { randomName } from '../integration/shared/random'; import config from '@/config'; -import { installedPackagePayload, installedNodePayload } from '../integration/shared/utils'; +import { installedPackagePayload, installedNodePayload } from '../integration/shared/utils/'; import type { CommunityPackages } from '@/Interfaces'; diff --git a/packages/cli/test/unit/Events.test.ts b/packages/cli/test/unit/Events.test.ts index cb7f499d67..452014a739 100644 --- a/packages/cli/test/unit/Events.test.ts +++ b/packages/cli/test/unit/Events.test.ts @@ -13,9 +13,9 @@ import * as UserManagementHelper from '@/UserManagement/UserManagementHelper'; import { getLogger } from '@/Logger'; import { InternalHooks } from '@/InternalHooks'; -import { mockInstance } from '../integration/shared/utils'; +import { 
mockInstance } from '../integration/shared/utils/'; import { UserService } from '@/user/user.service'; -import { WorkflowEntity } from '@/databases/entities/WorkflowEntity'; +import { WorkflowEntity } from '@db/entities/WorkflowEntity'; jest.mock('@/Db', () => { return { diff --git a/packages/cli/test/unit/License.test.ts b/packages/cli/test/unit/License.test.ts index 32351f2e27..c1ec4e7d7a 100644 --- a/packages/cli/test/unit/License.test.ts +++ b/packages/cli/test/unit/License.test.ts @@ -9,7 +9,7 @@ const MOCK_SERVER_URL = 'https://server.com/v1'; const MOCK_RENEW_OFFSET = 259200; const MOCK_INSTANCE_ID = 'instance-id'; const MOCK_ACTIVATION_KEY = 'activation-key'; -const MOCK_FEATURE_FLAG = 'feat:mock'; +const MOCK_FEATURE_FLAG = 'feat:sharing'; const MOCK_MAIN_PLAN_ID = '1b765dc4-d39d-4ffe-9885-c56dd67c4b26'; describe('License', () => { @@ -71,9 +71,9 @@ describe('License', () => { }); test('check fetching feature values', async () => { - await license.getFeatureValue(MOCK_FEATURE_FLAG, false); + license.getFeatureValue(MOCK_FEATURE_FLAG); - expect(LicenseManager.prototype.getFeatureValue).toHaveBeenCalledWith(MOCK_FEATURE_FLAG, false); + expect(LicenseManager.prototype.getFeatureValue).toHaveBeenCalledWith(MOCK_FEATURE_FLAG); }); test('check management jwt', async () => { diff --git a/packages/cli/test/unit/PermissionChecker.test.ts b/packages/cli/test/unit/PermissionChecker.test.ts index 44127f50ab..be12573b1d 100644 --- a/packages/cli/test/unit/PermissionChecker.test.ts +++ b/packages/cli/test/unit/PermissionChecker.test.ts @@ -22,7 +22,7 @@ import { import * as testDb from '../integration/shared/testDb'; import { mockNodeTypesData } from './Helpers'; import type { SaveCredentialFunction } from '../integration/shared/types'; -import { mockInstance } from '../integration/shared/utils'; +import { mockInstance } from '../integration/shared/utils/'; let mockNodeTypes: INodeTypes; let credentialOwnerRole: Role; diff --git a/packages/cli/test/unit/controllers/owner.controller.test.ts b/packages/cli/test/unit/controllers/owner.controller.test.ts index ad18cf2dd6..10879d0062 100644 --- a/packages/cli/test/unit/controllers/owner.controller.test.ts +++ b/packages/cli/test/unit/controllers/owner.controller.test.ts @@ -4,12 +4,7 @@ import type { ILogger } from 'n8n-workflow'; import jwt from 'jsonwebtoken'; import type { IInternalHooksClass } from '@/Interfaces'; import type { User } from '@db/entities/User'; -import type { - CredentialsRepository, - SettingsRepository, - UserRepository, - WorkflowRepository, -} from '@db/repositories'; +import type { SettingsRepository, UserRepository } from '@db/repositories'; import type { Config } from '@/config'; import { BadRequestError } from '@/ResponseHelper'; import type { OwnerRequest } from '@/requests'; @@ -23,8 +18,6 @@ describe('OwnerController', () => { const internalHooks = mock(); const userRepository = mock(); const settingsRepository = mock(); - const credentialsRepository = mock(); - const workflowsRepository = mock(); const controller = new OwnerController({ config, logger, @@ -32,29 +25,9 @@ describe('OwnerController', () => { repositories: { User: userRepository, Settings: settingsRepository, - Credentials: credentialsRepository, - Workflow: workflowsRepository, }, }); - describe('preSetup', () => { - it('should throw a BadRequestError if the instance owner is already setup', async () => { - config.getEnv.calledWith('userManagement.isInstanceOwnerSetUp').mockReturnValue(true); - await expect(controller.preSetup()).rejects.toThrowError( - 
new BadRequestError('Instance owner already setup'), - ); - }); - - it('should a return credential and workflow count', async () => { - config.getEnv.calledWith('userManagement.isInstanceOwnerSetUp').mockReturnValue(false); - credentialsRepository.countBy.mockResolvedValue(7); - workflowsRepository.countBy.mockResolvedValue(31); - const { credentials, workflows } = await controller.preSetup(); - expect(credentials).toBe(7); - expect(workflows).toBe(31); - }); - }); - describe('setupOwner', () => { it('should throw a BadRequestError if the instance owner is already setup', async () => { config.getEnv.calledWith('userManagement.isInstanceOwnerSetUp').mockReturnValue(true); @@ -123,15 +96,4 @@ describe('OwnerController', () => { expect(cookieOptions.value.sameSite).toBe('lax'); }); }); - - describe('skipSetup', () => { - it('should skip setting up the instance owner', async () => { - await controller.skipSetup(); - expect(settingsRepository.update).toHaveBeenCalledWith( - { key: 'userManagement.skipInstanceOwnerSetup' }, - { value: JSON.stringify(true) }, - ); - expect(config.set).toHaveBeenCalledWith('userManagement.skipInstanceOwnerSetup', true); - }); - }); }); diff --git a/packages/cli/test/unit/middlewares/basicAuth.test.ts b/packages/cli/test/unit/middlewares/basicAuth.test.ts deleted file mode 100644 index cc03b1ec6c..0000000000 --- a/packages/cli/test/unit/middlewares/basicAuth.test.ts +++ /dev/null @@ -1,42 +0,0 @@ -import express from 'express'; -import request from 'supertest'; -import config from '@/config'; -import { setupBasicAuth } from '@/middlewares/basicAuth'; - -describe('Basic Auth Middleware', () => { - let app: express.Application; - - beforeAll(() => { - app = express(); - config.set('security.basicAuth', { user: 'jim', password: 'n8n', hash: false, active: true }); - setupBasicAuth(app, config, new RegExp('^/skip-auth')); - app.get('/test', (req, res) => res.send({ auth: true })); - app.get('/skip-auth', (req, res) => res.send({ auth: false })); - }); - - it('should not block calls to /skip-auth', async () => { - const response = await request(app).get('/skip-auth'); - expect(response.statusCode).toEqual(200); - expect(response.headers).not.toHaveProperty('www-authenticate'); - expect(response.body).toEqual({ auth: false }); - }); - - it('should block calls to /test if auth is absent', async () => { - const response = await request(app).get('/test'); - expect(response.statusCode).toEqual(401); - expect(response.headers).toHaveProperty('www-authenticate'); - }); - - it('should block calls to /test if auth is invalid', async () => { - const response = await request(app).get('/test').auth('user', 'invalid'); - expect(response.statusCode).toEqual(401); - expect(response.headers).toHaveProperty('www-authenticate'); - }); - - it('should allow access to /test if basic auth header is valid', async () => { - const response = await request(app).get('/test').auth('jim', 'n8n'); - expect(response.statusCode).toEqual(200); - expect(response.headers).not.toHaveProperty('www-authenticate'); - expect(response.body).toEqual({ auth: true }); - }); -}); diff --git a/packages/cli/test/unit/middlewares/externalJWTAuth.test.ts b/packages/cli/test/unit/middlewares/externalJWTAuth.test.ts deleted file mode 100644 index 42b2b19af4..0000000000 --- a/packages/cli/test/unit/middlewares/externalJWTAuth.test.ts +++ /dev/null @@ -1,47 +0,0 @@ -import express from 'express'; -import request from 'supertest'; -import createJWKSMock from 'mock-jwks'; -import config from '@/config'; -import { 
setupExternalJWTAuth } from '@/middlewares/externalJWTAuth'; - -const testJWKUri = 'https://n8n.test/'; -const jwksMock = createJWKSMock(testJWKUri); - -describe('External JWT Auth Middleware', () => { - let app: express.Application; - - beforeAll(() => { - app = express(); - config.set('security.jwtAuth.jwtHeader', 'Authorization'); - config.set('security.jwtAuth.jwtHeaderValuePrefix', 'Bearer'); - config.set('security.jwtAuth.jwtIssuer', 'n8n'); - config.set('security.jwtAuth.jwksUri', `${testJWKUri}.well-known/jwks.json`); - setupExternalJWTAuth(app, config, new RegExp('^/skip-auth')); - app.get('/test', (req, res) => res.send({ auth: true })); - app.get('/skip-auth', (req, res) => res.send({ auth: false })); - - jwksMock.start(); - }); - - it('should not block calls to /skip-auth', async () => { - const response = await request(app).get('/skip-auth'); - expect(response.statusCode).toEqual(200); - expect(response.body).toEqual({ auth: false }); - }); - - it('should block calls to /test if auth is absent', async () => - request(app).get('/test').expect(403)); - - it('should block calls to /test if auth is invalid', async () => { - const token = jwksMock.token({ iss: 'invalid' }); - const response = await request(app).get('/test').set('Authorization', `Bearer ${token}`); - expect(response.statusCode).toEqual(403); - }); - - it('should allow access to /test if JWT auth header is valid', async () => { - const token = jwksMock.token({ iss: 'n8n' }); - const response = await request(app).get('/test').set('Authorization', `Bearer ${token}`); - expect(response.statusCode).toEqual(200); - expect(response.body).toEqual({ auth: true }); - }); -}); diff --git a/packages/cli/test/unit/repositories/role.repository.test.ts b/packages/cli/test/unit/repositories/role.repository.test.ts index e1a6a46eec..f47a93a7df 100644 --- a/packages/cli/test/unit/repositories/role.repository.test.ts +++ b/packages/cli/test/unit/repositories/role.repository.test.ts @@ -4,7 +4,7 @@ import { mock } from 'jest-mock-extended'; import type { RoleNames, RoleScopes } from '@db/entities/Role'; import { Role } from '@db/entities/Role'; import { RoleRepository } from '@db/repositories/role.repository'; -import { mockInstance } from '../../integration/shared/utils'; +import { mockInstance } from '../../integration/shared/utils/'; import { randomInteger } from '../../integration/shared/random'; describe('RoleRepository', () => { diff --git a/packages/cli/test/unit/services/role.service.test.ts b/packages/cli/test/unit/services/role.service.test.ts index 483be386ae..971b4bbf80 100644 --- a/packages/cli/test/unit/services/role.service.test.ts +++ b/packages/cli/test/unit/services/role.service.test.ts @@ -2,7 +2,7 @@ import { SharedWorkflowRepository } from '@db/repositories/sharedWorkflow.reposi import { Role } from '@db/entities/Role'; import { SharedWorkflow } from '@db/entities/SharedWorkflow'; import { RoleService } from '@/role/role.service'; -import { mockInstance } from '../../integration/shared/utils'; +import { mockInstance } from '../../integration/shared/utils/'; describe('RoleService', () => { const sharedWorkflowRepository = mockInstance(SharedWorkflowRepository); diff --git a/packages/core/bin/generate-known b/packages/core/bin/generate-known index b6dde71d4f..5cabfa9b80 100755 --- a/packages/core/bin/generate-known +++ b/packages/core/bin/generate-known @@ -38,6 +38,10 @@ const generate = async (kind) => { else obj[name] = { className, sourcePath }; } + if (kind === 'credentials' && Array.isArray(instance.extends)) { 
+ obj[name].extends = instance.extends; + } + if (kind === 'nodes') { const { credentials } = instance.description; if (credentials && credentials.length) { @@ -53,6 +57,7 @@ const generate = async (kind) => { } return obj; }, {}); + LoggerProxy.info(`Detected ${Object.keys(data).length} ${kind}`); await writeJSON(`known/${kind}.json`, data); return data; diff --git a/packages/core/package.json b/packages/core/package.json index 51caa50a81..ac50281ddb 100644 --- a/packages/core/package.json +++ b/packages/core/package.json @@ -1,6 +1,6 @@ { "name": "n8n-core", - "version": "0.173.0", + "version": "1.0.1", "description": "Core functionality of n8n", "license": "SEE LICENSE IN LICENSE.md", "homepage": "https://n8n.io", @@ -60,8 +60,6 @@ "p-cancelable": "^2.0.0", "pretty-bytes": "^5.6.0", "qs": "^6.10.1", - "request": "^2.88.2", - "request-promise-native": "^1.0.7", "uuid": "^8.3.2" } } diff --git a/packages/core/src/BinaryDataManager/FileSystem.ts b/packages/core/src/BinaryDataManager/FileSystem.ts index 011a5cd5ee..e84e479473 100644 --- a/packages/core/src/BinaryDataManager/FileSystem.ts +++ b/packages/core/src/BinaryDataManager/FileSystem.ts @@ -1,3 +1,4 @@ +import glob from 'fast-glob'; import { createReadStream } from 'fs'; import fs from 'fs/promises'; import path from 'path'; @@ -12,6 +13,9 @@ import { FileNotFoundError } from '../errors'; const PREFIX_METAFILE = 'binarymeta'; const PREFIX_PERSISTED_METAFILE = 'persistedmeta'; +const executionExtractionRegexp = + /^(\w+)(?:[0-9a-fA-F]{8}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{12})$/; + export class BinaryDataFileSystem implements IBinaryDataManager { private storagePath: string; @@ -36,16 +40,12 @@ export class BinaryDataFileSystem implements IBinaryDataManager { }, this.persistedBinaryDataTTL * 30000); } - return fs - .readdir(this.storagePath) - .catch(async () => fs.mkdir(this.storagePath, { recursive: true })) - .then(async () => fs.readdir(this.getBinaryDataMetaPath())) - .catch(async () => fs.mkdir(this.getBinaryDataMetaPath(), { recursive: true })) - .then(async () => fs.readdir(this.getBinaryDataPersistMetaPath())) - .catch(async () => fs.mkdir(this.getBinaryDataPersistMetaPath(), { recursive: true })) - .then(async () => this.deleteMarkedFiles()) - .then(async () => this.deleteMarkedPersistedFiles()) - .then(() => {}); + await this.assertFolder(this.storagePath); + await this.assertFolder(this.getBinaryDataMetaPath()); + await this.assertFolder(this.getBinaryDataPersistMetaPath()); + + await this.deleteMarkedFiles(); + await this.deleteMarkedPersistedFiles(); } async getFileSize(identifier: string): Promise { @@ -122,46 +122,37 @@ export class BinaryDataFileSystem implements IBinaryDataManager { `${PREFIX_PERSISTED_METAFILE}_${executionId}_${timeoutTime}`, ); - return fs - .readFile(filePath) - .catch(async () => fs.writeFile(filePath, identifier)) - .then(() => {}); + try { + await fs.access(filePath); + } catch { + await fs.writeFile(filePath, identifier); + } } private async deleteMarkedFilesByMeta(metaPath: string, filePrefix: string): Promise { const currentTimeValue = new Date().valueOf(); - const metaFileNames = await fs.readdir(metaPath); + const metaFileNames = await glob(`${filePrefix}_*`, { cwd: metaPath }); - const execsAdded: { [key: string]: number } = {}; + const executionIds = metaFileNames + .map((f) => f.split('_') as [string, string, string]) + .filter(([prefix, , ts]) => { + if (prefix !== filePrefix) return false; + const execTimestamp = parseInt(ts, 10); + return execTimestamp < 
currentTimeValue; + }) + .map((e) => e[1]); - const promises = metaFileNames.reduce>>((prev, curr) => { - const [prefix, executionId, ts] = curr.split('_'); - - if (prefix !== filePrefix) { - return prev; - } - - const execTimestamp = parseInt(ts, 10); - - if (execTimestamp < currentTimeValue) { - if (execsAdded[executionId]) { - // do not delete data, only meta file - prev.push(this.deleteMetaFileByPath(path.join(metaPath, curr))); - return prev; - } - - execsAdded[executionId] = 1; - prev.push( - this.deleteBinaryDataByExecutionId(executionId).then(async () => - this.deleteMetaFileByPath(path.join(metaPath, curr)), - ), - ); - } - - return prev; - }, []); - - await Promise.all(promises); + const filesToDelete = []; + const deletedIds = await this.deleteBinaryDataByExecutionIds(executionIds); + for (const executionId of deletedIds) { + filesToDelete.push( + ...(await glob(`${filePrefix}_${executionId}_`, { + absolute: true, + cwd: metaPath, + })), + ); + } + await Promise.all(filesToDelete.map(async (file) => fs.rm(file))); } async duplicateBinaryDataByIdentifier(binaryDataId: string, prefix: string): Promise { @@ -174,18 +165,19 @@ export class BinaryDataFileSystem implements IBinaryDataManager { return newBinaryDataId; } - async deleteBinaryDataByExecutionId(executionId: string): Promise { - const regex = new RegExp(`${executionId}_*`); - const filenames = await fs.readdir(this.storagePath); - - const promises = filenames.reduce>>((allProms, filename) => { - if (regex.test(filename)) { - allProms.push(fs.rm(this.resolveStoragePath(filename))); + async deleteBinaryDataByExecutionIds(executionIds: string[]): Promise { + const set = new Set(executionIds); + const fileNames = await fs.readdir(this.storagePath); + const deletedIds = []; + for (const fileName of fileNames) { + const executionId = fileName.match(executionExtractionRegexp)?.[1]; + if (executionId && set.has(executionId)) { + const filePath = this.resolveStoragePath(fileName); + await Promise.all([fs.rm(filePath), fs.rm(`${filePath}.metadata`)]); + deletedIds.push(executionId); } - return allProms; - }, []); - - await Promise.all(promises); + } + return deletedIds; } async deleteBinaryDataByIdentifier(identifier: string): Promise { @@ -193,18 +185,24 @@ export class BinaryDataFileSystem implements IBinaryDataManager { } async persistBinaryDataForExecutionId(executionId: string): Promise { - return fs.readdir(this.getBinaryDataPersistMetaPath()).then(async (metaFiles) => { - const promises = metaFiles.reduce>>((prev, curr) => { - if (curr.startsWith(`${PREFIX_PERSISTED_METAFILE}_${executionId}_`)) { - prev.push(fs.rm(path.join(this.getBinaryDataPersistMetaPath(), curr))); - return prev; - } - + const metaFiles = await fs.readdir(this.getBinaryDataPersistMetaPath()); + const promises = metaFiles.reduce>>((prev, curr) => { + if (curr.startsWith(`${PREFIX_PERSISTED_METAFILE}_${executionId}_`)) { + prev.push(fs.rm(path.join(this.getBinaryDataPersistMetaPath(), curr))); return prev; - }, []); + } - await Promise.all(promises); - }); + return prev; + }, []); + await Promise.all(promises); + } + + private async assertFolder(folder: string): Promise { + try { + await fs.access(folder); + } catch { + await fs.mkdir(folder, { recursive: true }); + } } private generateFileName(prefix: string): string { @@ -219,10 +217,6 @@ export class BinaryDataFileSystem implements IBinaryDataManager { return path.join(this.storagePath, 'persistMeta'); } - private async deleteMetaFileByPath(metaFilePath: string): Promise { - return fs.rm(metaFilePath); - 
} - private async deleteFromLocalStorage(identifier: string) { return fs.rm(this.getBinaryPath(identifier)); } diff --git a/packages/core/src/BinaryDataManager/index.ts b/packages/core/src/BinaryDataManager/index.ts index 54ab8bc110..56b9f1550a 100644 --- a/packages/core/src/BinaryDataManager/index.ts +++ b/packages/core/src/BinaryDataManager/index.ts @@ -178,9 +178,9 @@ export class BinaryDataManager { } } - async deleteBinaryDataByExecutionId(executionId: string): Promise { + async deleteBinaryDataByExecutionIds(executionIds: string[]): Promise { if (this.managers[this.binaryDataMode]) { - await this.managers[this.binaryDataMode].deleteBinaryDataByExecutionId(executionId); + await this.managers[this.binaryDataMode].deleteBinaryDataByExecutionIds(executionIds); } } diff --git a/packages/core/src/DirectoryLoader.ts b/packages/core/src/DirectoryLoader.ts index 0fba8399d3..4b55f6b24d 100644 --- a/packages/core/src/DirectoryLoader.ts +++ b/packages/core/src/DirectoryLoader.ts @@ -109,9 +109,8 @@ export abstract class DirectoryLoader { nodeVersion = tempNode.currentVersion; if (currentVersionNode.hasOwnProperty('executeSingle')) { - Logger.warn( - `"executeSingle" will get deprecated soon. Please update the code of node "${this.packageName}.${nodeName}" to use "execute" instead!`, - { filePath }, + throw new Error( + `"executeSingle" has been removed. Please update the code of node "${this.packageName}.${nodeName}" to use "execute" instead!`, ); } } else { @@ -168,6 +167,7 @@ export abstract class DirectoryLoader { this.known.credentials[tempCredential.name] = { className: credentialName, sourcePath: filePath, + extends: tempCredential.extends, }; this.credentialTypes[tempCredential.name] = { diff --git a/packages/core/src/Interfaces.ts b/packages/core/src/Interfaces.ts index 0ec0017820..538a1127ca 100644 --- a/packages/core/src/Interfaces.ts +++ b/packages/core/src/Interfaces.ts @@ -3,26 +3,10 @@ import type { IPollResponse, ITriggerResponse, IWorkflowSettings as IWorkflowSettingsWorkflow, - IExecuteFunctions as IExecuteFunctionsBase, - IExecuteSingleFunctions as IExecuteSingleFunctionsBase, - IHookFunctions as IHookFunctionsBase, - ILoadOptionsFunctions as ILoadOptionsFunctionsBase, - IPollFunctions as IPollFunctionsBase, - ITriggerFunctions as ITriggerFunctionsBase, - IWebhookFunctions as IWebhookFunctionsBase, BinaryMetadata, ValidationResult, } from 'n8n-workflow'; -// TODO: remove these after removing `n8n-core` dependency from `nodes-bases` -export type IExecuteFunctions = IExecuteFunctionsBase; -export type IExecuteSingleFunctions = IExecuteSingleFunctionsBase; -export type IHookFunctions = IHookFunctionsBase; -export type ILoadOptionsFunctions = ILoadOptionsFunctionsBase; -export type IPollFunctions = IPollFunctionsBase; -export type ITriggerFunctions = ITriggerFunctionsBase; -export type IWebhookFunctions = IWebhookFunctionsBase; - export interface IProcessMessage { // eslint-disable-next-line @typescript-eslint/no-explicit-any data?: any; @@ -72,7 +56,7 @@ export interface IBinaryDataManager { deleteMarkedFiles(): Promise; deleteBinaryDataByIdentifier(identifier: string): Promise; duplicateBinaryDataByIdentifier(binaryDataId: string, prefix: string): Promise; - deleteBinaryDataByExecutionId(executionId: string): Promise; + deleteBinaryDataByExecutionIds(executionIds: string[]): Promise; persistBinaryDataForExecutionId(executionId: string): Promise; } diff --git a/packages/core/src/NodeExecuteFunctions.ts b/packages/core/src/NodeExecuteFunctions.ts index 16f8f7b691..2c2541285c 100644 
--- a/packages/core/src/NodeExecuteFunctions.ts +++ b/packages/core/src/NodeExecuteFunctions.ts @@ -97,9 +97,8 @@ import get from 'lodash/get'; import type { Request, Response } from 'express'; import FormData from 'form-data'; import path from 'path'; -import type { OptionsWithUri, OptionsWithUrl, RequestCallback, RequiredUriUrl } from 'request'; +import type { OptionsWithUri, OptionsWithUrl } from 'request'; import type { RequestPromiseOptions } from 'request-promise-native'; -import requestPromise from 'request-promise-native'; import FileType from 'file-type'; import { lookup, extension } from 'mime-types'; import type { IncomingHttpHeaders } from 'http'; @@ -129,6 +128,7 @@ import { setAllWorkflowExecutionMetadata, setWorkflowExecutionMetadata, } from './WorkflowExecutionMetadata'; +import { getUserN8nFolderPath } from './UserSettings'; axios.defaults.timeout = 300000; // Prevent axios from adding x-form-www-urlencoded headers by default @@ -142,10 +142,6 @@ axios.defaults.paramsSerializer = (params) => { return stringify(params, { arrayFormat: 'indices' }); }; -const requestPromiseWithDefaults = requestPromise.defaults({ - timeout: 300000, // 5 minutes -}); - const pushFormDataValue = (form: FormData, key: string, value: any) => { if (value?.hasOwnProperty('value') && value.hasOwnProperty('options')) { form.append(key, value.value, value.options); @@ -596,21 +592,12 @@ type ConfigObject = { }; export async function proxyRequestToAxios( - workflow: Workflow, - additionalData: IWorkflowExecuteAdditionalData, - node: INode, - uriOrObject: string | IDataObject, - options?: IDataObject, + workflow: Workflow | undefined, + additionalData: IWorkflowExecuteAdditionalData | undefined, + node: INode | undefined, + uriOrObject: string | object, + options?: object, ): Promise { - // Check if there's a better way of getting this config here - if (process.env.N8N_USE_DEPRECATED_REQUEST_LIB) { - return requestPromiseWithDefaults.call( - null, - uriOrObject as unknown as RequiredUriUrl & RequestPromiseOptions, - options as unknown as RequestCallback, - ); - } - let axiosConfig: AxiosRequestConfig = { maxBodyLength: Infinity, maxContentLength: Infinity, @@ -667,7 +654,7 @@ export async function proxyRequestToAxios( body = undefined; } } - await additionalData.hooks?.executeHookFunctions('nodeFetchedData', [workflow.id, node]); + await additionalData?.hooks?.executeHookFunctions('nodeFetchedData', [workflow?.id, node]); return { body, headers: response.headers, @@ -684,7 +671,7 @@ export async function proxyRequestToAxios( body = undefined; } } - await additionalData.hooks?.executeHookFunctions('nodeFetchedData', [workflow.id, node]); + await additionalData?.hooks?.executeHookFunctions('nodeFetchedData', [workflow?.id, node]); return body; } } catch (error) { @@ -1074,13 +1061,13 @@ async function prepareBinaryData( /** * Makes a request using OAuth data for authentication * - * @param {(OptionsWithUri | requestPromise.RequestPromiseOptions)} requestOptions + * @param {(OptionsWithUri | RequestPromiseOptions)} requestOptions * */ export async function requestOAuth2( this: IAllExecuteFunctions, credentialsType: string, - requestOptions: OptionsWithUri | requestPromise.RequestPromiseOptions | IHttpRequestOptions, + requestOptions: OptionsWithUri | RequestPromiseOptions | IHttpRequestOptions, node: INode, additionalData: IWorkflowExecuteAdditionalData, oAuth2Options?: IOAuth2Options, @@ -1311,11 +1298,7 @@ export async function requestOAuth2( export async function requestOAuth1( this: 
IAllExecuteFunctions, credentialsType: string, - requestOptions: - | OptionsWithUrl - | OptionsWithUri - | requestPromise.RequestPromiseOptions - | IHttpRequestOptions, + requestOptions: OptionsWithUrl | OptionsWithUri | RequestPromiseOptions | IHttpRequestOptions, isN8nRequest = false, ) { const credentials = await this.getCredentials(credentialsType); @@ -1569,7 +1552,7 @@ export function normalizeItems( export async function requestWithAuthentication( this: IAllExecuteFunctions, credentialsType: string, - requestOptions: OptionsWithUri | requestPromise.RequestPromiseOptions, + requestOptions: OptionsWithUri | RequestPromiseOptions, workflow: Workflow, node: INode, additionalData: IWorkflowExecuteAdditionalData, @@ -1982,7 +1965,6 @@ const validateValueAgainstSchema = ( }' [item ${itemIndex}]`, { description: validationResult.errorMessage, - failExecution: true, runIndex, itemIndex, nodeCause: node.name, @@ -2179,6 +2161,7 @@ const getCommonWorkflowFunctions = ( getWorkflowStaticData: (type) => workflow.getStaticData(type, node), getRestApiUrl: () => additionalData.restApiUrl, + getInstanceBaseUrl: () => additionalData.instanceBaseUrl, getTimezone: () => getTimezone(workflow, additionalData), }); @@ -2229,7 +2212,7 @@ const getRequestHelperFunctions = ( async requestOAuth1( this: IAllExecuteFunctions, credentialsType: string, - requestOptions: OptionsWithUrl | requestPromise.RequestPromiseOptions, + requestOptions: OptionsWithUrl | RequestPromiseOptions, ): Promise { return requestOAuth1.call(this, credentialsType, requestOptions); }, @@ -2237,7 +2220,7 @@ const getRequestHelperFunctions = ( async requestOAuth2( this: IAllExecuteFunctions, credentialsType: string, - requestOptions: OptionsWithUri | requestPromise.RequestPromiseOptions, + requestOptions: OptionsWithUri | RequestPromiseOptions, oAuth2Options?: IOAuth2Options, ): Promise { return requestOAuth2.call( @@ -2264,6 +2247,9 @@ const getFileSystemHelperFunctions = (node: INode): FileSystemHelperFunctions => } return createReadStream(filePath); }, + getStoragePath() { + return path.join(getUserN8nFolderPath(), `storage/${node.type}`); + }, }); const getNodeHelperFunctions = ({ @@ -2733,7 +2719,9 @@ export function getExecuteSingleFunctions( export function getCredentialTestFunctions(): ICredentialTestFunctions { return { helpers: { - request: requestPromiseWithDefaults, + request: async (uriOrObject: string | object, options?: object) => { + return proxyRequestToAxios(undefined, undefined, undefined, uriOrObject, options); + }, }, }; } diff --git a/packages/core/src/WorkflowExecute.ts b/packages/core/src/WorkflowExecute.ts index f1e2cd6027..6fe4838115 100644 --- a/packages/core/src/WorkflowExecute.ts +++ b/packages/core/src/WorkflowExecute.ts @@ -143,6 +143,10 @@ export class WorkflowExecute { return this.processRunExecutionData(workflow); } + forceInputNodeExecution(workflow: Workflow): boolean { + return workflow.settings.executionOrder !== 'v1'; + } + /** * Executes the given workflow but only * @@ -329,6 +333,27 @@ export class WorkflowExecute { return true; } + prepareWaitingToExecution(nodeName: string, numberOfConnections: number, runIndex: number) { + if (!this.runExecutionData.executionData!.waitingExecutionSource) { + this.runExecutionData.executionData!.waitingExecutionSource = {}; + } + + this.runExecutionData.executionData!.waitingExecution[nodeName][runIndex] = { + main: [], + }; + this.runExecutionData.executionData!.waitingExecutionSource[nodeName][runIndex] = { + main: [], + }; + + for (let i = 0; i < 
numberOfConnections; i++) { + this.runExecutionData.executionData!.waitingExecution[nodeName][runIndex].main.push(null); + + this.runExecutionData.executionData!.waitingExecutionSource[nodeName][runIndex].main.push( + null, + ); + } + } + addNodeToBeExecuted( workflow: Workflow, connectionData: IConnection, @@ -338,6 +363,8 @@ export class WorkflowExecute { runIndex: number, ): void { let stillDataMissing = false; + const enqueueFn = workflow.settings.executionOrder === 'v1' ? 'unshift' : 'push'; + let waitingNodeIndex: number | undefined; // Check if node has multiple inputs as then we have to wait for all input data // to be present before we can add it to the node-execution-stack @@ -358,47 +385,64 @@ export class WorkflowExecute { this.runExecutionData.executionData!.waitingExecutionSource[connectionData.node] = {}; nodeWasWaiting = false; } - if ( - this.runExecutionData.executionData!.waitingExecution[connectionData.node][runIndex] === - undefined - ) { - // Node does not have data for runIndex yet so create also empty one and init it - this.runExecutionData.executionData!.waitingExecution[connectionData.node][runIndex] = { - main: [], - }; - this.runExecutionData.executionData!.waitingExecutionSource[connectionData.node][runIndex] = - { - main: [], - }; - for ( - let i = 0; - i < workflow.connectionsByDestinationNode[connectionData.node].main.length; - i++ - ) { - this.runExecutionData.executionData!.waitingExecution[connectionData.node][ - runIndex - ].main.push(null); - this.runExecutionData.executionData!.waitingExecutionSource[connectionData.node][ - runIndex - ].main.push(null); + // Figure out if the node is already waiting with partial data to which to add the + // data to or if a new entry has to get created + let createNewWaitingEntry = true; + + if ( + Object.keys(this.runExecutionData.executionData!.waitingExecution[connectionData.node]) + .length > 0 + ) { + // Check if there is already data for the input on all of the waiting nodes + for (const index of Object.keys( + this.runExecutionData.executionData!.waitingExecution[connectionData.node], + )) { + if ( + !this.runExecutionData.executionData!.waitingExecution[connectionData.node][ + parseInt(index) + ].main[connectionData.index] + ) { + // Data for the input is missing so we can add it to the existing entry + createNewWaitingEntry = false; + waitingNodeIndex = parseInt(index); + break; + } } } + if (waitingNodeIndex === undefined) { + waitingNodeIndex = Object.values( + this.runExecutionData.executionData!.waitingExecution[connectionData.node], + ).length; + } + + if (createNewWaitingEntry) { + // There is currently no node waiting that does not already have data for + // the given input, so create a new entry + + this.prepareWaitingToExecution( + connectionData.node, + workflow.connectionsByDestinationNode[connectionData.node].main.length, + waitingNodeIndex, + ); + } + // Add the new data if (nodeSuccessData === null) { - this.runExecutionData.executionData!.waitingExecution[connectionData.node][runIndex].main[ - connectionData.index - ] = null; + this.runExecutionData.executionData!.waitingExecution[connectionData.node][ + waitingNodeIndex + ].main[connectionData.index] = null; this.runExecutionData.executionData!.waitingExecutionSource[connectionData.node][ - runIndex + waitingNodeIndex ].main[connectionData.index] = null; } else { - this.runExecutionData.executionData!.waitingExecution[connectionData.node][runIndex].main[ - connectionData.index - ] = nodeSuccessData[outputIndex]; + 
this.runExecutionData.executionData!.waitingExecution[connectionData.node][ + waitingNodeIndex + ].main[connectionData.index] = nodeSuccessData[outputIndex]; + this.runExecutionData.executionData!.waitingExecutionSource[connectionData.node][ - runIndex + waitingNodeIndex ].main[connectionData.index] = { previousNode: parentNodeName, previousNodeOutput: outputIndex || undefined, @@ -412,14 +456,14 @@ export class WorkflowExecute { for ( let i = 0; i < - this.runExecutionData.executionData!.waitingExecution[connectionData.node][runIndex].main - .length; + this.runExecutionData.executionData!.waitingExecution[connectionData.node][waitingNodeIndex] + .main.length; i++ ) { thisExecutionData = - this.runExecutionData.executionData!.waitingExecution[connectionData.node][runIndex].main[ - i - ]; + this.runExecutionData.executionData!.waitingExecution[connectionData.node][ + waitingNodeIndex + ].main[i]; if (thisExecutionData === null) { allDataFound = false; break; @@ -433,11 +477,11 @@ export class WorkflowExecute { const executionStackItem = { node: workflow.nodes[connectionData.node], data: this.runExecutionData.executionData!.waitingExecution[connectionData.node][ - runIndex + waitingNodeIndex ], source: this.runExecutionData.executionData!.waitingExecutionSource[connectionData.node][ - runIndex + waitingNodeIndex ], } as IExecuteData; @@ -447,16 +491,18 @@ export class WorkflowExecute { ) { executionStackItem.source = this.runExecutionData.executionData!.waitingExecutionSource[connectionData.node][ - runIndex + waitingNodeIndex ]; } - this.runExecutionData.executionData!.nodeExecutionStack.push(executionStackItem); + this.runExecutionData.executionData!.nodeExecutionStack[enqueueFn](executionStackItem); // Remove the data from waiting - delete this.runExecutionData.executionData!.waitingExecution[connectionData.node][runIndex]; + delete this.runExecutionData.executionData!.waitingExecution[connectionData.node][ + waitingNodeIndex + ]; delete this.runExecutionData.executionData!.waitingExecutionSource[connectionData.node][ - runIndex + waitingNodeIndex ]; if ( @@ -492,6 +538,9 @@ export class WorkflowExecute { // checked. So we have to go through all the inputs and check if they // are already on the list to be processed. // If that is not the case add it. + + const forceInputNodeExecution = this.forceInputNodeExecution(workflow); + for ( let inputIndex = 0; inputIndex < workflow.connectionsByDestinationNode[connectionData.node].main.length; @@ -540,6 +589,12 @@ export class WorkflowExecute { continue; } + if (!forceInputNodeExecution) { + // Do not automatically follow all incoming nodes and force them + // to execute + continue; + } + // Check if any of the parent nodes does not have any inputs. That // would mean that it has to get added to the list of nodes to process. const parentNodes = workflow.getParentNodes(inputData.node, 'main', -1); @@ -609,7 +664,7 @@ export class WorkflowExecute { if (addEmptyItem) { // Add only node if it does not have any inputs because else it will // be added by its input node later anyway. 
- this.runExecutionData.executionData!.nodeExecutionStack.push({ + this.runExecutionData.executionData!.nodeExecutionStack[enqueueFn]({ node: workflow.getNode(nodeToAdd) as INode, data: { main: [ @@ -650,18 +705,30 @@ export class WorkflowExecute { } if (stillDataMissing) { + waitingNodeIndex = waitingNodeIndex!; + // Additional data is needed to run node so add it to waiting - if ( - !this.runExecutionData.executionData!.waitingExecution.hasOwnProperty(connectionData.node) - ) { - this.runExecutionData.executionData!.waitingExecution[connectionData.node] = {}; - } - this.runExecutionData.executionData!.waitingExecution[connectionData.node][runIndex] = { - main: connectionDataArray, + this.prepareWaitingToExecution( + connectionData.node, + workflow.connectionsByDestinationNode[connectionData.node].main.length, + waitingNodeIndex, + ); + + this.runExecutionData.executionData!.waitingExecution[connectionData.node][waitingNodeIndex] = + { + main: connectionDataArray, + }; + + this.runExecutionData.executionData!.waitingExecutionSource![connectionData.node][ + waitingNodeIndex + ].main[connectionData.index] = { + previousNode: parentNodeName, + previousNodeOutput: outputIndex || undefined, + previousNodeRun: runIndex || undefined, }; } else { // All data is there so add it directly to stack - this.runExecutionData.executionData!.nodeExecutionStack.push({ + this.runExecutionData.executionData!.nodeExecutionStack[enqueueFn]({ node: workflow.nodes[connectionData.node], data: { main: connectionDataArray, @@ -691,6 +758,7 @@ export class WorkflowExecute { Logger.verbose('Workflow execution started', { workflowId: workflow.id }); const startedAt = new Date(); + const forceInputNodeExecution = this.forceInputNodeExecution(workflow); this.status = 'running'; @@ -886,17 +954,20 @@ export class WorkflowExecute { continue executionLoop; } - // Check if it has the data for all the inputs - // The most nodes just have one but merge node for example has two and data - // of both inputs has to be available to be able to process the node. - if ( - executionData.data.main.length < connectionIndex || - executionData.data.main[connectionIndex] === null - ) { - // Does not have the data of the connections so add back to stack - this.runExecutionData.executionData!.nodeExecutionStack.push(executionData); - lastExecutionTry = currentExecutionTry; - continue executionLoop; + if (forceInputNodeExecution) { + // Check if it has the data for all the inputs + // The most nodes just have one but merge node for example has two and data + // of both inputs has to be available to be able to process the node. 
+ if ( + executionData.data.main.length < connectionIndex || + executionData.data.main[connectionIndex] === null + ) { + // Does not have the data of the connections so add back to stack + this.runExecutionData.executionData!.nodeExecutionStack.push(executionData); + lastExecutionTry = currentExecutionTry; + + continue executionLoop; + } } } } @@ -1180,6 +1251,12 @@ export class WorkflowExecute { let connectionData: IConnection; // Iterate over all the outputs + const nodesToAdd: Array<{ + position: [number, number]; + connection: IConnection; + outputIndex: number; + }> = []; + // Add the nodes to be executed // eslint-disable-next-line @typescript-eslint/no-for-in-array for (outputIndex in workflow.connectionsBySourceNode[executionNode.name].main) { @@ -1203,20 +1280,59 @@ export class WorkflowExecute { if ( nodeSuccessData![outputIndex] && - (nodeSuccessData![outputIndex].length !== 0 || connectionData.index > 0) + (nodeSuccessData![outputIndex].length !== 0 || + (connectionData.index > 0 && forceInputNodeExecution)) ) { // Add the node only if it did execute or if connected to second "optional" input - this.addNodeToBeExecuted( - workflow, - connectionData, - parseInt(outputIndex, 10), - executionNode.name, - nodeSuccessData!, - runIndex, - ); + if (workflow.settings.executionOrder === 'v1') { + const nodeToAdd = workflow.getNode(connectionData.node); + nodesToAdd.push({ + position: nodeToAdd?.position || [0, 0], + connection: connectionData, + outputIndex: parseInt(outputIndex, 10), + }); + } else { + this.addNodeToBeExecuted( + workflow, + connectionData, + parseInt(outputIndex, 10), + executionNode.name, + nodeSuccessData!, + runIndex, + ); + } } } } + + if (workflow.settings.executionOrder === 'v1') { + // Always execute the node that is more to the top-left first + nodesToAdd.sort((a, b) => { + if (a.position[1] < b.position[1]) { + return 1; + } + if (a.position[1] > b.position[1]) { + return -1; + } + + if (a.position[0] > b.position[0]) { + return -1; + } + + return 0; + }); + + for (const nodeData of nodesToAdd) { + this.addNodeToBeExecuted( + workflow, + nodeData.connection, + nodeData.outputIndex, + executionNode.name, + nodeSuccessData!, + runIndex, + ); + } + } } } @@ -1229,6 +1345,166 @@ export class WorkflowExecute { taskData, this.runExecutionData, ]); + + let waitingNodes: string[] = Object.keys( + this.runExecutionData.executionData!.waitingExecution, + ); + + if ( + this.runExecutionData.executionData!.nodeExecutionStack.length === 0 && + waitingNodes.length + ) { + // There are no more nodes in the execution stack. Check if there are + // waiting nodes that do not require data on all inputs and execute them, + // one by one. + + // TODO: Should this also care about workflow position (top-left first?) + for (let i = 0; i < waitingNodes.length; i++) { + const nodeName = waitingNodes[i]; + + const checkNode = workflow.getNode(nodeName); + if (!checkNode) { + continue; + } + const nodeType = workflow.nodeTypes.getByNameAndVersion( + checkNode.type, + checkNode.typeVersion, + ); + + // Check if the node is only allowed execute if all inputs received data + let requiredInputs = + workflow.settings.executionOrder === 'v1' + ? 
nodeType.description.requiredInputs
+ : undefined;
+ if (requiredInputs !== undefined) {
+ if (typeof requiredInputs === 'string') {
+ requiredInputs = workflow.expression.getSimpleParameterValue(
+ checkNode,
+ requiredInputs,
+ this.mode,
+ this.additionalData.timezone,
+ { $version: checkNode.typeVersion },
+ undefined,
+ [],
+ ) as number[];
+ }
+
+ if (
+ (requiredInputs !== undefined &&
+ Array.isArray(requiredInputs) &&
+ requiredInputs.length === nodeType.description.inputs.length) ||
+ requiredInputs === nodeType.description.inputs.length
+ ) {
+ // All inputs are required, but not all have data so do not continue
+ continue;
+ }
+ }
+
+ const parentNodes = workflow.getParentNodes(nodeName);
+
+ // Check if input nodes (of same run) got already executed
+ // eslint-disable-next-line @typescript-eslint/no-loop-func
+ const parentIsWaiting = parentNodes.some((value) => waitingNodes.includes(value));
+ if (parentIsWaiting) {
+ // Execute node later as one of its dependencies is still outstanding
+ continue;
+ }
+
+ const runIndexes = Object.keys(
+ this.runExecutionData.executionData!.waitingExecution[nodeName],
+ ).sort();
+
+ // The run-index of the earliest outstanding one
+ const firstRunIndex = parseInt(runIndexes[0]);
+
+ // Find all the inputs which received any kind of data, even if it was an empty
+ // array as this shows that the parent nodes executed but they did not have any
+ // data to pass on.
+ const inputsWithData = this.runExecutionData
+ .executionData!.waitingExecution[nodeName][firstRunIndex].main.map((data, index) =>
+ data === null ? null : index,
+ )
+ .filter((data) => data !== null);
+
+ if (requiredInputs !== undefined) {
+ // Certain inputs are required for the node to be able to execute
+
+ if (Array.isArray(requiredInputs)) {
+ // Specific inputs are required (array of input indexes)
+ let inputDataMissing = false;
+ for (const requiredInput of requiredInputs) {
+ if (!inputsWithData.includes(requiredInput)) {
+ inputDataMissing = true;
+ break;
+ }
+ }
+ if (inputDataMissing) {
+ continue;
+ }
+ } else {
+ // A certain amount of inputs are required (amount of inputs)
+ if (inputsWithData.length < requiredInputs) {
+ continue;
+ }
+ }
+ }
+
+ const taskDataMain = this.runExecutionData.executionData!.waitingExecution[nodeName][
+ firstRunIndex
+ ].main.map((data) => {
+ // For inputs that never received any data, set an empty array
+ return data === null ?
[] : data;
+ });
+
+ if (taskDataMain.filter((data) => data.length).length !== 0) {
+ // Add the node to be executed
+
+ // Make sure that each input at least receives an empty array
+ if (taskDataMain.length < nodeType.description.inputs.length) {
+ for (; taskDataMain.length < nodeType.description.inputs.length; ) {
+ taskDataMain.push([]);
+ }
+ }
+
+ this.runExecutionData.executionData!.nodeExecutionStack.push({
+ node: workflow.nodes[nodeName],
+ data: {
+ main: taskDataMain,
+ },
+ source:
+ this.runExecutionData.executionData!.waitingExecutionSource![nodeName][
+ firstRunIndex
+ ],
+ });
+ }
+
+ // Remove the node from waiting
+ delete this.runExecutionData.executionData!.waitingExecution[nodeName][firstRunIndex];
+ delete this.runExecutionData.executionData!.waitingExecutionSource![nodeName][
+ firstRunIndex
+ ];
+
+ if (
+ Object.keys(this.runExecutionData.executionData!.waitingExecution[nodeName])
+ .length === 0
+ ) {
+ // No more data left for the node so also delete that one
+ delete this.runExecutionData.executionData!.waitingExecution[nodeName];
+ delete this.runExecutionData.executionData!.waitingExecutionSource![nodeName];
+ }
+
+ if (taskDataMain.filter((data) => data.length).length !== 0) {
+ // A node to execute was found and added to the stack
+ break;
+ } else {
+ // No node to add was found, only an empty entry got removed, so continue searching
+ waitingNodes = Object.keys(this.runExecutionData.executionData!.waitingExecution);
+ // Set counter to start again from the beginning. Set it to -1 as it auto-increments
+ // after each run. Only that way will we end up again at 0.
+ i = -1;
+ }
+ }
+ }
}
return;
diff --git a/packages/core/test/WorkflowExecute.test.ts b/packages/core/test/WorkflowExecute.test.ts
index 7fe484ea35..668e8e77dc 100644
--- a/packages/core/test/WorkflowExecute.test.ts
+++ b/packages/core/test/WorkflowExecute.test.ts
@@ -4,15 +4,15 @@
import { WorkflowExecute } from '@/WorkflowExecute';
import * as Helpers from './helpers';
import { initLogger } from './helpers/utils';
-import { predefinedWorkflowExecuteTests } from './helpers/constants';
+import { legacyWorkflowExecuteTests, v1WorkflowExecuteTests } from './helpers/constants';
describe('WorkflowExecute', () => {
beforeAll(() => {
initLogger();
});
- describe('run', () => {
- const tests: WorkflowTestData[] = predefinedWorkflowExecuteTests;
+ describe('v0 execution order', () => {
+ const tests: WorkflowTestData[] = legacyWorkflowExecuteTests;
const executionMode = 'manual';
const nodeTypes = Helpers.NodeTypes();
@@ -25,6 +25,72 @@ describe('WorkflowExecute', () => {
connections: testData.input.workflowData.connections,
active: false,
nodeTypes,
+ settings: {
+ executionOrder: 'v0',
+ },
+ });
+
+ const waitPromise = await createDeferredPromise();
+ const nodeExecutionOrder: string[] = [];
+ const additionalData = Helpers.WorkflowExecuteAdditionalData(
+ waitPromise,
+ nodeExecutionOrder,
+ );
+
+ const workflowExecute = new WorkflowExecute(additionalData, executionMode);
+
+ const executionData = await workflowExecute.run(workflowInstance);
+
+ const result = await waitPromise.promise();
+
+ // Check if the data from WorkflowExecute is identical to data received
+ // by the webhooks
+ expect(executionData).toEqual(result);
+
+ // Check if the output data of the nodes is correct
+ for (const nodeName of Object.keys(testData.output.nodeData)) {
+ if (result.data.resultData.runData[nodeName] === undefined) {
+ throw new Error(`Data for node "${nodeName}" is missing!`);
+ }
+
+ const resultData =
result.data.resultData.runData[nodeName].map((nodeData) => { + if (nodeData.data === undefined) { + return null; + } + return nodeData.data.main[0]!.map((entry) => entry.json); + }); + + expect(resultData).toEqual(testData.output.nodeData[nodeName]); + } + + // Check if the nodes did execute in the correct order + expect(nodeExecutionOrder).toEqual(testData.output.nodeExecutionOrder); + + // Check if other data has correct value + expect(result.finished).toEqual(true); + expect(result.data.executionData!.contextData).toEqual({}); + expect(result.data.executionData!.nodeExecutionStack).toEqual([]); + }); + } + }); + + describe('v1 execution order', () => { + const tests: WorkflowTestData[] = v1WorkflowExecuteTests; + + const executionMode = 'manual'; + const nodeTypes = Helpers.NodeTypes(); + + for (const testData of tests) { + test(testData.description, async () => { + const workflowInstance = new Workflow({ + id: 'test', + nodes: testData.input.workflowData.nodes, + connections: testData.input.workflowData.connections, + active: false, + nodeTypes, + settings: { + executionOrder: 'v1', + }, }); const waitPromise = await createDeferredPromise(); @@ -88,6 +154,7 @@ describe('WorkflowExecute', () => { connections: testData.input.workflowData.connections, active: false, nodeTypes, + settings: testData.input.workflowData.settings, }); const waitPromise = await createDeferredPromise(); diff --git a/packages/core/test/helpers/constants.ts b/packages/core/test/helpers/constants.ts index 9a38c8cddf..92dcd50cb4 100644 --- a/packages/core/test/helpers/constants.ts +++ b/packages/core/test/helpers/constants.ts @@ -353,7 +353,8 @@ export const predefinedNodesTypes: INodeTypeData = { name: 'merge', icon: 'fa:clone', group: ['transform'], - version: 1, + version: [1, 2], + requiredInputs: '={{ $version === 2 ? 
1 : undefined }}', description: 'Merges data of multiple streams once data of both is available', defaults: { name: 'Merge', @@ -743,7 +744,765 @@ export const predefinedNodesTypes: INodeTypeData = { }, }; -export const predefinedWorkflowExecuteTests: WorkflowTestData[] = [ +export const legacyWorkflowExecuteTests: WorkflowTestData[] = [ + { + description: + 'should run complicated multi node workflow where multiple Merge-Node have missing data and complex dependency structure', + input: { + workflowData: { + nodes: [ + { + parameters: { + conditions: { + string: [ + { + value1: '={{ $json["test"] }}', + value2: 'b', + }, + ], + }, + }, + id: '21593a8c-07c1-435b-93a6-75317ee3bf67', + name: 'IF4', + type: 'n8n-nodes-base.if', + typeVersion: 1, + position: [880, 1240], + }, + { + parameters: {}, + id: 'a9af6b9f-011c-4b34-a367-0cfa5ad4c865', + name: 'NoOp2', + type: 'n8n-nodes-base.noOp', + typeVersion: 1, + position: [1320, 1060], + }, + { + parameters: {}, + id: '429d1a51-65f0-4701-af76-b73611774952', + name: 'Merge3', + type: 'n8n-nodes-base.merge', + typeVersion: 1, + position: [1100, 1060], + }, + { + parameters: { + conditions: { + string: [ + { + value1: '={{ $json["test"] }}', + value2: 'b', + }, + ], + }, + }, + id: 'ed08db0f-f747-4f87-af62-051fc53f955c', + name: 'IF3', + type: 'n8n-nodes-base.if', + typeVersion: 1, + position: [620, 1100], + }, + { + parameters: {}, + id: 'e80d2aac-cbd4-4e7c-9817-83db52a617d4', + name: 'Merge2', + type: 'n8n-nodes-base.merge', + typeVersion: 1, + position: [940, 900], + }, + { + parameters: { + conditions: { + string: [ + { + value1: '={{ $json["test"] }}', + value2: 'a', + }, + ], + }, + }, + id: '766dad6b-4326-41b5-a02a-0b3b7d879eb4', + name: 'IF2', + type: 'n8n-nodes-base.if', + typeVersion: 1, + position: [620, 900], + }, + { + parameters: {}, + id: '0c0cd5bb-eb44-48fe-b66a-54a3c541ea57', + name: 'Merge7', + type: 'n8n-nodes-base.merge', + typeVersion: 1, + position: [2180, 1180], + }, + { + parameters: {}, + id: '863a00e5-7be4-43f3-97da-07cf552d7c0e', + name: 'Merge6', + type: 'n8n-nodes-base.merge', + typeVersion: 1, + position: [1840, 1200], + }, + { + parameters: {}, + id: '8855d0ca-1deb-4ad8-958b-2379d3a87160', + name: 'Merge5', + type: 'n8n-nodes-base.merge', + typeVersion: 1, + position: [1600, 1040], + }, + { + parameters: {}, + id: 'ea37e388-c77a-4a2f-a527-4585f24371d5', + name: 'Merge4', + type: 'n8n-nodes-base.merge', + typeVersion: 1, + position: [1180, 880], + }, + { + parameters: {}, + id: 'e3c814e9-9a92-4e12-96d5-85634fe76dc9', + name: 'Merge1', + type: 'n8n-nodes-base.merge', + typeVersion: 1, + position: [940, 720], + }, + { + parameters: { + conditions: { + string: [ + { + value1: '={{ $json["test"] }}', + value2: 'b', + }, + ], + }, + }, + id: 'a21a3932-8a3f-464f-8393-309d3233433a', + name: 'IF1', + type: 'n8n-nodes-base.if', + typeVersion: 1, + position: [620, 720], + }, + { + parameters: { + values: { + string: [ + { + name: 'test', + value: 'a', + }, + ], + }, + options: {}, + }, + id: '12d33a38-baeb-41de-aea0-d8a7477f5aa6', + name: 'Set1', + type: 'n8n-nodes-base.set', + typeVersion: 1, + position: [400, 720], + }, + { + parameters: {}, + id: '41589b0b-0521-41ae-b0c6-80a016af803e', + name: 'Start', + type: 'n8n-nodes-base.start', + typeVersion: 1, + position: [160, 240], + }, + ], + connections: { + IF4: { + main: [ + [ + { + node: 'Merge3', + type: 'main', + index: 1, + }, + ], + [ + { + node: 'Merge6', + type: 'main', + index: 1, + }, + ], + ], + }, + NoOp2: { + main: [ + [ + { + node: 'Merge5', + type: 'main', + index: 
1, + }, + ], + ], + }, + Merge3: { + main: [ + [ + { + node: 'NoOp2', + type: 'main', + index: 0, + }, + ], + ], + }, + IF3: { + main: [ + [ + { + node: 'Merge3', + type: 'main', + index: 0, + }, + ], + [ + { + node: 'IF4', + type: 'main', + index: 0, + }, + ], + ], + }, + Merge2: { + main: [ + [ + { + node: 'Merge4', + type: 'main', + index: 1, + }, + ], + ], + }, + IF2: { + main: [ + [ + { + node: 'Merge2', + type: 'main', + index: 0, + }, + ], + [ + { + node: 'Merge2', + type: 'main', + index: 1, + }, + ], + ], + }, + Merge6: { + main: [ + [ + { + node: 'Merge7', + type: 'main', + index: 1, + }, + ], + ], + }, + Merge5: { + main: [ + [ + { + node: 'Merge6', + type: 'main', + index: 0, + }, + ], + ], + }, + Merge4: { + main: [ + [ + { + node: 'Merge5', + type: 'main', + index: 0, + }, + ], + ], + }, + Merge1: { + main: [ + [ + { + node: 'Merge4', + type: 'main', + index: 0, + }, + ], + ], + }, + IF1: { + main: [ + [ + { + node: 'Merge1', + type: 'main', + index: 0, + }, + ], + [ + { + node: 'Merge1', + type: 'main', + index: 1, + }, + ], + ], + }, + Set1: { + main: [ + [ + { + node: 'IF1', + type: 'main', + index: 0, + }, + { + node: 'IF2', + type: 'main', + index: 0, + }, + { + node: 'IF3', + type: 'main', + index: 0, + }, + ], + ], + }, + Start: { + main: [ + [ + { + node: 'Set1', + type: 'main', + index: 0, + }, + ], + ], + }, + }, + }, + }, + output: { + nodeExecutionOrder: [ + 'Start', + 'Set1', + 'IF1', + 'IF2', + 'IF3', + 'Merge2', + 'IF4', + 'Merge1', + 'Merge4', + 'Merge3', + 'NoOp2', + 'Merge5', + 'Merge6', + ], + nodeData: { + Merge1: [[{}]], + Merge2: [ + [ + { + test: 'a', + }, + ], + ], + Merge3: [[{}]], + Merge4: [ + [ + {}, + { + test: 'a', + }, + ], + ], + Merge5: [ + [ + {}, + { + test: 'a', + }, + {}, + ], + ], + Merge6: [ + [ + {}, + { + test: 'a', + }, + {}, + { + test: 'a', + }, + ], + ], + }, + }, + }, + { + description: 'should simply execute the next multi-input-node (totally ignoring the runIndex)', + input: { + workflowData: { + nodes: [ + { + parameters: { + values: { + number: [ + { + name: 'counter', + value: '={{ ($input.first().json.counter || 0) + 1 }}', + }, + ], + }, + options: {}, + }, + id: '18191406-b56b-4388-9d4b-ff5b22fdc02c', + name: 'Set', + type: 'n8n-nodes-base.set', + typeVersion: 2, + position: [640, 660], + }, + { + parameters: { + conditions: { + number: [ + { + value1: '={{ $json.counter }}', + value2: 3, + }, + ], + }, + }, + id: '0c6f239b-f9f5-4a20-b554-c69e7bc692b1', + name: 'IF', + type: 'n8n-nodes-base.if', + typeVersion: 1, + position: [900, 660], + }, + { + parameters: {}, + id: '463194c3-4fcb-4da4-bba0-bc58462ac59a', + name: 'Merge', + type: 'n8n-nodes-base.merge', + typeVersion: 2.1, + position: [1180, 760], + }, + { + parameters: { + values: { + number: [ + { + name: 'counter', + value: '={{ ($input.first().json.counter || 0) + 1 }}', + }, + ], + }, + options: {}, + }, + id: '8b5177c1-34ab-468f-8cb1-ff1d253562dc', + name: 'Set1', + type: 'n8n-nodes-base.set', + typeVersion: 2, + position: [640, 320], + }, + { + parameters: { + conditions: { + number: [ + { + value1: '={{ $json.counter }}', + value2: 3, + }, + ], + }, + }, + id: '455663ab-bc3b-4674-9769-7428c85918c3', + name: 'IF1', + type: 'n8n-nodes-base.if', + typeVersion: 1, + position: [860, 320], + }, + { + parameters: {}, + id: 'ffc0d327-5cbc-4cf3-8fb0-77c087b391c1', + name: 'Merge1', + type: 'n8n-nodes-base.merge', + typeVersion: 2.1, + position: [1180, 420], + }, + { + parameters: {}, + id: '9a5b13a4-eba1-4a18-a4c6-36bedb07d975', + name: 'Merge2', + type: 
'n8n-nodes-base.merge', + typeVersion: 2.1, + position: [1500, 600], + }, + { + parameters: {}, + id: '89a78e50-2ec6-48bf-be5f-3838600cd08a', + name: 'Start', + type: 'n8n-nodes-base.start', + typeVersion: 1, + position: [-20, 700], + }, + ], + connections: { + Set: { + main: [ + [ + { + node: 'IF', + type: 'main', + index: 0, + }, + ], + ], + }, + IF: { + main: [ + [ + { + node: 'Set', + type: 'main', + index: 0, + }, + { + node: 'Merge1', + type: 'main', + index: 1, + }, + ], + [ + { + node: 'Merge', + type: 'main', + index: 0, + }, + ], + ], + }, + Merge: { + main: [ + [ + { + node: 'Merge2', + type: 'main', + index: 1, + }, + ], + ], + }, + Set1: { + main: [ + [ + { + node: 'IF1', + type: 'main', + index: 0, + }, + ], + ], + }, + IF1: { + main: [ + [ + { + node: 'Set1', + type: 'main', + index: 0, + }, + { + node: 'Merge1', + type: 'main', + index: 0, + }, + ], + ], + }, + Merge1: { + main: [ + [ + { + node: 'Merge2', + type: 'main', + index: 0, + }, + ], + ], + }, + Start: { + main: [ + [ + { + node: 'Merge', + type: 'main', + index: 1, + }, + { + node: 'Set1', + type: 'main', + index: 0, + }, + { + node: 'Set', + type: 'main', + index: 0, + }, + ], + ], + }, + }, + }, + }, + output: { + nodeExecutionOrder: [ + 'Start', + 'Set1', + 'Set', + 'IF1', + 'IF', + 'Set1', + 'Set', + 'Merge1', + 'IF1', + 'IF', + 'Set1', + 'Set', + 'Merge1', + 'IF1', + 'IF', + 'Merge', + 'Merge2', + 'Merge2', + ], + nodeData: { + Start: [[{}]], + Set1: [ + [ + { + counter: 1, + }, + ], + [ + { + counter: 2, + }, + ], + [ + { + counter: 3, + }, + ], + ], + Set: [ + [ + { + counter: 1, + }, + ], + [ + { + counter: 2, + }, + ], + [ + { + counter: 3, + }, + ], + ], + IF1: [ + [ + { + counter: 1, + }, + ], + [ + { + counter: 2, + }, + ], + [], + ], + IF: [ + [ + { + counter: 1, + }, + ], + [ + { + counter: 2, + }, + ], + [], + ], + Merge1: [ + [ + { + counter: 1, + }, + { + counter: 1, + }, + ], + [ + { + counter: 2, + }, + { + counter: 2, + }, + ], + ], + Merge: [ + [ + { + counter: 3, + }, + {}, + ], + ], + Merge2: [ + [ + { + counter: 1, + }, + { + counter: 1, + }, + { + counter: 3, + }, + {}, + ], + [ + { + counter: 2, + }, + { + counter: 2, + }, + ], + ], + }, + }, + }, { description: 'should run basic two node workflow', input: { @@ -806,7 +1565,7 @@ export const predefinedWorkflowExecuteTests: WorkflowTestData[] = [ }, }, { - description: 'should run node twice when it has two input connections', + description: 'should run node twice when it has two input connections3', input: { // Leave the workflowData in regular JSON to be able to easily // copy it from/in the UI @@ -1920,7 +2679,6 @@ export const predefinedWorkflowExecuteTests: WorkflowTestData[] = [ }, }, }, - { description: 'should display the correct parameters and so correct data when simplified node-versioning is used', @@ -2064,4 +2822,2429 @@ export const predefinedWorkflowExecuteTests: WorkflowTestData[] = [ }, }, }, + { + description: 'should execute nodes in the correct order, breath-first & order of connection', + input: { + workflowData: { + nodes: [ + { + parameters: {}, + id: '3e4ab8bb-2e22-45d9-9287-0265f2ee9c4b', + name: 'Start', + type: 'n8n-nodes-base.start', + typeVersion: 1, + position: [300, 620], + }, + { + parameters: { + options: {}, + }, + id: '444650ce-464a-4630-9e24-109056105167', + name: 'Wait', + type: 'n8n-nodes-base.set', + typeVersion: 1, + position: [740, 420], + webhookId: '1f4118f8-591a-48fe-a68d-6fec3c99b7a8', + }, + { + parameters: { + values: { + number: [ + { + name: 'wait', + }, + ], + }, + options: {}, + }, + id: 
'7a74a097-6563-4f1e-a327-97e5a43b8acb', + name: 'Set', + type: 'n8n-nodes-base.set', + typeVersion: 2, + position: [480, 620], + }, + { + parameters: { + options: {}, + }, + id: '9039eebf-6c11-4ce0-b8ad-0812774019d4', + name: 'Wait1', + type: 'n8n-nodes-base.set', + typeVersion: 1, + position: [740, 800], + webhookId: '35ceb27a-3fb1-47a9-8678-2df16dcecbcb', + }, + { + parameters: { + options: {}, + }, + id: '7f130b16-8fac-4d93-a0ef-56dfe575f952', + name: 'Wait2', + type: 'n8n-nodes-base.set', + typeVersion: 1, + position: [940, 420], + webhookId: 'cc8e2fd2-afc8-4a17-afda-fda943f4bd83', + }, + { + parameters: { + options: {}, + }, + id: '063e2097-b27a-4775-923c-5b839c434640', + name: 'Wait3', + type: 'n8n-nodes-base.set', + typeVersion: 1, + position: [1300, 420], + webhookId: '35400ecf-3e53-4b2d-9fd7-2663bbfd830f', + }, + { + parameters: { + options: {}, + }, + id: 'ec908b56-8829-4566-a0b7-ced4bd16c550', + name: 'Wait4', + type: 'n8n-nodes-base.set', + typeVersion: 1, + position: [940, 800], + webhookId: 'cc8e2fd2-afc8-4a17-afda-fda943f4bd83', + }, + { + parameters: { + options: {}, + }, + id: 'a7d279bd-7241-4744-8ef6-41468131dfa7', + name: 'Wait5', + type: 'n8n-nodes-base.set', + typeVersion: 1, + position: [1140, 800], + webhookId: '35400ecf-3e53-4b2d-9fd7-2663bbfd830f', + }, + { + parameters: { + options: {}, + }, + id: 'f620aff1-7d9c-453f-a2c1-6e3b9a1664d3', + name: 'Wait6', + type: 'n8n-nodes-base.set', + typeVersion: 1, + position: [760, 200], + webhookId: '35400ecf-3e53-4b2d-9fd7-2663bbfd830f', + }, + { + parameters: { + options: {}, + }, + id: '1d9bac9b-8197-4ad9-9189-f947068f1a46', + name: 'Wait7', + type: 'n8n-nodes-base.set', + typeVersion: 1, + position: [1060, 200], + webhookId: '35400ecf-3e53-4b2d-9fd7-2663bbfd830f', + }, + { + parameters: { + options: {}, + }, + id: '9ad0cc8c-4922-440e-913c-39c8570ddcbc', + name: 'Wait8', + type: 'n8n-nodes-base.set', + typeVersion: 1, + position: [740, 600], + webhookId: '35400ecf-3e53-4b2d-9fd7-2663bbfd830f', + }, + { + parameters: { + options: {}, + }, + id: 'af0ca700-b6ed-40c1-8c62-bbadb6fd81f7', + name: 'Wait9', + type: 'n8n-nodes-base.set', + typeVersion: 1, + position: [1040, 580], + webhookId: '35400ecf-3e53-4b2d-9fd7-2663bbfd830f', + }, + { + parameters: { + options: {}, + }, + id: 'f2553f9f-670f-4b54-8b89-84dd5a27a244', + name: 'Wait10', + type: 'n8n-nodes-base.set', + typeVersion: 1, + position: [1660, 340], + webhookId: '35400ecf-3e53-4b2d-9fd7-2663bbfd830f', + }, + { + parameters: { + options: {}, + }, + id: '0f4475cb-87db-4ed7-a7a0-8a67043c320b', + name: 'Wait11', + type: 'n8n-nodes-base.set', + typeVersion: 1, + position: [1660, 540], + webhookId: '35400ecf-3e53-4b2d-9fd7-2663bbfd830f', + }, + { + parameters: { + options: {}, + }, + id: '815f7b2a-1789-48a3-be61-931e643e6d89', + name: 'Wait12', + type: 'n8n-nodes-base.set', + typeVersion: 1, + position: [1920, 340], + webhookId: '35400ecf-3e53-4b2d-9fd7-2663bbfd830f', + }, + { + parameters: { + options: {}, + }, + id: 'be1e11af-b8e4-40cb-af36-03613e384b5e', + name: 'Wait13', + type: 'n8n-nodes-base.set', + typeVersion: 1, + position: [1240, 580], + webhookId: '35400ecf-3e53-4b2d-9fd7-2663bbfd830f', + }, + { + parameters: {}, + id: 'cf72f99c-612f-4b76-bc8e-d77612e4faa9', + name: 'Merge', + type: 'n8n-nodes-base.merge', + typeVersion: 2, + position: [1300, 220], + }, + { + parameters: { + options: {}, + }, + id: 'bfe1dfca-a060-4c37-94d0-058739e7cfca', + name: 'Wait14', + type: 'n8n-nodes-base.set', + typeVersion: 1, + position: [1520, 220], + webhookId: 
'35400ecf-3e53-4b2d-9fd7-2663bbfd830f', + }, + { + parameters: { + conditions: { + number: [ + { + value1: '={{ $itemIndex }}', + operation: 'equal', + value2: 1, + }, + ], + }, + }, + id: 'bf7d7e54-db5f-4f20-bf3e-b07224096872', + name: 'IF', + type: 'n8n-nodes-base.if', + typeVersion: 1, + position: [1780, -220], + }, + { + parameters: { + options: {}, + }, + id: 'd340f2ad-3a6a-4412-bd15-9a7dde1fcb8c', + name: 'Wait15', + type: 'n8n-nodes-base.set', + typeVersion: 1, + position: [2020, -300], + webhookId: '35400ecf-3e53-4b2d-9fd7-2663bbfd830f', + }, + { + parameters: { + options: {}, + }, + id: '913a3c9c-1704-433d-9790-21ad0922e5e1', + name: 'Wait16', + type: 'n8n-nodes-base.set', + typeVersion: 1, + position: [2020, -140], + webhookId: '35400ecf-3e53-4b2d-9fd7-2663bbfd830f', + }, + { + parameters: { + conditions: { + number: [ + { + value1: '={{ $itemIndex }}', + operation: 'equal', + value2: 1, + }, + ], + }, + }, + id: 'df1fba53-92af-4351-b471-114dda12bef9', + name: 'IF1', + type: 'n8n-nodes-base.if', + typeVersion: 1, + position: [1780, 120], + }, + { + parameters: { + options: {}, + }, + id: '8b3c7e63-8cd8-469d-b6d4-bf5c1953af11', + name: 'Wait17', + type: 'n8n-nodes-base.set', + typeVersion: 1, + position: [2020, 200], + webhookId: '35400ecf-3e53-4b2d-9fd7-2663bbfd830f', + }, + { + parameters: { + options: {}, + }, + id: 'e74c4b7c-fc76-4e48-9a0e-3195b19ce1a0', + name: 'Wait18', + type: 'n8n-nodes-base.set', + typeVersion: 1, + position: [2020, 40], + webhookId: '35400ecf-3e53-4b2d-9fd7-2663bbfd830f', + }, + ], + connections: { + Start: { + main: [ + [ + { + node: 'Set', + type: 'main', + index: 0, + }, + ], + ], + }, + Wait: { + main: [ + [ + { + node: 'Wait2', + type: 'main', + index: 0, + }, + ], + ], + }, + Set: { + main: [ + [ + { + node: 'Wait', + type: 'main', + index: 0, + }, + { + node: 'Wait1', + type: 'main', + index: 0, + }, + { + node: 'Wait6', + type: 'main', + index: 0, + }, + { + node: 'Wait7', + type: 'main', + index: 0, + }, + { + node: 'Wait8', + type: 'main', + index: 0, + }, + { + node: 'Wait9', + type: 'main', + index: 0, + }, + ], + ], + }, + Wait1: { + main: [ + [ + { + node: 'Wait4', + type: 'main', + index: 0, + }, + ], + ], + }, + Wait2: { + main: [ + [ + { + node: 'Wait3', + type: 'main', + index: 0, + }, + { + node: 'Merge', + type: 'main', + index: 1, + }, + ], + ], + }, + Wait3: { + main: [ + [ + { + node: 'Wait10', + type: 'main', + index: 0, + }, + { + node: 'Wait11', + type: 'main', + index: 0, + }, + ], + ], + }, + Wait4: { + main: [ + [ + { + node: 'Wait5', + type: 'main', + index: 0, + }, + ], + ], + }, + Wait7: { + main: [ + [ + { + node: 'Merge', + type: 'main', + index: 0, + }, + ], + ], + }, + Wait9: { + main: [ + [ + { + node: 'Wait13', + type: 'main', + index: 0, + }, + ], + ], + }, + Wait10: { + main: [ + [ + { + node: 'Wait12', + type: 'main', + index: 0, + }, + ], + ], + }, + Merge: { + main: [ + [ + { + node: 'Wait14', + type: 'main', + index: 0, + }, + ], + ], + }, + Wait14: { + main: [ + [ + { + node: 'IF', + type: 'main', + index: 0, + }, + { + node: 'IF1', + type: 'main', + index: 0, + }, + ], + ], + }, + IF: { + main: [ + [ + { + node: 'Wait15', + type: 'main', + index: 0, + }, + ], + [ + { + node: 'Wait16', + type: 'main', + index: 0, + }, + ], + ], + }, + IF1: { + main: [ + [ + { + node: 'Wait17', + type: 'main', + index: 0, + }, + ], + [ + { + node: 'Wait18', + type: 'main', + index: 0, + }, + ], + ], + }, + }, + }, + }, + output: { + nodeExecutionOrder: [ + 'Start', + 'Set', + 'Wait', + 'Wait1', + 'Wait6', + 'Wait7', + 'Wait8', 
+ 'Wait9', + 'Wait2', + 'Wait4', + 'Wait13', + 'Wait3', + 'Merge', + 'Wait5', + 'Wait10', + 'Wait11', + 'Wait14', + 'Wait12', + 'IF', + 'IF1', + 'Wait15', + 'Wait16', + 'Wait17', + 'Wait18', + ], + nodeData: {}, + }, + }, +]; + +export const v1WorkflowExecuteTests: WorkflowTestData[] = [ + { + description: 'should run node twice when it has two input connections', + input: { + // Leave the workflowData in regular JSON to be able to easily + // copy it from/in the UI + workflowData: { + nodes: [ + { + id: 'uuid-1', + parameters: {}, + name: 'Start', + type: 'n8n-nodes-base.start', + typeVersion: 1, + position: [100, 300], + }, + { + id: 'uuid-2', + parameters: { + values: { + number: [ + { + name: 'value1', + value: 1, + }, + ], + }, + }, + name: 'Set1', + type: 'n8n-nodes-base.set', + typeVersion: 1, + position: [300, 250], + }, + { + id: 'uuid-3', + parameters: { + values: { + number: [ + { + name: 'value2', + value: 2, + }, + ], + }, + }, + name: 'Set2', + type: 'n8n-nodes-base.set', + typeVersion: 1, + position: [500, 400], + }, + ], + connections: { + Start: { + main: [ + [ + { + node: 'Set1', + type: 'main', + index: 0, + }, + { + node: 'Set2', + type: 'main', + index: 0, + }, + ], + ], + }, + Set1: { + main: [ + [ + { + node: 'Set2', + type: 'main', + index: 0, + }, + ], + ], + }, + }, + }, + }, + output: { + nodeExecutionOrder: ['Start', 'Set1', 'Set2', 'Set2'], + nodeData: { + Set1: [ + [ + { + value1: 1, + }, + ], + ], + Set2: [ + [ + { + value1: 1, + value2: 2, + }, + ], + [ + { + value2: 2, + }, + ], + ], + }, + }, + }, + { + description: 'should run complicated multi node workflow', + input: { + // Leave the workflowData in regular JSON to be able to easily + // copy it from/in the UI + workflowData: { + nodes: [ + { + id: 'uuid-1', + parameters: { + mode: 'passThrough', + }, + name: 'Merge4', + type: 'n8n-nodes-base.merge', + typeVersion: 1, + position: [1150, 500], + }, + { + id: 'uuid-2', + parameters: { + values: { + number: [ + { + name: 'value2', + value: 2, + }, + ], + }, + }, + name: 'Set2', + type: 'n8n-nodes-base.set', + typeVersion: 1, + position: [290, 400], + }, + { + id: 'uuid-3', + parameters: { + values: { + number: [ + { + name: 'value4', + value: 4, + }, + ], + }, + }, + name: 'Set4', + type: 'n8n-nodes-base.set', + typeVersion: 1, + position: [850, 200], + }, + { + id: 'uuid-4', + parameters: { + values: { + number: [ + { + name: 'value3', + value: 3, + }, + ], + }, + }, + name: 'Set3', + type: 'n8n-nodes-base.set', + typeVersion: 1, + position: [650, 200], + }, + { + id: 'uuid-5', + parameters: { + mode: 'passThrough', + }, + name: 'Merge4', + type: 'n8n-nodes-base.merge', + typeVersion: 1, + position: [1150, 500], + }, + { + id: 'uuid-6', + parameters: {}, + name: 'Merge3', + type: 'n8n-nodes-base.merge', + typeVersion: 1, + position: [1000, 400], + }, + { + id: 'uuid-7', + parameters: { + mode: 'passThrough', + output: 'input2', + }, + name: 'Merge2', + type: 'n8n-nodes-base.merge', + typeVersion: 1, + position: [700, 400], + }, + { + id: 'uuid-8', + parameters: {}, + name: 'Merge1', + type: 'n8n-nodes-base.merge', + typeVersion: 1, + position: [500, 300], + }, + { + id: 'uuid-9', + parameters: { + values: { + number: [ + { + name: 'value1', + value: 1, + }, + ], + }, + }, + name: 'Set1', + type: 'n8n-nodes-base.set', + typeVersion: 1, + position: [300, 200], + }, + { + id: 'uuid-10', + parameters: {}, + name: 'Start', + type: 'n8n-nodes-base.start', + typeVersion: 1, + position: [100, 300], + }, + ], + connections: { + Set2: { + main: [ + [ + { + node: 
'Merge1', + type: 'main', + index: 1, + }, + { + node: 'Merge2', + type: 'main', + index: 1, + }, + ], + ], + }, + Set4: { + main: [ + [ + { + node: 'Merge3', + type: 'main', + index: 0, + }, + ], + ], + }, + Set3: { + main: [ + [ + { + node: 'Set4', + type: 'main', + index: 0, + }, + ], + ], + }, + Merge3: { + main: [ + [ + { + node: 'Merge4', + type: 'main', + index: 0, + }, + ], + ], + }, + Merge2: { + main: [ + [ + { + node: 'Merge3', + type: 'main', + index: 1, + }, + ], + ], + }, + Merge1: { + main: [ + [ + { + node: 'Merge2', + type: 'main', + index: 0, + }, + ], + ], + }, + Set1: { + main: [ + [ + { + node: 'Merge1', + type: 'main', + index: 0, + }, + { + node: 'Set3', + type: 'main', + index: 0, + }, + ], + ], + }, + Start: { + main: [ + [ + { + node: 'Set1', + type: 'main', + index: 0, + }, + { + node: 'Set2', + type: 'main', + index: 0, + }, + { + node: 'Merge4', + type: 'main', + index: 1, + }, + ], + ], + }, + }, + }, + }, + output: { + nodeExecutionOrder: [ + 'Start', + 'Set1', + 'Set3', + 'Set4', + 'Set2', + 'Merge1', + 'Merge2', + 'Merge3', + 'Merge4', + ], + nodeData: { + Set1: [ + [ + { + value1: 1, + }, + ], + ], + Set2: [ + [ + { + value2: 2, + }, + ], + ], + Set3: [ + [ + { + value1: 1, + value3: 3, + }, + ], + ], + Set4: [ + [ + { + value1: 1, + value3: 3, + value4: 4, + }, + ], + ], + Merge1: [ + [ + { + value1: 1, + }, + { + value2: 2, + }, + ], + ], + Merge2: [ + [ + { + value2: 2, + }, + ], + ], + Merge3: [ + [ + { + value1: 1, + value3: 3, + value4: 4, + }, + { + value2: 2, + }, + ], + ], + Merge4: [ + [ + { + value1: 1, + value3: 3, + value4: 4, + }, + { + value2: 2, + }, + ], + ], + }, + }, + }, + { + description: + 'should execute nodes in the correct order, depth-first & the most top-left one first', + input: { + workflowData: { + nodes: [ + { + parameters: {}, + id: '3e4ab8bb-2e22-45d9-9287-0265f2ee9c4b', + name: 'Start', + type: 'n8n-nodes-base.start', + typeVersion: 1, + position: [300, 620], + }, + { + parameters: { + options: {}, + }, + id: '444650ce-464a-4630-9e24-109056105167', + name: 'Wait', + type: 'n8n-nodes-base.set', + typeVersion: 1, + position: [740, 420], + webhookId: '1f4118f8-591a-48fe-a68d-6fec3c99b7a8', + }, + { + parameters: { + values: { + number: [ + { + name: 'wait', + }, + ], + }, + options: {}, + }, + id: '7a74a097-6563-4f1e-a327-97e5a43b8acb', + name: 'Set', + type: 'n8n-nodes-base.set', + typeVersion: 2, + position: [480, 620], + }, + { + parameters: { + options: {}, + }, + id: '9039eebf-6c11-4ce0-b8ad-0812774019d4', + name: 'Wait1', + type: 'n8n-nodes-base.set', + typeVersion: 1, + position: [740, 800], + webhookId: '35ceb27a-3fb1-47a9-8678-2df16dcecbcb', + }, + { + parameters: { + options: {}, + }, + id: '7f130b16-8fac-4d93-a0ef-56dfe575f952', + name: 'Wait2', + type: 'n8n-nodes-base.set', + typeVersion: 1, + position: [940, 420], + webhookId: 'cc8e2fd2-afc8-4a17-afda-fda943f4bd83', + }, + { + parameters: { + options: {}, + }, + id: '063e2097-b27a-4775-923c-5b839c434640', + name: 'Wait3', + type: 'n8n-nodes-base.set', + typeVersion: 1, + position: [1300, 420], + webhookId: '35400ecf-3e53-4b2d-9fd7-2663bbfd830f', + }, + { + parameters: { + options: {}, + }, + id: 'ec908b56-8829-4566-a0b7-ced4bd16c550', + name: 'Wait4', + type: 'n8n-nodes-base.set', + typeVersion: 1, + position: [940, 800], + webhookId: 'cc8e2fd2-afc8-4a17-afda-fda943f4bd83', + }, + { + parameters: { + options: {}, + }, + id: 'a7d279bd-7241-4744-8ef6-41468131dfa7', + name: 'Wait5', + type: 'n8n-nodes-base.set', + typeVersion: 1, + position: [1140, 800], + 
webhookId: '35400ecf-3e53-4b2d-9fd7-2663bbfd830f', + }, + { + parameters: { + options: {}, + }, + id: 'f620aff1-7d9c-453f-a2c1-6e3b9a1664d3', + name: 'Wait6', + type: 'n8n-nodes-base.set', + typeVersion: 1, + position: [760, 200], + webhookId: '35400ecf-3e53-4b2d-9fd7-2663bbfd830f', + }, + { + parameters: { + options: {}, + }, + id: '1d9bac9b-8197-4ad9-9189-f947068f1a46', + name: 'Wait7', + type: 'n8n-nodes-base.set', + typeVersion: 1, + position: [1060, 200], + webhookId: '35400ecf-3e53-4b2d-9fd7-2663bbfd830f', + }, + { + parameters: { + options: {}, + }, + id: '9ad0cc8c-4922-440e-913c-39c8570ddcbc', + name: 'Wait8', + type: 'n8n-nodes-base.set', + typeVersion: 1, + position: [740, 600], + webhookId: '35400ecf-3e53-4b2d-9fd7-2663bbfd830f', + }, + { + parameters: { + options: {}, + }, + id: 'af0ca700-b6ed-40c1-8c62-bbadb6fd81f7', + name: 'Wait9', + type: 'n8n-nodes-base.set', + typeVersion: 1, + position: [1040, 580], + webhookId: '35400ecf-3e53-4b2d-9fd7-2663bbfd830f', + }, + { + parameters: { + options: {}, + }, + id: 'f2553f9f-670f-4b54-8b89-84dd5a27a244', + name: 'Wait10', + type: 'n8n-nodes-base.set', + typeVersion: 1, + position: [1660, 340], + webhookId: '35400ecf-3e53-4b2d-9fd7-2663bbfd830f', + }, + { + parameters: { + options: {}, + }, + id: '0f4475cb-87db-4ed7-a7a0-8a67043c320b', + name: 'Wait11', + type: 'n8n-nodes-base.set', + typeVersion: 1, + position: [1660, 540], + webhookId: '35400ecf-3e53-4b2d-9fd7-2663bbfd830f', + }, + { + parameters: { + options: {}, + }, + id: '815f7b2a-1789-48a3-be61-931e643e6d89', + name: 'Wait12', + type: 'n8n-nodes-base.set', + typeVersion: 1, + position: [1920, 340], + webhookId: '35400ecf-3e53-4b2d-9fd7-2663bbfd830f', + }, + { + parameters: { + options: {}, + }, + id: 'be1e11af-b8e4-40cb-af36-03613e384b5e', + name: 'Wait13', + type: 'n8n-nodes-base.set', + typeVersion: 1, + position: [1240, 580], + webhookId: '35400ecf-3e53-4b2d-9fd7-2663bbfd830f', + }, + { + parameters: {}, + id: 'cf72f99c-612f-4b76-bc8e-d77612e4faa9', + name: 'Merge', + type: 'n8n-nodes-base.merge', + typeVersion: 2, + position: [1300, 220], + }, + { + parameters: { + options: {}, + }, + id: 'bfe1dfca-a060-4c37-94d0-058739e7cfca', + name: 'Wait14', + type: 'n8n-nodes-base.set', + typeVersion: 1, + position: [1520, 220], + webhookId: '35400ecf-3e53-4b2d-9fd7-2663bbfd830f', + }, + { + parameters: { + conditions: { + number: [ + { + value1: '={{ $itemIndex }}', + operation: 'equal', + value2: 1, + }, + ], + }, + }, + id: 'bf7d7e54-db5f-4f20-bf3e-b07224096872', + name: 'IF', + type: 'n8n-nodes-base.if', + typeVersion: 1, + position: [1780, -220], + }, + { + parameters: { + options: {}, + }, + id: 'd340f2ad-3a6a-4412-bd15-9a7dde1fcb8c', + name: 'Wait15', + type: 'n8n-nodes-base.set', + typeVersion: 1, + position: [2020, -300], + webhookId: '35400ecf-3e53-4b2d-9fd7-2663bbfd830f', + }, + { + parameters: { + options: {}, + }, + id: '913a3c9c-1704-433d-9790-21ad0922e5e1', + name: 'Wait16', + type: 'n8n-nodes-base.set', + typeVersion: 1, + position: [2020, -140], + webhookId: '35400ecf-3e53-4b2d-9fd7-2663bbfd830f', + }, + { + parameters: { + conditions: { + number: [ + { + value1: '={{ $itemIndex }}', + operation: 'equal', + value2: 1, + }, + ], + }, + }, + id: 'df1fba53-92af-4351-b471-114dda12bef9', + name: 'IF1', + type: 'n8n-nodes-base.if', + typeVersion: 1, + position: [1780, 120], + }, + { + parameters: { + options: {}, + }, + id: '8b3c7e63-8cd8-469d-b6d4-bf5c1953af11', + name: 'Wait17', + type: 'n8n-nodes-base.set', + typeVersion: 1, + position: [2020, 200], + webhookId: 
'35400ecf-3e53-4b2d-9fd7-2663bbfd830f', + }, + { + parameters: { + options: {}, + }, + id: 'e74c4b7c-fc76-4e48-9a0e-3195b19ce1a0', + name: 'Wait18', + type: 'n8n-nodes-base.set', + typeVersion: 1, + position: [2020, 40], + webhookId: '35400ecf-3e53-4b2d-9fd7-2663bbfd830f', + }, + ], + connections: { + Start: { + main: [ + [ + { + node: 'Set', + type: 'main', + index: 0, + }, + ], + ], + }, + Wait: { + main: [ + [ + { + node: 'Wait2', + type: 'main', + index: 0, + }, + ], + ], + }, + Set: { + main: [ + [ + { + node: 'Wait', + type: 'main', + index: 0, + }, + { + node: 'Wait1', + type: 'main', + index: 0, + }, + { + node: 'Wait6', + type: 'main', + index: 0, + }, + { + node: 'Wait7', + type: 'main', + index: 0, + }, + { + node: 'Wait8', + type: 'main', + index: 0, + }, + { + node: 'Wait9', + type: 'main', + index: 0, + }, + ], + ], + }, + Wait1: { + main: [ + [ + { + node: 'Wait4', + type: 'main', + index: 0, + }, + ], + ], + }, + Wait2: { + main: [ + [ + { + node: 'Wait3', + type: 'main', + index: 0, + }, + { + node: 'Merge', + type: 'main', + index: 1, + }, + ], + ], + }, + Wait3: { + main: [ + [ + { + node: 'Wait10', + type: 'main', + index: 0, + }, + { + node: 'Wait11', + type: 'main', + index: 0, + }, + ], + ], + }, + Wait4: { + main: [ + [ + { + node: 'Wait5', + type: 'main', + index: 0, + }, + ], + ], + }, + Wait7: { + main: [ + [ + { + node: 'Merge', + type: 'main', + index: 0, + }, + ], + ], + }, + Wait9: { + main: [ + [ + { + node: 'Wait13', + type: 'main', + index: 0, + }, + ], + ], + }, + Wait10: { + main: [ + [ + { + node: 'Wait12', + type: 'main', + index: 0, + }, + ], + ], + }, + Merge: { + main: [ + [ + { + node: 'Wait14', + type: 'main', + index: 0, + }, + ], + ], + }, + Wait14: { + main: [ + [ + { + node: 'IF', + type: 'main', + index: 0, + }, + { + node: 'IF1', + type: 'main', + index: 0, + }, + ], + ], + }, + IF: { + main: [ + [ + { + node: 'Wait15', + type: 'main', + index: 0, + }, + ], + [ + { + node: 'Wait16', + type: 'main', + index: 0, + }, + ], + ], + }, + IF1: { + main: [ + [ + { + node: 'Wait17', + type: 'main', + index: 0, + }, + ], + [ + { + node: 'Wait18', + type: 'main', + index: 0, + }, + ], + ], + }, + }, + }, + }, + output: { + nodeExecutionOrder: [ + 'Start', + 'Set', + 'Wait6', + 'Wait7', + 'Wait', + 'Wait2', + 'Merge', + 'Wait14', + 'IF', + 'Wait15', + 'Wait16', + 'IF1', + 'Wait18', + 'Wait17', + 'Wait3', + 'Wait10', + 'Wait12', + 'Wait11', + 'Wait9', + 'Wait13', + 'Wait8', + 'Wait1', + 'Wait4', + 'Wait5', + ], + nodeData: {}, + }, + }, + { + description: 'should simply execute the next multi-input-node (totally ignoring the runIndex)', + input: { + workflowData: { + nodes: [ + { + parameters: { + values: { + number: [ + { + name: 'counter', + value: '={{ ($input.first().json.counter || 0) + 1 }}', + }, + ], + }, + options: {}, + }, + id: '18191406-b56b-4388-9d4b-ff5b22fdc02c', + name: 'Set', + type: 'n8n-nodes-base.set', + typeVersion: 2, + position: [640, 660], + }, + { + parameters: { + conditions: { + number: [ + { + value1: '={{ $json.counter }}', + value2: 3, + }, + ], + }, + }, + id: '0c6f239b-f9f5-4a20-b554-c69e7bc692b1', + name: 'IF', + type: 'n8n-nodes-base.if', + typeVersion: 1, + position: [900, 660], + }, + { + parameters: {}, + id: '463194c3-4fcb-4da4-bba0-bc58462ac59a', + name: 'Merge', + type: 'n8n-nodes-base.merge', + typeVersion: 2, + position: [1180, 760], + }, + { + parameters: { + values: { + number: [ + { + name: 'counter', + value: '={{ ($input.first().json.counter || 0) + 1 }}', + }, + ], + }, + options: {}, + }, + id: 
'8b5177c1-34ab-468f-8cb1-ff1d253562dc', + name: 'Set1', + type: 'n8n-nodes-base.set', + typeVersion: 2, + position: [640, 320], + }, + { + parameters: { + conditions: { + number: [ + { + value1: '={{ $json.counter }}', + value2: 3, + }, + ], + }, + }, + id: '455663ab-bc3b-4674-9769-7428c85918c3', + name: 'IF1', + type: 'n8n-nodes-base.if', + typeVersion: 1, + position: [860, 320], + }, + { + parameters: {}, + id: 'ffc0d327-5cbc-4cf3-8fb0-77c087b391c1', + name: 'Merge1', + type: 'n8n-nodes-base.merge', + typeVersion: 2, + position: [1180, 420], + }, + { + parameters: {}, + id: '9a5b13a4-eba1-4a18-a4c6-36bedb07d975', + name: 'Merge2', + type: 'n8n-nodes-base.merge', + typeVersion: 2, + position: [1500, 600], + }, + { + parameters: {}, + id: '89a78e50-2ec6-48bf-be5f-3838600cd08a', + name: 'Start', + type: 'n8n-nodes-base.start', + typeVersion: 1, + position: [-20, 700], + }, + ], + connections: { + Set: { + main: [ + [ + { + node: 'IF', + type: 'main', + index: 0, + }, + ], + ], + }, + IF: { + main: [ + [ + { + node: 'Set', + type: 'main', + index: 0, + }, + { + node: 'Merge1', + type: 'main', + index: 1, + }, + ], + [ + { + node: 'Merge', + type: 'main', + index: 0, + }, + ], + ], + }, + Merge: { + main: [ + [ + { + node: 'Merge2', + type: 'main', + index: 1, + }, + ], + ], + }, + Set1: { + main: [ + [ + { + node: 'IF1', + type: 'main', + index: 0, + }, + ], + ], + }, + IF1: { + main: [ + [ + { + node: 'Set1', + type: 'main', + index: 0, + }, + { + node: 'Merge1', + type: 'main', + index: 0, + }, + ], + ], + }, + Merge1: { + main: [ + [ + { + node: 'Merge2', + type: 'main', + index: 0, + }, + ], + ], + }, + Start: { + main: [ + [ + { + node: 'Merge', + type: 'main', + index: 1, + }, + { + node: 'Set1', + type: 'main', + index: 0, + }, + { + node: 'Set', + type: 'main', + index: 0, + }, + ], + ], + }, + }, + }, + }, + output: { + nodeExecutionOrder: [ + 'Start', + 'Set1', + 'IF1', + 'Set1', + 'IF1', + 'Set1', + 'IF1', + 'Set', + 'IF', + 'Merge1', + 'Set', + 'IF', + 'Merge1', + 'Set', + 'IF', + 'Merge', + 'Merge2', + 'Merge2', + ], + nodeData: { + Start: [[{}]], + Set1: [ + [ + { + counter: 1, + }, + ], + [ + { + counter: 2, + }, + ], + [ + { + counter: 3, + }, + ], + ], + Set: [ + [ + { + counter: 1, + }, + ], + [ + { + counter: 2, + }, + ], + [ + { + counter: 3, + }, + ], + ], + IF1: [ + [ + { + counter: 1, + }, + ], + [ + { + counter: 2, + }, + ], + [], + ], + IF: [ + [ + { + counter: 1, + }, + ], + [ + { + counter: 2, + }, + ], + [], + ], + Merge1: [ + [ + { + counter: 1, + }, + { + counter: 1, + }, + ], + [ + { + counter: 2, + }, + { + counter: 2, + }, + ], + ], + Merge: [ + [ + { + counter: 3, + }, + {}, + ], + ], + Merge2: [ + [ + { + counter: 1, + }, + { + counter: 1, + }, + { + counter: 3, + }, + {}, + ], + [ + { + counter: 2, + }, + { + counter: 2, + }, + ], + ], + }, + }, + }, + { + description: 'should run keep on executing even if data from input 1 is missing', + input: { + workflowData: { + nodes: [ + { + parameters: {}, + id: '9c0cb647-5d60-40dc-b791-4946ee260a5d', + name: 'Start', + type: 'n8n-nodes-base.start', + typeVersion: 1, + position: [180, 240], + }, + { + parameters: { + values: { + string: [ + { + name: 'test', + value: 'a', + }, + ], + }, + options: {}, + }, + id: '2bed3b26-0907-465b-a416-9dc993c2e302', + name: 'Set', + type: 'n8n-nodes-base.set', + typeVersion: 1, + position: [400, 240], + }, + { + parameters: { + conditions: { + string: [ + { + value1: '={{ $json["test"] }}', + value2: 'b', + }, + ], + }, + }, + id: 'eca22a12-fb0c-4a4f-ab97-74544c178714', + name: 
'IF', + type: 'n8n-nodes-base.if', + typeVersion: 1, + position: [620, 240], + }, + { + parameters: {}, + id: '8d63caea-8d89-450e-87ae-6097b9821a70', + name: 'NoOp', + type: 'n8n-nodes-base.noOp', + typeVersion: 1, + position: [860, 160], + }, + { + parameters: {}, + id: 'bd0e79e4-7b7a-4016-ace3-6f54f46b41c3', + name: 'NoOp1', + type: 'n8n-nodes-base.noOp', + typeVersion: 1, + position: [860, 300], + }, + { + parameters: {}, + id: '975966f6-8e59-41d8-a69e-7223476a7c50', + name: 'Merge', + type: 'n8n-nodes-base.merge', + typeVersion: 2, + position: [1140, 220], + }, + ], + connections: { + Start: { + main: [ + [ + { + node: 'Set', + type: 'main', + index: 0, + }, + ], + ], + }, + Set: { + main: [ + [ + { + node: 'IF', + type: 'main', + index: 0, + }, + ], + ], + }, + IF: { + main: [ + [ + { + node: 'NoOp', + type: 'main', + index: 0, + }, + ], + [ + { + node: 'NoOp1', + type: 'main', + index: 0, + }, + ], + ], + }, + NoOp: { + main: [ + [ + { + node: 'Merge', + type: 'main', + index: 0, + }, + ], + ], + }, + NoOp1: { + main: [ + [ + { + node: 'Merge', + type: 'main', + index: 1, + }, + ], + ], + }, + }, + }, + }, + output: { + nodeExecutionOrder: ['Start', 'Set', 'IF', 'NoOp1', 'Merge'], + nodeData: { + Merge: [ + [ + { + test: 'a', + }, + ], + ], + }, + }, + }, + { + description: + 'should run complicated multi node workflow where multiple Merge-Node have missing data and complex dependency structure', + input: { + workflowData: { + nodes: [ + { + parameters: { + conditions: { + string: [ + { + value1: '={{ $json["test"] }}', + value2: 'b', + }, + ], + }, + }, + id: '21593a8c-07c1-435b-93a6-75317ee3bf67', + name: 'IF4', + type: 'n8n-nodes-base.if', + typeVersion: 1, + position: [880, 1240], + }, + { + parameters: {}, + id: 'a9af6b9f-011c-4b34-a367-0cfa5ad4c865', + name: 'NoOp2', + type: 'n8n-nodes-base.noOp', + typeVersion: 1, + position: [1320, 1060], + }, + { + parameters: {}, + id: '429d1a51-65f0-4701-af76-b73611774952', + name: 'Merge3', + type: 'n8n-nodes-base.merge', + typeVersion: 2, + position: [1100, 1060], + }, + { + parameters: { + conditions: { + string: [ + { + value1: '={{ $json["test"] }}', + value2: 'b', + }, + ], + }, + }, + id: 'ed08db0f-f747-4f87-af62-051fc53f955c', + name: 'IF3', + type: 'n8n-nodes-base.if', + typeVersion: 1, + position: [620, 1060], + }, + { + parameters: {}, + id: 'e80d2aac-cbd4-4e7c-9817-83db52a617d4', + name: 'Merge2', + type: 'n8n-nodes-base.merge', + typeVersion: 2, + position: [940, 900], + }, + { + parameters: { + conditions: { + string: [ + { + value1: '={{ $json["test"] }}', + value2: 'a', + }, + ], + }, + }, + id: '766dad6b-4326-41b5-a02a-0b3b7d879eb4', + name: 'IF2', + type: 'n8n-nodes-base.if', + typeVersion: 1, + position: [620, 900], + }, + { + parameters: {}, + id: '0c0cd5bb-eb44-48fe-b66a-54a3c541ea57', + name: 'Merge7', + type: 'n8n-nodes-base.merge', + typeVersion: 2, + position: [2180, 1180], + }, + { + parameters: {}, + id: '863a00e5-7be4-43f3-97da-07cf552d7c0e', + name: 'Merge6', + type: 'n8n-nodes-base.merge', + typeVersion: 2, + position: [1840, 1200], + }, + { + parameters: {}, + id: '8855d0ca-1deb-4ad8-958b-2379d3a87160', + name: 'Merge5', + type: 'n8n-nodes-base.merge', + typeVersion: 2, + position: [1600, 1040], + }, + { + parameters: {}, + id: 'ea37e388-c77a-4a2f-a527-4585f24371d5', + name: 'Merge4', + type: 'n8n-nodes-base.merge', + typeVersion: 2, + position: [1180, 880], + }, + { + parameters: {}, + id: 'e3c814e9-9a92-4e12-96d5-85634fe76dc9', + name: 'Merge1', + type: 'n8n-nodes-base.merge', + typeVersion: 2, + position: 
[940, 720], + }, + { + parameters: { + conditions: { + string: [ + { + value1: '={{ $json["test"] }}', + value2: 'b', + }, + ], + }, + }, + id: 'a21a3932-8a3f-464f-8393-309d3233433a', + name: 'IF1', + type: 'n8n-nodes-base.if', + typeVersion: 1, + position: [620, 720], + }, + { + parameters: { + values: { + string: [ + { + name: 'test', + value: 'a', + }, + ], + }, + options: {}, + }, + id: '12d33a38-baeb-41de-aea0-d8a7477f5aa6', + name: 'Set1', + type: 'n8n-nodes-base.set', + typeVersion: 1, + position: [400, 720], + }, + { + parameters: {}, + id: '41589b0b-0521-41ae-b0c6-80a016af803e', + name: 'Start', + type: 'n8n-nodes-base.start', + typeVersion: 1, + position: [160, 240], + }, + ], + connections: { + IF4: { + main: [ + [ + { + node: 'Merge3', + type: 'main', + index: 1, + }, + ], + [ + { + node: 'Merge6', + type: 'main', + index: 1, + }, + ], + ], + }, + NoOp2: { + main: [ + [ + { + node: 'Merge5', + type: 'main', + index: 1, + }, + ], + ], + }, + Merge3: { + main: [ + [ + { + node: 'NoOp2', + type: 'main', + index: 0, + }, + ], + ], + }, + IF3: { + main: [ + [ + { + node: 'Merge3', + type: 'main', + index: 0, + }, + ], + [ + { + node: 'IF4', + type: 'main', + index: 0, + }, + ], + ], + }, + Merge2: { + main: [ + [ + { + node: 'Merge4', + type: 'main', + index: 1, + }, + ], + ], + }, + IF2: { + main: [ + [ + { + node: 'Merge2', + type: 'main', + index: 0, + }, + ], + [ + { + node: 'Merge2', + type: 'main', + index: 1, + }, + ], + ], + }, + Merge6: { + main: [ + [ + { + node: 'Merge7', + type: 'main', + index: 1, + }, + ], + ], + }, + Merge5: { + main: [ + [ + { + node: 'Merge6', + type: 'main', + index: 0, + }, + ], + ], + }, + Merge4: { + main: [ + [ + { + node: 'Merge5', + type: 'main', + index: 0, + }, + ], + ], + }, + Merge1: { + main: [ + [ + { + node: 'Merge4', + type: 'main', + index: 0, + }, + ], + ], + }, + IF1: { + main: [ + [ + { + node: 'Merge1', + type: 'main', + index: 0, + }, + ], + [ + { + node: 'Merge1', + type: 'main', + index: 1, + }, + ], + ], + }, + Set1: { + main: [ + [ + { + node: 'IF1', + type: 'main', + index: 0, + }, + { + node: 'IF2', + type: 'main', + index: 0, + }, + { + node: 'IF3', + type: 'main', + index: 0, + }, + ], + ], + }, + Start: { + main: [ + [ + { + node: 'Set1', + type: 'main', + index: 0, + }, + ], + ], + }, + }, + }, + }, + output: { + nodeExecutionOrder: [ + 'Start', + 'Set1', + 'IF1', + 'IF2', + 'IF3', + 'IF4', + 'Merge1', + 'Merge2', + 'Merge4', + 'Merge5', + 'Merge6', + 'Merge7', + ], + nodeData: { + Merge1: [ + [ + { + test: 'a', + }, + ], + ], + Merge2: [ + [ + { + test: 'a', + }, + ], + ], + Merge4: [ + [ + { + test: 'a', + }, + { + test: 'a', + }, + ], + ], + Merge5: [ + [ + { + test: 'a', + }, + { + test: 'a', + }, + ], + ], + Merge6: [ + [ + { + test: 'a', + }, + { + test: 'a', + }, + { + test: 'a', + }, + ], + ], + Merge7: [ + [ + { + test: 'a', + }, + { + test: 'a', + }, + { + test: 'a', + }, + ], + ], + }, + }, + }, ]; diff --git a/packages/design-system/package.json b/packages/design-system/package.json index 857efd19c8..91d0c715a4 100644 --- a/packages/design-system/package.json +++ b/packages/design-system/package.json @@ -1,6 +1,6 @@ { "name": "n8n-design-system", - "version": "0.69.0", + "version": "1.0.0", "license": "SEE LICENSE IN LICENSE.md", "homepage": "https://n8n.io", "author": { diff --git a/packages/design-system/src/components/N8nActionDropdown/ActionDropdown.vue b/packages/design-system/src/components/N8nActionDropdown/ActionDropdown.vue index 61220060f7..e37ddfa1e8 100644 --- 
a/packages/design-system/src/components/N8nActionDropdown/ActionDropdown.vue +++ b/packages/design-system/src/components/N8nActionDropdown/ActionDropdown.vue @@ -18,7 +18,7 @@ :disabled="item.disabled" :divided="item.divided" > -
+
@@ -62,6 +62,10 @@ export default defineComponent({ ElDropdownItem, N8nIcon, }, + data() { + const testIdPrefix = this.$attrs['data-test-id']; + return { testIdPrefix }; + }, props: { items: { type: Array as PropType, diff --git a/packages/design-system/src/components/N8nBadge/Badge.vue b/packages/design-system/src/components/N8nBadge/Badge.vue index 605c78243c..7c6aed8b2f 100644 --- a/packages/design-system/src/components/N8nBadge/Badge.vue +++ b/packages/design-system/src/components/N8nBadge/Badge.vue @@ -16,7 +16,10 @@ export default defineComponent({ theme: { type: String, default: 'default', - validator: (value: string) => ['default', 'primary', 'secondary', 'tertiary'].includes(value), + validator: (value: string) => + ['default', 'success', 'warning', 'danger', 'primary', 'secondary', 'tertiary'].includes( + value, + ), }, size: { type: String, @@ -49,6 +52,27 @@ export default defineComponent({ border-color: var(--color-text-light); } +.success { + composes: badge; + border-radius: var(--border-radius-base); + color: var(--color-success); + border-color: var(--color-success); +} + +.warning { + composes: badge; + border-radius: var(--border-radius-base); + color: var(--color-warning); + border-color: var(--color-warning); +} + +.danger { + composes: badge; + border-radius: var(--border-radius-base); + color: var(--color-danger); + border-color: var(--color-danger); +} + .primary { composes: badge; padding: var(--spacing-5xs) var(--spacing-3xs); diff --git a/packages/design-system/src/components/N8nCallout/Callout.vue b/packages/design-system/src/components/N8nCallout/Callout.vue index 604cec6a25..53545563ab 100644 --- a/packages/design-system/src/components/N8nCallout/Callout.vue +++ b/packages/design-system/src/components/N8nCallout/Callout.vue @@ -2,7 +2,7 @@
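The `data()` hook added to ActionDropdown above captures the component's own `data-test-id` from `$attrs` so each rendered item can expose a derived test id. A minimal standalone sketch of that pattern follows; the component name, item shape and id format are placeholders rather than the actual n8n markup.

```ts
import { defineComponent, h } from 'vue';
import type { PropType } from 'vue';

// Illustrative only: a wrapper that forwards its own `data-test-id` to its items, so
// <ActionList data-test-id="workflow-menu"> renders items with
// data-test-id="workflow-menu-item-<id>".
export default defineComponent({
	name: 'ActionList',
	props: {
		items: {
			type: Array as PropType<Array<{ id: string; label: string }>>,
			required: true,
		},
	},
	data() {
		// Attributes that are not declared as props (including data-*) land in $attrs.
		return { testIdPrefix: this.$attrs['data-test-id'] as string | undefined };
	},
	render() {
		return h(
			'ul',
			this.items.map((item) =>
				h(
					'li',
					{
						'data-test-id': this.testIdPrefix
							? `${this.testIdPrefix}-item-${item.id}`
							: undefined,
					},
					item.label,
				),
			),
		);
	},
});
```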
- +
@@ -42,7 +42,10 @@ export default defineComponent({ }, icon: { type: String, - default: 'info-circle', + }, + iconSize: { + type: String, + default: 'medium', }, iconless: { type: Boolean, @@ -50,6 +53,10 @@ export default defineComponent({ slim: { type: Boolean, }, + roundCorners: { + type: Boolean, + default: true, + }, }, computed: { classes(): string[] { @@ -58,14 +65,20 @@ export default defineComponent({ this.$style.callout, this.$style[this.theme], this.slim ? this.$style.slim : '', + this.roundCorners ? this.$style.round : '', ]; }, getIcon(): string { - if (Object.keys(CALLOUT_DEFAULT_ICONS).includes(this.theme)) { - return CALLOUT_DEFAULT_ICONS[this.theme]; + return this.icon ?? CALLOUT_DEFAULT_ICONS?.[this.theme] ?? CALLOUT_DEFAULT_ICONS.info; + }, + getIconSize(): string { + if (this.iconSize) { + return this.iconSize; } - - return this.icon; + if (this.theme === 'secondary') { + return 'medium'; + } + return 'large'; }, }, }); @@ -78,7 +91,6 @@ export default defineComponent({ font-size: var(--font-size-2xs); padding: var(--spacing-xs); border: var(--border-width-base) var(--border-style-base); - border-radius: var(--border-radius-base); align-items: center; line-height: var(--font-line-height-loose); @@ -88,6 +100,10 @@ export default defineComponent({ } } +.round { + border-radius: var(--border-radius-base); +} + .messageSection { display: flex; align-items: center; @@ -96,7 +112,7 @@ export default defineComponent({ .info, .custom { border-color: var(--color-foreground-base); - background-color: var(--color-background-light); + background-color: var(--color-foreground-xlight); color: var(--color-info); } @@ -119,7 +135,8 @@ export default defineComponent({ } .icon { - margin-right: var(--spacing-xs); + line-height: 1; + margin-right: var(--spacing-2xs); } .secondary { diff --git a/packages/design-system/src/components/N8nCallout/__tests__/__snapshots__/Callout.spec.ts.snap b/packages/design-system/src/components/N8nCallout/__tests__/__snapshots__/Callout.spec.ts.snap index 6141319c81..0a58f816b2 100644 --- a/packages/design-system/src/components/N8nCallout/__tests__/__snapshots__/Callout.spec.ts.snap +++ b/packages/design-system/src/components/N8nCallout/__tests__/__snapshots__/Callout.spec.ts.snap @@ -1,10 +1,10 @@ // Vitest Snapshot v1 exports[`components > N8nCallout > should render additional slots correctly 1`] = ` -"
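The reworked `getIcon`/`getIconSize` computeds above resolve an explicit prop first and only then fall back to a per-theme default. The same fallback chain as plain functions, with placeholder icon names (the real defaults live in `CALLOUT_DEFAULT_ICONS` next to the component):

```ts
type CalloutTheme = 'info' | 'success' | 'warning' | 'danger' | 'secondary' | 'custom';

// Placeholder icon names, for illustration only.
const DEFAULT_ICONS: Partial<Record<CalloutTheme, string>> = {
	info: 'info-circle',
	warning: 'exclamation-triangle',
	danger: 'exclamation-triangle',
	success: 'check-circle',
};

// An explicit `icon` prop wins, then the theme default, then a hard fallback to the info icon.
function resolveIcon(theme: CalloutTheme, icon?: string): string {
	return icon ?? DEFAULT_ICONS[theme] ?? 'info-circle';
}

// An explicit size wins; otherwise secondary callouts keep a medium icon and the rest use large.
function resolveIconSize(theme: CalloutTheme, iconSize?: string): string {
	if (iconSize) return iconSize;
	return theme === 'secondary' ? 'medium' : 'large';
}
```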
+"
- +
This is a secondary callout. @@ -15,10 +15,10 @@ exports[`components > N8nCallout > should render additional slots correctly 1`] `; exports[`components > N8nCallout > should render custom theme correctly 1`] = ` -"
+"
- +
This is a secondary callout. @@ -28,10 +28,10 @@ exports[`components > N8nCallout > should render custom theme correctly 1`] = ` `; exports[`components > N8nCallout > should render danger theme correctly 1`] = ` -"
+"
- +
This is a danger callout. @@ -41,10 +41,10 @@ exports[`components > N8nCallout > should render danger theme correctly 1`] = ` `; exports[`components > N8nCallout > should render info theme correctly 1`] = ` -"
+"
- +
This is an info callout. @@ -54,7 +54,7 @@ exports[`components > N8nCallout > should render info theme correctly 1`] = ` `; exports[`components > N8nCallout > should render secondary theme correctly 1`] = ` -"
+"
@@ -67,10 +67,10 @@ exports[`components > N8nCallout > should render secondary theme correctly 1`] = `; exports[`components > N8nCallout > should render success theme correctly 1`] = ` -"
+"
- +
This is a success callout. @@ -80,10 +80,10 @@ exports[`components > N8nCallout > should render success theme correctly 1`] = ` `; exports[`components > N8nCallout > should render warning theme correctly 1`] = ` -"
+"
- +
This is a warning callout. diff --git a/packages/design-system/src/components/N8nMenuItem/MenuItem.stories.ts b/packages/design-system/src/components/N8nMenuItem/MenuItem.stories.ts index f1dccdd3b0..5ad0429634 100644 --- a/packages/design-system/src/components/N8nMenuItem/MenuItem.stories.ts +++ b/packages/design-system/src/components/N8nMenuItem/MenuItem.stories.ts @@ -32,6 +32,33 @@ defaultMenuItem.args = { }, }; +export const withSecondaryIcon = template.bind({}); +withSecondaryIcon.args = { + item: { + id: 'workflows', + icon: 'heart', + label: 'Workflows', + secondaryIcon: { name: 'lock', size: 'small' }, + }, +}; + +export const withSecondaryIconTooltip = template.bind({}); +withSecondaryIconTooltip.args = { + item: { + id: 'workflows', + icon: 'heart', + label: 'Workflows', + secondaryIcon: { + name: 'lock', + size: 'small', + tooltip: { + content: 'Locked secret', + bindTo: 'secondaryIcon', + }, + }, + }, +}; + export const compact = template.bind({}); compact.args = { item: { diff --git a/packages/design-system/src/components/N8nMenuItem/MenuItem.vue b/packages/design-system/src/components/N8nMenuItem/MenuItem.vue index 5d69ce3b9c..f1a4639c61 100644 --- a/packages/design-system/src/components/N8nMenuItem/MenuItem.vue +++ b/packages/design-system/src/components/N8nMenuItem/MenuItem.vue @@ -1,7 +1,7 @@ - - - {{ child.label }} - - - - + {{ item.label }} - + - +
diff --git a/packages/design-system/src/types/menu.ts b/packages/design-system/src/types/menu.ts index b4c00f9a8a..ce5640df72 100644 --- a/packages/design-system/src/types/menu.ts +++ b/packages/design-system/src/types/menu.ts @@ -1,8 +1,14 @@ +import type { Tooltip } from 'element-ui'; + export type IMenuItem = { id: string; label: string; icon?: string; - secondaryIcon?: { name: string; size?: 'xsmall' | 'small' | 'medium' | 'large' | 'xlarge' }; + secondaryIcon?: { + name: string; + size?: 'xsmall' | 'small' | 'medium' | 'large' | 'xlarge'; + tooltip?: Tooltip; + }; customIconSize?: 'medium' | 'small'; available?: boolean; position?: 'top' | 'bottom'; diff --git a/packages/editor-ui/package.json b/packages/editor-ui/package.json index f769b7ded4..d57be0ed40 100644 --- a/packages/editor-ui/package.json +++ b/packages/editor-ui/package.json @@ -1,6 +1,6 @@ { "name": "n8n-editor-ui", - "version": "0.200.0", + "version": "1.0.1", "description": "Workflow Editor UI for n8n", "license": "SEE LICENSE IN LICENSE.md", "homepage": "https://n8n.io", @@ -45,7 +45,7 @@ "@jsplumb/connector-bezier": "^5.13.2", "@jsplumb/core": "^5.13.2", "@jsplumb/util": "^5.13.2", - "@n8n/codemirror-lang-sql": "^1.0.0", + "@n8n/codemirror-lang-sql": "^1.0.1", "@vueuse/components": "^10.2.0", "@vueuse/core": "^10.2.0", "axios": "^0.21.1", diff --git a/packages/editor-ui/src/App.vue b/packages/editor-ui/src/App.vue index 78bb5151c9..ce606c3386 100644 --- a/packages/editor-ui/src/App.vue +++ b/packages/editor-ui/src/App.vue @@ -9,6 +9,9 @@ [$style.sidebarCollapsed]: uiStore.sidebarMenuCollapsed, }" > +
+ +
@@ -32,6 +35,7 @@ import { defineComponent } from 'vue'; import { mapStores } from 'pinia'; +import BannerStack from '@/components/banners/BannerStack.vue'; import Modals from '@/components/Modals.vue'; import LoadingView from '@/views/LoadingView.vue'; import Telemetry from '@/components/Telemetry.vue'; @@ -59,6 +63,7 @@ import { useExternalHooks } from '@/composables'; export default defineComponent({ name: 'App', components: { + BannerStack, LoadingView, Telemetry, Modals, @@ -88,9 +93,13 @@ export default defineComponent({ defaultLocale(): string { return this.rootStore.defaultLocale; }, + isDemoMode(): boolean { + return this.$route.name === VIEWS.DEMO; + }, }, data() { return { + postAuthenticateDone: false, loading: true, }; }, @@ -124,7 +133,7 @@ export default defineComponent({ } catch (e) {} }, logHiringBanner() { - if (this.settingsStore.isHiringBannerEnabled && this.$route.name !== VIEWS.DEMO) { + if (this.settingsStore.isHiringBannerEnabled && !this.isDemoMode) { console.log(HIRING_BANNER); // eslint-disable-line no-console } }, @@ -144,7 +153,7 @@ export default defineComponent({ }, authenticate() { // redirect to setup page. user should be redirected to this only once - if (this.settingsStore.isUserManagementEnabled && this.settingsStore.showSetupPage) { + if (this.settingsStore.showSetupPage) { if (this.$route.name === VIEWS.SETUP) { return; } @@ -214,6 +223,31 @@ export default defineComponent({ } catch {} }, CLOUD_TRIAL_CHECK_INTERVAL); }, + async initBanners(): Promise { + if (this.cloudPlanStore.userIsTrialing) { + await this.uiStore.dismissBanner('V1', 'temporary'); + if (this.cloudPlanStore.trialExpired) { + this.uiStore.showBanner('TRIAL_OVER'); + } else { + this.uiStore.showBanner('TRIAL'); + } + } + }, + async postAuthenticate() { + if (this.postAuthenticateDone) { + return; + } + + if (!this.usersStore.currentUser) { + return; + } + + if (this.sourceControlStore.isEnterpriseSourceControlEnabled) { + await this.sourceControlStore.getPreferences(); + } + + this.postAuthenticateDone = true; + }, }, async mounted() { this.setTheme(); @@ -222,14 +256,11 @@ export default defineComponent({ this.authenticate(); this.redirectIfNecessary(); void this.checkForNewVersions(); - void this.checkForCloudPlanData(); + await this.checkForCloudPlanData(); + await this.initBanners(); - if ( - this.sourceControlStore.isEnterpriseSourceControlEnabled && - this.usersStore.isInstanceOwner - ) { - await this.sourceControlStore.getPreferences(); - } + void this.checkForCloudPlanData(); + void this.postAuthenticate(); this.loading = false; @@ -241,6 +272,11 @@ export default defineComponent({ } }, watch: { + 'usersStore.currentUser'(currentValue, previousValue) { + if (currentValue && !previousValue) { + void this.postAuthenticate(); + } + }, $route(route) { this.authenticate(); this.redirectIfNecessary(); @@ -263,17 +299,24 @@ export default defineComponent({ .container { display: grid; grid-template-areas: + 'banners banners' 'sidebar header' 'sidebar content'; grid-auto-columns: fit-content($sidebar-expanded-width) 1fr; - grid-template-rows: fit-content($sidebar-width) 1fr; + grid-template-rows: auto fit-content($header-height) 1fr; + height: 100vh; +} + +.banners { + grid-area: banners; + z-index: 999; } .content { display: flex; grid-area: content; overflow: auto; - height: 100vh; + height: 100%; width: 100%; justify-content: center; @@ -285,12 +328,12 @@ export default defineComponent({ .header { grid-area: header; - z-index: 999; + z-index: 99; } .sidebar { grid-area: sidebar; - 
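The App.vue changes above guard `postAuthenticate()` with a `postAuthenticateDone` flag and call it both from `mounted()` and from a watcher that fires once `usersStore.currentUser` becomes available. A reduced, framework-free sketch of that run-once pattern, with stand-in store shapes (the real stores expose more than shown here):

```ts
// Minimal sketch of the "run once, as soon as the user is known" pattern used above.
// The store shapes and the getPreferences() signature are stand-ins, not n8n's actual API.
interface SessionStores {
	usersStore: { currentUser: { id: string } | null };
	sourceControlStore: {
		isEnterpriseSourceControlEnabled: boolean;
		getPreferences(): Promise<void>;
	};
}

export function createPostAuthenticate(stores: SessionStores) {
	let done = false;

	return async function postAuthenticate(): Promise<void> {
		if (done) return; // idempotent: safe to call from mounted() and again from a watcher
		if (!stores.usersStore.currentUser) return; // user not known yet; the watcher retries later

		if (stores.sourceControlStore.isEnterpriseSourceControlEnabled) {
			await stores.sourceControlStore.getPreferences();
		}

		done = true;
	};
}
```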
height: 100vh; + height: 100%; z-index: 999; } diff --git a/packages/editor-ui/src/Interface.ts b/packages/editor-ui/src/Interface.ts index 859a2ca104..7e0d39a1f3 100644 --- a/packages/editor-ui/src/Interface.ts +++ b/packages/editor-ui/src/Interface.ts @@ -34,6 +34,7 @@ import type { IUserManagementSettings, WorkflowSettings, IUserSettings, + Banners, } from 'n8n-workflow'; import type { SignInType } from './constants'; import type { @@ -702,6 +703,7 @@ export interface IWorkflowSettings extends IWorkflowSettingsWorkflow { maxExecutionTimeout?: number; callerIds?: string; callerPolicy?: WorkflowSettings.CallerPolicy; + executionOrder: NonNullable; } export interface ITimeoutHMS { @@ -1067,7 +1069,10 @@ export interface UIState { nodeViewInitialized: boolean; addFirstStepOnLoad: boolean; executionSidebarAutoRefresh: boolean; + bannersHeight: number; + banners: { [key in Banners]: { dismissed: boolean; type?: 'temporary' | 'permanent' } }; } + export type IFakeDoor = { id: FAKE_DOOR_FEATURES; featureName: string; @@ -1480,6 +1485,7 @@ export interface SourceControlAggregatedFile { name: string; status: string; type: string; + updatedAt?: string; } export declare namespace Cloud { @@ -1520,3 +1526,35 @@ export interface InstanceUsage { } export type CloudPlanAndUsageData = Cloud.PlanData & { usage: InstanceUsage }; + +export type CloudUpdateLinkSourceType = + | 'canvas-nav' + | 'custom-data-filter' + | 'workflow_sharing' + | 'credential_sharing' + | 'settings-n8n-api' + | 'audit-logs' + | 'ldap' + | 'log-streaming' + | 'source-control' + | 'sso' + | 'usage_page' + | 'settings-users' + | 'variables'; + +export type UTMCampaign = + | 'upgrade-custom-data-filter' + | 'upgrade-canvas-nav' + | 'upgrade-workflow-sharing' + | 'upgrade-canvas-nav' + | 'upgrade-credentials-sharing' + | 'upgrade-workflow-sharing' + | 'upgrade-api' + | 'upgrade-audit-logs' + | 'upgrade-ldap' + | 'upgrade-log-streaming' + | 'upgrade-source-control' + | 'upgrade-sso' + | 'open' + | 'upgrade-users' + | 'upgrade-variables'; diff --git a/packages/editor-ui/src/__tests__/server/endpoints/settings.ts b/packages/editor-ui/src/__tests__/server/endpoints/settings.ts index 4462f6e3e5..c8e17f65c0 100644 --- a/packages/editor-ui/src/__tests__/server/endpoints/settings.ts +++ b/packages/editor-ui/src/__tests__/server/endpoints/settings.ts @@ -56,7 +56,6 @@ const defaultSettings: IN8nUISettings = { urlBaseEditor: '', urlBaseWebhook: '', userManagement: { - enabled: true, showSetupOnFirstLoad: true, smtpSetup: true, authenticationMethod: 'email', @@ -75,6 +74,9 @@ const defaultSettings: IN8nUISettings = { deployment: { type: 'default', }, + banners: { + dismissed: [], + }, }; export function routesForSettings(server: Server) { diff --git a/packages/editor-ui/src/__tests__/server/endpoints/sourceControl.ts b/packages/editor-ui/src/__tests__/server/endpoints/sourceControl.ts index 2b5fbe8799..445c76257a 100644 --- a/packages/editor-ui/src/__tests__/server/endpoints/sourceControl.ts +++ b/packages/editor-ui/src/__tests__/server/endpoints/sourceControl.ts @@ -18,6 +18,16 @@ export function routesForSourceControl(server: Server) { publicKey: 'ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIHEX+25m', }; + server.get(`${sourceControlApiRoot}/preferences`, (schema: AppSchema, request: Request) => { + return new Response( + 200, + {}, + { + data: defaultSourceControlPreferences, + }, + ); + }); + server.post(`${sourceControlApiRoot}/preferences`, (schema: AppSchema, request: Request) => { const requestBody: Partial = jsonParse(request.requestBody); diff 
--git a/packages/editor-ui/src/api/cloudPlans.ts b/packages/editor-ui/src/api/cloudPlans.ts index eb39bd42d9..e0d15299dc 100644 --- a/packages/editor-ui/src/api/cloudPlans.ts +++ b/packages/editor-ui/src/api/cloudPlans.ts @@ -1,13 +1,10 @@ import type { Cloud, IRestApiContext, InstanceUsage } from '@/Interface'; import { get } from '@/utils'; -export async function getCurrentPlan( - context: IRestApiContext, - cloudUserId: string, -): Promise { - return get(context.baseUrl, `/user/${cloudUserId}/plan`); +export async function getCurrentPlan(context: IRestApiContext): Promise { + return get(context.baseUrl, '/admin/cloud-plan'); } export async function getCurrentUsage(context: IRestApiContext): Promise { - return get(context.baseUrl, '/limits'); + return get(context.baseUrl, '/cloud/limits'); } diff --git a/packages/editor-ui/src/api/ui.ts b/packages/editor-ui/src/api/ui.ts new file mode 100644 index 0000000000..25683ad46e --- /dev/null +++ b/packages/editor-ui/src/api/ui.ts @@ -0,0 +1,10 @@ +import type { IRestApiContext } from '@/Interface'; +import { makeRestApiRequest } from '@/utils/apiUtils'; +import type { Banners } from 'n8n-workflow'; + +export async function dismissBannerPermanently( + context: IRestApiContext, + data: { bannerName: Banners; dismissedBanners: string[] }, +): Promise { + return makeRestApiRequest(context, 'POST', '/owner/dismiss-banner', { banner: data.bannerName }); +} diff --git a/packages/editor-ui/src/api/users.ts b/packages/editor-ui/src/api/users.ts index bb360d4902..18da881afe 100644 --- a/packages/editor-ui/src/api/users.ts +++ b/packages/editor-ui/src/api/users.ts @@ -25,12 +25,6 @@ export async function logout(context: IRestApiContext): Promise { await makeRestApiRequest(context, 'POST', '/logout'); } -export async function preOwnerSetup( - context: IRestApiContext, -): Promise<{ credentials: number; workflows: number }> { - return makeRestApiRequest(context, 'GET', '/owner/pre-setup'); -} - export async function setupOwner( context: IRestApiContext, params: { firstName: string; lastName: string; email: string; password: string }, @@ -38,10 +32,6 @@ export async function setupOwner( return makeRestApiRequest(context, 'POST', '/owner/setup', params as unknown as IDataObject); } -export async function skipOwnerSetup(context: IRestApiContext): Promise { - return makeRestApiRequest(context, 'POST', '/owner/skip-setup'); -} - export async function validateSignupToken( context: IRestApiContext, params: { inviterId: string; inviteeId: string }, diff --git a/packages/editor-ui/src/api/workflows.ts b/packages/editor-ui/src/api/workflows.ts index 24f047aeb7..2c2cbc6b54 100644 --- a/packages/editor-ui/src/api/workflows.ts +++ b/packages/editor-ui/src/api/workflows.ts @@ -7,6 +7,7 @@ export async function getNewWorkflow(context: IRestApiContext, name?: string) { return { name: response.name, onboardingFlowEnabled: response.onboardingFlowEnabled === true, + settings: response.defaultSettings, }; } diff --git a/packages/editor-ui/src/components/ActivationModal.vue b/packages/editor-ui/src/components/ActivationModal.vue index c1e8e6cdbd..57387806b3 100644 --- a/packages/editor-ui/src/components/ActivationModal.vue +++ b/packages/editor-ui/src/components/ActivationModal.vue @@ -26,7 +26,7 @@ @@ -146,6 +152,7 @@ import { LOG_STREAM_MODAL_KEY, ASK_AI_MODAL_KEY, SOURCE_CONTROL_PUSH_MODAL_KEY, + SOURCE_CONTROL_PULL_MODAL_KEY, } from '@/constants'; import AboutModal from './AboutModal.vue'; @@ -172,6 +179,7 @@ import ImportCurlModal from './ImportCurlModal.vue'; import 
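Taken together, the new `UIState.banners` map and the `dismissBannerPermanently` helper split banner dismissal into a session-only path and a persisted one (a POST to `/owner/dismiss-banner`). A self-contained sketch of that flow, with persistence injected so no HTTP helper has to be assumed; the banner names mirror the ones used in App.vue (`V1`, `TRIAL`, `TRIAL_OVER`):

```ts
type Banners = 'V1' | 'TRIAL' | 'TRIAL_OVER';

interface BannerState {
	dismissed: boolean;
	type?: 'temporary' | 'permanent';
}

// Reduced sketch of the dismissal flow: the map mirrors UIState.banners, and only a
// permanent dismissal is persisted (the real code POSTs to /owner/dismiss-banner via
// makeRestApiRequest; here persistence is injected to keep the sketch self-contained).
function createBannerStore(persistDismissal: (banner: Banners) => Promise<void>) {
	const banners: Record<Banners, BannerState> = {
		V1: { dismissed: false },
		TRIAL: { dismissed: false },
		TRIAL_OVER: { dismissed: false },
	};

	return {
		banners,
		showBanner(name: Banners) {
			banners[name] = { dismissed: false };
		},
		async dismissBanner(name: Banners, mode: 'temporary' | 'permanent' = 'temporary') {
			banners[name] = { dismissed: true, type: mode };
			if (mode === 'permanent') {
				await persistDismissal(name); // e.g. POST /owner/dismiss-banner { banner: name }
			}
		},
	};
}
```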
WorkflowShareModal from './WorkflowShareModal.ee.vue'; import EventDestinationSettingsModal from '@/components/SettingsLogStreaming/EventDestinationSettingsModal.ee.vue'; import SourceControlPushModal from '@/components/SourceControlPushModal.ee.vue'; +import SourceControlPullModal from '@/components/SourceControlPullModal.ee.vue'; export default defineComponent({ name: 'Modals', @@ -200,6 +208,7 @@ export default defineComponent({ ImportCurlModal, EventDestinationSettingsModal, SourceControlPushModal, + SourceControlPullModal, }, data: () => ({ COMMUNITY_PACKAGE_CONFIRM_MODAL_KEY, @@ -225,6 +234,7 @@ export default defineComponent({ IMPORT_CURL_MODAL_KEY, LOG_STREAM_MODAL_KEY, SOURCE_CONTROL_PUSH_MODAL_KEY, + SOURCE_CONTROL_PULL_MODAL_KEY, }), }); diff --git a/packages/editor-ui/src/components/Node/NodeCreation.vue b/packages/editor-ui/src/components/Node/NodeCreation.vue index 896f14f260..376033cf82 100644 --- a/packages/editor-ui/src/components/Node/NodeCreation.vue +++ b/packages/editor-ui/src/components/Node/NodeCreation.vue @@ -144,7 +144,7 @@ export default defineComponent({ diff --git a/packages/editor-ui/src/components/SourceControlPushModal.ee.vue b/packages/editor-ui/src/components/SourceControlPushModal.ee.vue index 21c7ab1ebf..e4b6126829 100644 --- a/packages/editor-ui/src/components/SourceControlPushModal.ee.vue +++ b/packages/editor-ui/src/components/SourceControlPushModal.ee.vue @@ -9,6 +9,7 @@ import { useI18n, useLoadingService, useToast } from '@/composables'; import { useSourceControlStore } from '@/stores/sourceControl.store'; import { useUIStore } from '@/stores'; import { useRoute } from 'vue-router'; +import dateformat from 'dateformat'; const props = defineProps({ data: { @@ -17,6 +18,8 @@ const props = defineProps({ }, }); +const defaultStagedFileTypes = ['tags', 'variables', 'credential']; + const loadingService = useLoadingService(); const uiStore = useUIStore(); const toast = useToast(); @@ -31,10 +34,71 @@ const commitMessage = ref(''); const loading = ref(true); const context = ref<'workflow' | 'workflows' | 'credentials' | string>(''); +const statusToBadgeThemeMap = { + created: 'success', + deleted: 'danger', + modified: 'warning', + renamed: 'warning', +}; + const isSubmitDisabled = computed(() => { return !commitMessage.value || Object.values(staged.value).every((value) => !value); }); +const workflowId = computed(() => { + if (context.value === 'workflow') { + return route.params.name as string; + } + + return ''; +}); + +const sortedFiles = computed(() => { + const statusPriority = { + deleted: 1, + modified: 2, + renamed: 3, + created: 4, + }; + + return [...files.value].sort((a, b) => { + if (context.value === 'workflow') { + if (a.id === workflowId.value) { + return -1; + } else if (b.id === workflowId.value) { + return 1; + } + } + + if (statusPriority[a.status] < statusPriority[b.status]) { + return -1; + } else if (statusPriority[a.status] > statusPriority[b.status]) { + return 1; + } + + return a.updatedAt < b.updatedAt ? 1 : a.updatedAt > b.updatedAt ? 
-1 : 0; + }); +}); + +const selectAll = computed(() => { + return files.value.every((file) => staged.value[file.file]); +}); + +const workflowFiles = computed(() => { + return files.value.filter((file) => file.type === 'workflow'); +}); + +const stagedWorkflowFiles = computed(() => { + return workflowFiles.value.filter((workflow) => staged.value[workflow.file]); +}); + +const selectAllIndeterminate = computed(() => { + return ( + stagedWorkflowFiles.value.length > 0 && + stagedWorkflowFiles.value.length < workflowFiles.value.length + ); +}); + onMounted(async () => { context.value = getContext(); try { @@ -46,6 +110,22 @@ onMounted(async () => { } }); +function onToggleSelectAll() { + if (selectAll.value) { + files.value.forEach((file) => { + if (!defaultStagedFileTypes.includes(file.type)) { + staged.value[file.file] = false; + } + }); + } else { + files.value.forEach((file) => { + if (!defaultStagedFileTypes.includes(file.type)) { + staged.value[file.file] = true; + } + }); + } +} + function getContext() { if (route.fullPath.startsWith('/workflows')) { return 'workflows'; @@ -62,20 +142,24 @@ function getContext() { } function getStagedFilesByContext(files: SourceControlAggregatedFile[]): Record { - const stagedFiles: SourceControlAggregatedFile[] = []; - if (context.value === 'workflows') { - stagedFiles.push(...files.filter((file) => file.file.startsWith('workflows'))); - } else if (context.value === 'credentials') { - stagedFiles.push(...files.filter((file) => file.file.startsWith('credentials'))); - } else if (context.value === 'workflow') { - const workflowId = route.params.name as string; - stagedFiles.push(...files.filter((file) => file.type === 'workflow' && file.id === workflowId)); - } - - return stagedFiles.reduce>((acc, file) => { - acc[file.file] = true; + const stagedFiles = files.reduce((acc, file) => { + acc[file.file] = false; return acc; }, {}); + + files.forEach((file) => { + if (defaultStagedFileTypes.includes(file.type)) { + stagedFiles[file.file] = true; + } + + if (context.value === 'workflow' && file.type === 'workflow' && file.id === workflowId.value) { + stagedFiles[file.file] = true; + } else if (context.value === 'workflows' && file.type === 'workflow') { + stagedFiles[file.file] = true; + } + }); + + return stagedFiles; } function setStagedStatus(file: SourceControlAggregatedFile, status: boolean) { @@ -89,6 +173,20 @@ function close() { uiStore.closeModal(SOURCE_CONTROL_PUSH_MODAL_KEY); } +function renderUpdatedAt(file: SourceControlAggregatedFile) { + const currentYear = new Date().getFullYear(); + + return i18n.baseText('settings.sourceControl.lastUpdated', { + interpolate: { + date: dateformat( + file.updatedAt, + `d mmm${file.updatedAt.startsWith(currentYear) ? '' : ', yyyy'}`, + ), + time: dateformat(file.updatedAt, 'HH:MM'), + }, + }); +} + async function commitAndPush() { const fileNames = files.value.filter((file) => staged.value[file.file]).map((file) => file.file); @@ -135,12 +233,24 @@ async function commitAndPush() { -
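The `sortedFiles` computed above pins the workflow that is currently open in the editor to the top of the push list, then orders by status severity, then by most recent change. The same comparator as a standalone function, with `SourceControlAggregatedFile` reduced to the fields the sort actually reads:

```ts
interface FileEntry {
	id: string;
	status: 'created' | 'deleted' | 'modified' | 'renamed';
	updatedAt?: string; // ISO timestamp
}

const statusPriority: Record<FileEntry['status'], number> = {
	deleted: 1,
	modified: 2,
	renamed: 3,
	created: 4,
};

function sortFiles(files: FileEntry[], currentWorkflowId?: string): FileEntry[] {
	return [...files].sort((a, b) => {
		// The workflow that is open in the editor always comes first.
		if (currentWorkflowId) {
			if (a.id === currentWorkflowId) return -1;
			if (b.id === currentWorkflowId) return 1;
		}

		// Then deletions before modifications, renames and newly created files.
		if (statusPriority[a.status] !== statusPriority[b.status]) {
			return statusPriority[a.status] - statusPriority[b.status];
		}

		// Finally, most recently updated first (missing timestamps sort last).
		return (b.updatedAt ?? '').localeCompare(a.updatedAt ?? '');
	});
}
```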
- - {{ i18n.baseText('settings.sourceControl.modals.push.filesToCommit') }} - +
+
+ + + {{ i18n.baseText('settings.sourceControl.modals.push.workflowsToCommit') }} + + + ({{ stagedWorkflowFiles.length }}/{{ workflowFiles.length }}) + + +
- - - Workflow - Credential - Id: {{ file.id }} - - +
+ + Deleted Workflow: + Deleted Credential: + {{ file.id }} + + {{ file.name }} - - - - {{ file.status }} - + +
+ + {{ renderUpdatedAt(file) }} + +
+
+ + {{ i18n.baseText('settings.sourceControl.modals.push.overrideVersionInGit') }} + +
+
+
+ + Current workflow + + + {{ file.status }} + +
@@ -228,22 +353,22 @@ async function commitAndPush() { &:last-child { margin-bottom: 0; } +} - .listItemBody { - display: flex; - flex-direction: row; - align-items: center; +.listItemBody { + display: flex; + flex-direction: row; + align-items: center; +} - .listItemCheckbox { - display: inline-flex !important; - margin-bottom: 0 !important; - margin-right: var(--spacing-2xs); - } +.listItemCheckbox { + display: inline-flex !important; + margin-bottom: 0 !important; + margin-right: var(--spacing-2xs) !important; +} - .listItemStatus { - margin-left: var(--spacing-2xs); - } - } +.listItemStatus { + margin-left: auto; } .footer { diff --git a/packages/editor-ui/src/components/TemplateCard.vue b/packages/editor-ui/src/components/TemplateCard.vue index 0fce8fdbc3..f1f87ba742 100644 --- a/packages/editor-ui/src/components/TemplateCard.vue +++ b/packages/editor-ui/src/components/TemplateCard.vue @@ -13,7 +13,7 @@
{{ workflow.name }} -
+
@@ -72,6 +72,10 @@ export default defineComponent({ loading: { type: Boolean, }, + simpleView: { + type: Boolean, + default: false, + }, }, components: { TimeAgo, @@ -122,6 +126,7 @@ export default defineComponent({ background-color: var(--color-background-xlight); display: flex; + align-items: center; padding: 0 var(--spacing-s) var(--spacing-s) var(--spacing-s); background-color: var(--color-background-xlight); cursor: pointer; diff --git a/packages/editor-ui/src/components/TemplateDetails.vue b/packages/editor-ui/src/components/TemplateDetails.vue index 56bf07b1af..9acaa1a846 100644 --- a/packages/editor-ui/src/components/TemplateDetails.vue +++ b/packages/editor-ui/src/components/TemplateDetails.vue @@ -55,6 +55,7 @@ import { abbreviateNumber, filterTemplateNodes } from '@/utils'; import type { ITemplatesNode, ITemplatesWorkflow, ITemplatesWorkflowFull } from '@/Interface'; import { mapStores } from 'pinia'; import { useTemplatesStore } from '@/stores/templates.store'; +import TimeAgo from '@/components/TimeAgo.vue'; export default defineComponent({ name: 'TemplateDetails', @@ -72,6 +73,7 @@ export default defineComponent({ components: { NodeIcon, TemplateDetailsBlock, + TimeAgo, }, computed: { ...mapStores(useTemplatesStore), diff --git a/packages/editor-ui/src/components/TemplateList.vue b/packages/editor-ui/src/components/TemplateList.vue index 651546f88c..4f8d9699db 100644 --- a/packages/editor-ui/src/components/TemplateList.vue +++ b/packages/editor-ui/src/components/TemplateList.vue @@ -1,6 +1,6 @@