Merge remote-tracking branch 'origin/evaluation-tags' into ai-430-metrics
Commit: 5b87550dfa
Mirror of https://github.com/n8n-io/n8n.git, synced 2025-03-05 20:50:17 -08:00

.github/workflows/test-workflows.yml (vendored, 123 lines changed)
@@ -4,18 +4,70 @@ on:
   schedule:
     - cron: '0 2 * * *'
   workflow_dispatch:
+  pull_request:
+    paths:
+      - packages/core/package.json
+      - packages/nodes-base/package.json
+      - packages/@n8n/nodes-langchain/package.json
+      - .github/workflows/test-workflows.yml
+  pull_request_review:
+    types: [submitted]
 
 jobs:
-  run-test-workflows:
+  build:
+    name: Install & Build
     runs-on: ubuntu-latest
-    timeout-minutes: 30
+    if: github.event_name != 'pull_request_review' || startsWith(github.event.pull_request.base.ref, 'release/')
 
     steps:
-      - name: Checkout
-        uses: actions/checkout@v4.1.1
+      - uses: actions/checkout@v4.1.1
+      - run: corepack enable
+      - uses: actions/setup-node@v4.0.2
         with:
-          path: n8n
+          node-version: 20.x
+          cache: 'pnpm'
+      - run: pnpm install --frozen-lockfile
+
+      - name: Setup build cache
+        uses: rharkor/caching-for-turbo@v1.5
+
+      - name: Build Backend
+        run: pnpm build:backend
+
+      - name: Cache build artifacts
+        uses: actions/cache/save@v4.0.0
+        with:
+          path: ./packages/**/dist
+          key: ${{ github.sha }}:workflow-tests
+
+  run-test-workflows:
+    name: Workflow Tests
+    runs-on: ubuntu-latest
+    needs: build
+    timeout-minutes: 10
+    steps:
+      - uses: actions/checkout@v4.1.1
+      - run: corepack enable
+      - uses: actions/setup-node@v4.0.2
+        with:
+          node-version: 20.x
+          cache: 'pnpm'
+      - run: pnpm install --frozen-lockfile
+
+      - name: Setup build cache
+        uses: rharkor/caching-for-turbo@v1.5
+
+      - name: Restore cached build artifacts
+        uses: actions/cache/restore@v4.0.0
+        with:
+          path: ./packages/**/dist
+          key: ${{ github.sha }}:workflow-tests
+
+      - name: Install OS dependencies
+        run: |
+          sudo apt update -y
+          echo 'tzdata tzdata/Areas select Europe' | sudo debconf-set-selections
+          echo 'tzdata tzdata/Zones/Europe select Paris' | sudo debconf-set-selections
+          DEBIAN_FRONTEND="noninteractive" sudo apt-get install -y graphicsmagick
 
       - name: Checkout workflows repo
         uses: actions/checkout@v4.1.1
@@ -23,77 +75,34 @@ jobs:
           repository: n8n-io/test-workflows
           path: test-workflows
 
-      - run: corepack enable
-        working-directory: n8n
-
-      - uses: actions/setup-node@v4.0.2
-        with:
-          node-version: 20.x
-          cache: 'pnpm'
-          cache-dependency-path: 'n8n/pnpm-lock.yaml'
-
-      - name: Install dependencies
-        run: |
-          sudo apt update -y
-          echo 'tzdata tzdata/Areas select Europe' | sudo debconf-set-selections
-          echo 'tzdata tzdata/Zones/Europe select Paris' | sudo debconf-set-selections
-          DEBIAN_FRONTEND="noninteractive" sudo apt-get install -y graphicsmagick
-        shell: bash
-
-      - name: pnpm install and build
-        working-directory: n8n
-        run: |
-          pnpm install --frozen-lockfile
-          pnpm build:backend
-        shell: bash
-
       - name: Import credentials
-        run: n8n/packages/cli/bin/n8n import:credentials --input=test-workflows/credentials.json
-        shell: bash
+        run: packages/cli/bin/n8n import:credentials --input=test-workflows/credentials.json
         env:
           N8N_ENCRYPTION_KEY: ${{secrets.ENCRYPTION_KEY}}
 
       - name: Import workflows
-        run: n8n/packages/cli/bin/n8n import:workflow --separate --input=test-workflows/workflows
-        shell: bash
+        run: packages/cli/bin/n8n import:workflow --separate --input=test-workflows/workflows
         env:
           N8N_ENCRYPTION_KEY: ${{secrets.ENCRYPTION_KEY}}
 
       - name: Copy static assets
         run: |
-          cp n8n/assets/n8n-logo.png /tmp/n8n-logo.png
-          cp n8n/assets/n8n-screenshot.png /tmp/n8n-screenshot.png
+          cp assets/n8n-logo.png /tmp/n8n-logo.png
+          cp assets/n8n-screenshot.png /tmp/n8n-screenshot.png
           cp test-workflows/testData/pdfs/*.pdf /tmp/
-        shell: bash
 
       - name: Run tests
-        run: n8n/packages/cli/bin/n8n executeBatch --shallow --skipList=test-workflows/skipList.txt --githubWorkflow --shortOutput --concurrency=16 --compare=test-workflows/snapshots
-        shell: bash
+        run: packages/cli/bin/n8n executeBatch --shallow --skipList=test-workflows/skipList.txt --githubWorkflow --shortOutput --concurrency=16 --compare=test-workflows/snapshots
         id: tests
         env:
           N8N_ENCRYPTION_KEY: ${{secrets.ENCRYPTION_KEY}}
           SKIP_STATISTICS_EVENTS: true
           DB_SQLITE_POOL_SIZE: 4
-      # -
-      #   name: Export credentials
-      #   if: always()
-      #   run: n8n/packages/cli/bin/n8n export:credentials --output=test-workflows/credentials.json --all --pretty
-      #   shell: bash
-      #   env:
-      #     N8N_ENCRYPTION_KEY: ${{secrets.ENCRYPTION_KEY}}
-      # -
-      #   name: Commit and push credential changes
-      #   if: always()
-      #   run: |
-      #     cd test-workflows
-      #     git config --global user.name 'n8n test bot'
-      #     git config --global user.email 'n8n-test-bot@users.noreply.github.com'
-      #     git commit -am "Automated credential update"
-      #     git push --force --quiet "https://janober:${{ secrets.TOKEN }}@github.com/n8n-io/test-workflows.git" main:main
+          N8N_SENTRY_DSN: ${{secrets.CI_SENTRY_DSN}}
 
       - name: Notify Slack on failure
         uses: act10ns/slack@v2.0.0
-        if: failure()
+        if: failure() && github.ref == 'refs/heads/master'
         with:
           status: ${{ job.status }}
           channel: '#alerts-build'

CHANGELOG.md (85 lines changed)

@@ -1,3 +1,88 @@
+# [1.71.0](https://github.com/n8n-io/n8n/compare/n8n@1.70.0...n8n@1.71.0) (2024-12-04)
+
+
+### Bug Fixes
+
+* **core:** Fix push for waiting executions ([#11984](https://github.com/n8n-io/n8n/issues/11984)) ([8d71307](https://github.com/n8n-io/n8n/commit/8d71307da0398e7e39bf53e8e1cfa21ac1ceaf69))
+* **core:** Improve header parameter parsing on http client responses ([#11953](https://github.com/n8n-io/n8n/issues/11953)) ([41e9e39](https://github.com/n8n-io/n8n/commit/41e9e39b5b53ecd9d8d1b385df65a26ecb9bccd8))
+* **core:** Opt-out from optimizations if `$item` is used ([#12036](https://github.com/n8n-io/n8n/issues/12036)) ([872535a](https://github.com/n8n-io/n8n/commit/872535a40c85dcfad3a4b27c57c026ae003f562f))
+* **core:** Use the configured timezone in task runner ([#12032](https://github.com/n8n-io/n8n/issues/12032)) ([2e6845a](https://github.com/n8n-io/n8n/commit/2e6845afcbc30dff73c3f3f15f21278cab397387))
+* **core:** Validate node name when creating `NodeOperationErrror` ([#11999](https://github.com/n8n-io/n8n/issues/11999)) ([e68c9da](https://github.com/n8n-io/n8n/commit/e68c9da30c31cd5f994cb01ce759192562bfbd40))
+* **editor:** Add execution concurrency info and paywall ([#11847](https://github.com/n8n-io/n8n/issues/11847)) ([57d3269](https://github.com/n8n-io/n8n/commit/57d3269e400ee4e7e3636614870ebdfdb0aa8c1d))
+* **editor:** Fix bug causing connection lines to disappear when hovering stickies ([#11950](https://github.com/n8n-io/n8n/issues/11950)) ([439a1cc](https://github.com/n8n-io/n8n/commit/439a1cc4f39243e91715b21a84b8e7266ce872cd))
+* **editor:** Fix canvas keybindings using splitter keys such as zooming using `+` key ([#12022](https://github.com/n8n-io/n8n/issues/12022)) ([6af9c82](https://github.com/n8n-io/n8n/commit/6af9c82af6020e99d61e442ee9c2d40761baf027))
+* **editor:** Fix community check ([#11979](https://github.com/n8n-io/n8n/issues/11979)) ([af0398a](https://github.com/n8n-io/n8n/commit/af0398a5e3a8987c01c7112e6f689b35e1ef92fe))
+* **editor:** Fix copy/paste keyboard events in canvas chat ([#12004](https://github.com/n8n-io/n8n/issues/12004)) ([967340a](https://github.com/n8n-io/n8n/commit/967340a2938a79c89319121bf57a8d654f88e06c))
+* **editor:** Fix node showing as successful if errors exists on subsequent runs ([#12019](https://github.com/n8n-io/n8n/issues/12019)) ([8616b17](https://github.com/n8n-io/n8n/commit/8616b17cc6c305da69bbb54fd56ab7cb34213f7c))
+* **editor:** Fix pin data showing up in production executions on new canvas ([#11951](https://github.com/n8n-io/n8n/issues/11951)) ([5f6f8a1](https://github.com/n8n-io/n8n/commit/5f6f8a1bddfd76b586c08da821e8b59070f449fc))
+* **editor:** Handle source control initialization to prevent UI form crashing ([#11776](https://github.com/n8n-io/n8n/issues/11776)) ([6be8e86](https://github.com/n8n-io/n8n/commit/6be8e86c45bd64d000bc95d2ef2d68220e930c02))
+* **editor:** Implement dirty nodes for partial executions ([#11739](https://github.com/n8n-io/n8n/issues/11739)) ([b8da4ff](https://github.com/n8n-io/n8n/commit/b8da4ff9edb0fbb0093c4c41fe11f8e67b696ca3))
+* **editor:** Resolve going back from Settings ([#11958](https://github.com/n8n-io/n8n/issues/11958)) ([d74423c](https://github.com/n8n-io/n8n/commit/d74423c75198d38d0d99a1879051b5e964ecae74))
+* **editor:** Unify executions card label color ([#11949](https://github.com/n8n-io/n8n/issues/11949)) ([fc79718](https://github.com/n8n-io/n8n/commit/fc797188d63e87df34b3a153eb4a0d0b7361b3f5))
+* **editor:** Use optional chaining for all members in execution data when using the debug feature ([#12024](https://github.com/n8n-io/n8n/issues/12024)) ([67aa0c9](https://github.com/n8n-io/n8n/commit/67aa0c9107bda16b1cb6d273e17c3cde77035f51))
+* **GraphQL Node:** Throw error if GraphQL variables are not objects or strings ([#11904](https://github.com/n8n-io/n8n/issues/11904)) ([85f30b2](https://github.com/n8n-io/n8n/commit/85f30b27ae282da58a25186d13ff17196dcd7d9c))
+* **HTTP Request Node:** Use iconv-lite to decode http responses, to support more encoding types ([#11930](https://github.com/n8n-io/n8n/issues/11930)) ([461b39c](https://github.com/n8n-io/n8n/commit/461b39c5df5dd446cb8ceef469b204c7c5111229))
+* Load workflows with unconnected Switch outputs ([#12020](https://github.com/n8n-io/n8n/issues/12020)) ([abc851c](https://github.com/n8n-io/n8n/commit/abc851c0cff298607a0dc2f2882aa17136898f45))
+* **n8n Form Node:** Use https to load google fonts ([#11948](https://github.com/n8n-io/n8n/issues/11948)) ([eccd924](https://github.com/n8n-io/n8n/commit/eccd924f5e8dbe59e37099d1a6fbe8866fef55bf))
+* **Telegram Trigger Node:** Fix header secret check ([#12018](https://github.com/n8n-io/n8n/issues/12018)) ([f16de4d](https://github.com/n8n-io/n8n/commit/f16de4db01c0496205635a3203a44098e7908453))
+* **Webflow Node:** Fix issue with pagination in v2 node ([#11934](https://github.com/n8n-io/n8n/issues/11934)) ([1eb94bc](https://github.com/n8n-io/n8n/commit/1eb94bcaf54d9e581856ce0b87253e1c28fa68e2))
+* **Webflow Node:** Fix issue with publishing items ([#11982](https://github.com/n8n-io/n8n/issues/11982)) ([0a8a57e](https://github.com/n8n-io/n8n/commit/0a8a57e4ec8081ab1a53f36d686b3d5dcaae2476))
+
+
+### Features
+
+* **AI Transform Node:** Node Prompt improvements ([#11611](https://github.com/n8n-io/n8n/issues/11611)) ([40a7445](https://github.com/n8n-io/n8n/commit/40a7445f0873af2cdbd10b12bd691c07a43e27cc))
+* **Code Node:** Warning if pairedItem absent or could not be auto mapped ([#11737](https://github.com/n8n-io/n8n/issues/11737)) ([3a5bd12](https://github.com/n8n-io/n8n/commit/3a5bd129459272cbac960ae2754db3028943f87e))
+* **editor:** Canvas chat UI & UX improvements ([#11924](https://github.com/n8n-io/n8n/issues/11924)) ([1e25774](https://github.com/n8n-io/n8n/commit/1e25774541461c86da5c4af8efec792e2814eeb1))
+* **editor:** Persist user's preferred display modes on localStorage ([#11929](https://github.com/n8n-io/n8n/issues/11929)) ([bd69316](https://github.com/n8n-io/n8n/commit/bd693162b86a21c90880bab2c2e67aab733095ff))
+
+
+### Performance Improvements
+
+* **editor:** Virtualize SchemaView ([#11694](https://github.com/n8n-io/n8n/issues/11694)) ([9c6def9](https://github.com/n8n-io/n8n/commit/9c6def91975764522fa52cdf21e9cb5bdb4d721d))
+
+
+
+# [1.70.0](https://github.com/n8n-io/n8n/compare/n8n@1.69.0...n8n@1.70.0) (2024-11-27)
+
+
+### Bug Fixes
+
+* **AI Agent Node:** Add binary message before scratchpad to prevent tool calling loops ([#11845](https://github.com/n8n-io/n8n/issues/11845)) ([5c80cb5](https://github.com/n8n-io/n8n/commit/5c80cb57cf709a1097a38e0394aad6fce5330eba))
+* CodeNodeEditor walk cannot read properties of null ([#11129](https://github.com/n8n-io/n8n/issues/11129)) ([d99e0a7](https://github.com/n8n-io/n8n/commit/d99e0a7c979a1ee96b2eea1b9011d5bce375289a))
+* **core:** Bring back execution data on the `executionFinished` push message ([#11821](https://github.com/n8n-io/n8n/issues/11821)) ([0313570](https://github.com/n8n-io/n8n/commit/03135702f18e750ba44840dccfec042270629a2b))
+* **core:** Correct invalid WS status code on removing connection ([#11901](https://github.com/n8n-io/n8n/issues/11901)) ([1d80225](https://github.com/n8n-io/n8n/commit/1d80225d26ba01f78934a455acdcca7b83be7205))
+* **core:** Don't use unbound context methods in code sandboxes ([#11914](https://github.com/n8n-io/n8n/issues/11914)) ([f6c0d04](https://github.com/n8n-io/n8n/commit/f6c0d045e9683cd04ee849f37b96697097c5b41d))
+* **core:** Fix broken execution query when using projectId ([#11852](https://github.com/n8n-io/n8n/issues/11852)) ([a061dbc](https://github.com/n8n-io/n8n/commit/a061dbca07ad686c563e85c56081bc1a7830259b))
+* **core:** Fix validation of items returned in the task runner ([#11897](https://github.com/n8n-io/n8n/issues/11897)) ([a535e88](https://github.com/n8n-io/n8n/commit/a535e88f1aec8fbbf2eb9397d38748f49773de2d))
+* **editor:** Add missing trigger waiting tooltip on new canvas ([#11918](https://github.com/n8n-io/n8n/issues/11918)) ([a8df221](https://github.com/n8n-io/n8n/commit/a8df221bfbb5428d93d03f539bcfdaf29ee20c21))
+* **editor:** Don't re-render input panel after node finishes executing ([#11813](https://github.com/n8n-io/n8n/issues/11813)) ([b3a99a2](https://github.com/n8n-io/n8n/commit/b3a99a2351079c37ed6d83f43920ba80f3832234))
+* **editor:** Fix AI assistant loading message layout ([#11819](https://github.com/n8n-io/n8n/issues/11819)) ([89b4807](https://github.com/n8n-io/n8n/commit/89b48072432753137b498c338af7777036fdde7a))
+* **editor:** Fix new canvas discovery tooltip position after adding github stars button ([#11898](https://github.com/n8n-io/n8n/issues/11898)) ([f4ab5c7](https://github.com/n8n-io/n8n/commit/f4ab5c7b9244b8fdde427c12c1a152fbaaba0c34))
+* **editor:** Fix node position not getting set when dragging selection on new canvas ([#11871](https://github.com/n8n-io/n8n/issues/11871)) ([595de81](https://github.com/n8n-io/n8n/commit/595de81c03b3e488ab41fb8d1d316c3db6a8372a))
+* **editor:** Restore workers view ([#11876](https://github.com/n8n-io/n8n/issues/11876)) ([3aa72f6](https://github.com/n8n-io/n8n/commit/3aa72f613f64c16d7dff67ffe66037894e45aa7c))
+* **editor:** Turn NPS survey into a modal and make sure it shows above the Ask AI button ([#11814](https://github.com/n8n-io/n8n/issues/11814)) ([ca169f3](https://github.com/n8n-io/n8n/commit/ca169f3f3455fa39ce9120b30d7b409bade6561e))
+* **editor:** Use `crypto.randomUUID()` to initialize node id if missing on new canvas ([#11873](https://github.com/n8n-io/n8n/issues/11873)) ([bc4857a](https://github.com/n8n-io/n8n/commit/bc4857a1b3d6ea389f11fb8246a1cee33b8a008e))
+* **n8n Form Node:** Duplicate popup in manual mode ([#11925](https://github.com/n8n-io/n8n/issues/11925)) ([2c34bf4](https://github.com/n8n-io/n8n/commit/2c34bf4ea6137fb0fb321969684ffa621da20fa3))
+* **n8n Form Node:** Redirect if completion page to trigger ([#11822](https://github.com/n8n-io/n8n/issues/11822)) ([1a8fb7b](https://github.com/n8n-io/n8n/commit/1a8fb7bdc428c6a23c8708e2dcf924f1f10b47a9))
+* **OpenAI Node:** Remove preview chatInput parameter for `Assistant:Messsage` operation ([#11825](https://github.com/n8n-io/n8n/issues/11825)) ([4dde287](https://github.com/n8n-io/n8n/commit/4dde287cde3af7c9c0e57248e96b8f1270da9332))
+* Retain execution data between partial executions (new flow) ([#11828](https://github.com/n8n-io/n8n/issues/11828)) ([3320436](https://github.com/n8n-io/n8n/commit/3320436a6fdf8472b3843b9fe8d4de7af7f5ef5c))
+
+
+### Features
+
+* Add SharePoint credentials ([#11570](https://github.com/n8n-io/n8n/issues/11570)) ([05c6109](https://github.com/n8n-io/n8n/commit/05c61091db9bdd62fdcca910ead50d0bd512966a))
+* Add Zabbix credential only node ([#11489](https://github.com/n8n-io/n8n/issues/11489)) ([fbd1ecf](https://github.com/n8n-io/n8n/commit/fbd1ecfb29461fee393914bc200ec72c654d8944))
+* **AI Transform Node:** Support for drag and drop ([#11276](https://github.com/n8n-io/n8n/issues/11276)) ([2c252b0](https://github.com/n8n-io/n8n/commit/2c252b0b2d5282f4a87bce76f93c4c02dd8ff5e3))
+* **editor:** Drop `response` wrapper requirement from Subworkflow Tool output ([#11785](https://github.com/n8n-io/n8n/issues/11785)) ([cd3598a](https://github.com/n8n-io/n8n/commit/cd3598aaab6cefe58a4cb9df7d93fb501415e9d3))
+* **editor:** Improve node and edge bring-to-front mechanism on new canvas ([#11793](https://github.com/n8n-io/n8n/issues/11793)) ([b89ca9d](https://github.com/n8n-io/n8n/commit/b89ca9d482faa5cb542898f3973fb6e7c9a8437a))
+* **editor:** Make new canvas connections go underneath node when looping backwards ([#11833](https://github.com/n8n-io/n8n/issues/11833)) ([91d1bd8](https://github.com/n8n-io/n8n/commit/91d1bd8d333454f3971605df73c3703102d2a9e9))
+* **editor:** Make the left sidebar in Expressions editor draggable ([#11838](https://github.com/n8n-io/n8n/issues/11838)) ([a713b3e](https://github.com/n8n-io/n8n/commit/a713b3ed25feb1790412fc320cf41a0967635263))
+* **editor:** Migrate existing users to new canvas and set new canvas as default ([#11896](https://github.com/n8n-io/n8n/issues/11896)) ([caa7447](https://github.com/n8n-io/n8n/commit/caa744785a2cc5063a5fb9d269c0ea53ea432298))
+* **Slack Node:** Update wait for approval to use markdown ([#11754](https://github.com/n8n-io/n8n/issues/11754)) ([40dd02f](https://github.com/n8n-io/n8n/commit/40dd02f360d0d8752fe89c4304c18cac9858c530))
+
+
+
 # [1.69.0](https://github.com/n8n-io/n8n/compare/n8n@1.68.0...n8n@1.69.0) (2024-11-20)
 
 
@@ -40,6 +40,7 @@ export function saveCredential() {
     .within(() => {
       cy.get('button').should('not.exist');
     });
+  getCredentialSaveButton().should('have.text', 'Saved');
 }
 
 export function closeCredentialModal() {
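The hunks in this commit replace direct `saveButton()` clicks with these shared composables. A minimal usage sketch follows; the spec body and setup are invented for illustration, and only `saveCredential` and `getCredentialSaveButton` come from this diff.

import { getCredentialSaveButton, saveCredential } from '../composables/modals/credential-modal';

// Hypothetical spec snippet; assumes a credential modal is already open and edited.
it('saves a credential via the shared composable', () => {
  saveCredential(); // per the hunk above, this ends by asserting the button text becomes 'Saved'
  getCredentialSaveButton().should('contain.text', 'Saved'); // optional extra assertion, mirroring the OAuth spec below
});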
@@ -76,6 +76,10 @@ export function getCanvasNodes() {
   );
 }
 
+export function getCanvasNodeByName(nodeName: string) {
+  return getCanvasNodes().filter(`:contains(${nodeName})`);
+}
+
 export function getSaveButton() {
   return cy.getByTestId('workflow-save-button');
 }
@@ -194,3 +198,8 @@ export function pasteWorkflow(workflow: object) {
 export function clickZoomToFit() {
   getZoomToFitButton().click();
 }
+
+export function deleteNode(name: string) {
+  getCanvasNodeByName(name).first().click();
+  cy.get('body').type('{del}');
+}
@@ -1,3 +1,4 @@
+import { saveCredential } from '../composables/modals/credential-modal';
 import * as projects from '../composables/projects';
 import { INSTANCE_MEMBERS, INSTANCE_OWNER, INSTANCE_ADMIN, NOTION_NODE_NAME } from '../constants';
 import {
@@ -225,8 +226,7 @@ describe('Sharing', { disableAutoLogin: true }, () => {
       .filter(':contains("Development")')
       .should('have.length', 1)
       .click();
-    credentialsModal.getters.saveButton().click();
-    credentialsModal.getters.saveButton().should('have.text', 'Saved');
+    saveCredential();
     credentialsModal.actions.close();
 
     projects.getProjectTabWorkflows().click();
@@ -252,8 +252,7 @@ describe('Sharing', { disableAutoLogin: true }, () => {
     credentialsModal.actions.changeTab('Sharing');
     credentialsModal.getters.usersSelect().click();
     getVisibleSelect().find('li').should('have.length', 4).first().click();
-    credentialsModal.getters.saveButton().click();
-    credentialsModal.getters.saveButton().should('have.text', 'Saved');
+    saveCredential();
     credentialsModal.actions.close();
 
     credentialsPage.getters
@@ -1,5 +1,6 @@
 import { type ICredentialType } from 'n8n-workflow';
 
+import { getCredentialSaveButton, saveCredential } from '../composables/modals/credential-modal';
 import {
   AGENT_NODE_NAME,
   AI_TOOL_HTTP_NODE_NAME,
@@ -194,7 +195,7 @@ describe('Credentials', () => {
     credentialsModal.getters.credentialsEditModal().should('be.visible');
     credentialsModal.getters.name().click();
     credentialsModal.actions.renameCredential(NEW_CREDENTIAL_NAME);
-    credentialsModal.getters.saveButton().click();
+    saveCredential();
     credentialsModal.getters.closeButton().click();
     workflowPage.getters
       .nodeCredentialsSelect()
@@ -212,7 +213,7 @@ describe('Credentials', () => {
     credentialsModal.getters.credentialsEditModal().should('be.visible');
     credentialsModal.getters.name().click();
     credentialsModal.actions.renameCredential(NEW_CREDENTIAL_NAME2);
-    credentialsModal.getters.saveButton().click();
+    saveCredential();
     credentialsModal.getters.closeButton().click();
     workflowPage.getters
       .nodeCredentialsSelect()
@@ -237,7 +238,7 @@ describe('Credentials', () => {
     credentialsModal.getters.credentialsEditModal().should('be.visible');
     credentialsModal.getters.name().click();
     credentialsModal.actions.renameCredential(NEW_CREDENTIAL_NAME);
-    credentialsModal.getters.saveButton().click();
+    saveCredential();
     credentialsModal.getters.closeButton().click();
     workflowPage.getters
       .nodeCredentialsSelect()
@@ -342,7 +343,8 @@ describe('Credentials', () => {
     credentialsModal.getters.connectionParameter('Internal Integration Secret').type('1234567890');
 
     credentialsModal.actions.setName('My awesome Notion account');
-    credentialsModal.getters.saveButton().click({ force: true });
+    getCredentialSaveButton().click();
 
     errorToast().should('have.length', 1);
     errorToast().should('be.visible');
@@ -49,33 +49,47 @@ describe('Two-factor authentication', { disableAutoLogin: true }, () => {
     cy.intercept('GET', '/rest/mfa/qr').as('getMfaQrCode');
   });
 
-  it('Should be able to login with MFA token', () => {
+  it('Should be able to login with MFA code', () => {
     const { email, password } = user;
     signinPage.actions.loginWithEmailAndPassword(email, password);
     personalSettingsPage.actions.enableMfa();
     mainSidebar.actions.signout();
-    const token = generateOTPToken(user.mfaSecret);
-    mfaLoginPage.actions.loginWithMfaToken(email, password, token);
+    const mfaCode = generateOTPToken(user.mfaSecret);
+    mfaLoginPage.actions.loginWithMfaCode(email, password, mfaCode);
     mainSidebar.actions.signout();
   });
 
-  it('Should be able to login with recovery code', () => {
+  it('Should be able to login with MFA recovery code', () => {
     const { email, password } = user;
     signinPage.actions.loginWithEmailAndPassword(email, password);
     personalSettingsPage.actions.enableMfa();
     mainSidebar.actions.signout();
-    mfaLoginPage.actions.loginWithRecoveryCode(email, password, user.mfaRecoveryCodes[0]);
+    mfaLoginPage.actions.loginWithMfaRecoveryCode(email, password, user.mfaRecoveryCodes[0]);
     mainSidebar.actions.signout();
   });
 
-  it('Should be able to disable MFA in account', () => {
+  it('Should be able to disable MFA in account with MFA code', () => {
     const { email, password } = user;
     signinPage.actions.loginWithEmailAndPassword(email, password);
     personalSettingsPage.actions.enableMfa();
     mainSidebar.actions.signout();
-    const token = generateOTPToken(user.mfaSecret);
-    mfaLoginPage.actions.loginWithMfaToken(email, password, token);
-    personalSettingsPage.actions.disableMfa();
+    const mfaCode = generateOTPToken(user.mfaSecret);
+    mfaLoginPage.actions.loginWithMfaCode(email, password, mfaCode);
+    const disableToken = generateOTPToken(user.mfaSecret);
+    personalSettingsPage.actions.disableMfa(disableToken);
+    personalSettingsPage.getters.enableMfaButton().should('exist');
+    mainSidebar.actions.signout();
+  });
+
+  it('Should be able to disable MFA in account with recovery code', () => {
+    const { email, password } = user;
+    signinPage.actions.loginWithEmailAndPassword(email, password);
+    personalSettingsPage.actions.enableMfa();
+    mainSidebar.actions.signout();
+    const mfaCode = generateOTPToken(user.mfaSecret);
+    mfaLoginPage.actions.loginWithMfaCode(email, password, mfaCode);
+    personalSettingsPage.actions.disableMfa(user.mfaRecoveryCodes[0]);
+    personalSettingsPage.getters.enableMfaButton().should('exist');
     mainSidebar.actions.signout();
   });
 });
@@ -0,0 +1,17 @@
+import {
+  deleteNode,
+  getCanvasNodes,
+  navigateToNewWorkflowPage,
+  pasteWorkflow,
+} from '../composables/workflow';
+import Workflow from '../fixtures/Switch_node_with_null_connection.json';
+
+describe('ADO-2929 can load Switch nodes', () => {
+  it('can load workflows with Switch nodes with null at connection index', () => {
+    navigateToNewWorkflowPage();
+    pasteWorkflow(Workflow);
+    getCanvasNodes().should('have.length', 3);
+    deleteNode('Switch');
+    getCanvasNodes().should('have.length', 2);
+  });
+});
@@ -1,3 +1,4 @@
+import { getCredentialSaveButton } from '../composables/modals/credential-modal';
 import { CredentialsPage, CredentialsModal } from '../pages';
 
 const credentialsPage = new CredentialsPage();
@@ -40,7 +41,7 @@ describe('Credentials', () => {
     });
 
     // Check that the credential was saved and connected successfully
-    credentialsModal.getters.saveButton().should('contain.text', 'Saved');
+    getCredentialSaveButton().should('contain.text', 'Saved');
     credentialsModal.getters.oauthConnectSuccessBanner().should('be.visible');
   });
 });

cypress/fixtures/Switch_node_with_null_connection.json (new file, 85 lines)

@@ -0,0 +1,85 @@
+{
+  "nodes": [
+    {
+      "parameters": {},
+      "id": "418350b8-b402-4d3b-93ba-3794d36c1ad5",
+      "name": "When clicking \"Test workflow\"",
+      "type": "n8n-nodes-base.manualTrigger",
+      "typeVersion": 1,
+      "position": [440, 380]
+    },
+    {
+      "parameters": {
+        "rules": {
+          "values": [
+            {
+              "conditions": {
+                "options": {
+                  "caseSensitive": true,
+                  "leftValue": "",
+                  "typeValidation": "strict"
+                },
+                "conditions": [
+                  {
+                    "leftValue": "",
+                    "rightValue": "",
+                    "operator": {
+                      "type": "string",
+                      "operation": "equals"
+                    }
+                  }
+                ],
+                "combinator": "and"
+              }
+            },
+            {},
+            {}
+          ]
+        },
+        "options": {}
+      },
+      "id": "b67ad46f-6b0d-4ff4-b2d2-dfbde44e287c",
+      "name": "Switch",
+      "type": "n8n-nodes-base.switch",
+      "typeVersion": 3,
+      "position": [660, 380]
+    },
+    {
+      "parameters": {
+        "options": {}
+      },
+      "id": "24731c11-e2a4-4854-81a6-277ce72e8a93",
+      "name": "Edit Fields",
+      "type": "n8n-nodes-base.set",
+      "typeVersion": 3.3,
+      "position": [840, 480]
+    }
+  ],
+  "connections": {
+    "When clicking \"Test workflow\"": {
+      "main": [
+        [
+          {
+            "node": "Switch",
+            "type": "main",
+            "index": 0
+          }
+        ]
+      ]
+    },
+    "Switch": {
+      "main": [
+        null,
+        null,
+        [
+          {
+            "node": "Edit Fields",
+            "type": "main",
+            "index": 0
+          }
+        ]
+      ]
+    }
+  },
+  "pinData": {}
+}
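This fixture reproduces a Switch node whose `connections.Switch.main` array contains `null` entries at output indexes 0 and 1. A minimal sketch of the defensive handling such data calls for; this is not n8n's actual loader code, and the types and function name are invented:

// Skip null connection groups instead of dereferencing them.
type Connection = { node: string; type: string; index: number };
type ConnectionsByOutput = Array<Connection[] | null>;

function targetsOf(main: ConnectionsByOutput): string[] {
  return main
    .filter((group): group is Connection[] => group !== null)
    .flatMap((group) => group.map((conn) => conn.node));
}

// For the Switch node above:
// targetsOf([null, null, [{ node: 'Edit Fields', type: 'main', index: 0 }]]) returns ['Edit Fields'].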
@@ -8,18 +8,18 @@ export class MfaLoginPage extends BasePage {
 
   getters = {
     form: () => cy.getByTestId('mfa-login-form'),
-    token: () => cy.getByTestId('token'),
-    recoveryCode: () => cy.getByTestId('recoveryCode'),
+    mfaCode: () => cy.getByTestId('mfaCode'),
+    mfaRecoveryCode: () => cy.getByTestId('mfaRecoveryCode'),
     enterRecoveryCodeButton: () => cy.getByTestId('mfa-enter-recovery-code-button'),
   };
 
   actions = {
-    loginWithMfaToken: (email: string, password: string, mfaToken: string) => {
+    loginWithMfaCode: (email: string, password: string, mfaCode: string) => {
       const signinPage = new SigninPage();
       const workflowsPage = new WorkflowsPage();
 
       cy.session(
-        [mfaToken],
+        [mfaCode],
         () => {
           cy.visit(signinPage.url);
 
@@ -30,7 +30,7 @@ export class MfaLoginPage extends BasePage {
           });
 
           this.getters.form().within(() => {
-            this.getters.token().type(mfaToken);
+            this.getters.mfaCode().type(mfaCode);
           });
 
           // we should be redirected to /workflows
@@ -43,12 +43,12 @@ export class MfaLoginPage extends BasePage {
         },
       );
     },
-    loginWithRecoveryCode: (email: string, password: string, recoveryCode: string) => {
+    loginWithMfaRecoveryCode: (email: string, password: string, mfaRecoveryCode: string) => {
       const signinPage = new SigninPage();
       const workflowsPage = new WorkflowsPage();
 
       cy.session(
-        [recoveryCode],
+        [mfaRecoveryCode],
         () => {
           cy.visit(signinPage.url);
 
@@ -61,7 +61,7 @@ export class MfaLoginPage extends BasePage {
           this.getters.enterRecoveryCodeButton().click();
 
           this.getters.form().within(() => {
-            this.getters.recoveryCode().type(recoveryCode);
+            this.getters.mfaRecoveryCode().type(mfaRecoveryCode);
           });
 
           // we should be redirected to /workflows
@@ -1,3 +1,4 @@
+import { getCredentialSaveButton, saveCredential } from '../../composables/modals/credential-modal';
 import { getVisibleSelect } from '../../utils';
 import { BasePage } from '../base';
 
@@ -13,8 +14,6 @@ export class CredentialsModal extends BasePage {
       this.getters.credentialInputs().find(`:contains('${fieldName}') .n8n-input input`),
     name: () => cy.getByTestId('credential-name'),
     nameInput: () => cy.getByTestId('credential-name').find('input'),
-    // Saving of the credentials takes a while on the CI so we need to increase the timeout
-    saveButton: () => cy.getByTestId('credential-save-button', { timeout: 5000 }),
     deleteButton: () => cy.getByTestId('credential-delete-button'),
     closeButton: () => this.getters.editCredentialModal().find('.el-dialog__close').first(),
     oauthConnectButton: () => cy.getByTestId('oauth-connect-button'),
@@ -41,17 +40,17 @@ export class CredentialsModal extends BasePage {
     },
     save: (test = false) => {
       cy.intercept('POST', '/rest/credentials').as('saveCredential');
-      this.getters.saveButton().click({ force: true });
+      saveCredential();
 
       cy.wait('@saveCredential');
       if (test) cy.wait('@testCredential');
-      this.getters.saveButton().should('contain.text', 'Saved');
+      getCredentialSaveButton().should('contain.text', 'Saved');
     },
     saveSharing: () => {
       cy.intercept('PUT', '/rest/credentials/*/share').as('shareCredential');
-      this.getters.saveButton().click({ force: true });
+      saveCredential();
       cy.wait('@shareCredential');
-      this.getters.saveButton().should('contain.text', 'Saved');
+      getCredentialSaveButton().should('contain.text', 'Saved');
     },
     close: () => {
       this.getters.closeButton().click();
@@ -65,7 +64,7 @@ export class CredentialsModal extends BasePage {
       .each(($el) => {
         cy.wrap($el).type('test');
       });
-    this.getters.saveButton().click();
+    saveCredential();
     if (closeModal) {
       this.getters.closeButton().click();
     }
@@ -22,6 +22,8 @@ export class PersonalSettingsPage extends BasePage {
     saveSettingsButton: () => cy.getByTestId('save-settings-button'),
     enableMfaButton: () => cy.getByTestId('enable-mfa-button'),
     disableMfaButton: () => cy.getByTestId('disable-mfa-button'),
+    mfaCodeOrMfaRecoveryCodeInput: () => cy.getByTestId('mfa-code-or-recovery-code-input'),
+    mfaSaveButton: () => cy.getByTestId('mfa-save-button'),
     themeSelector: () => cy.getByTestId('theme-select'),
     selectOptionsVisible: () => cy.get('.el-select-dropdown:visible .el-select-dropdown__item'),
   };
@@ -83,9 +85,11 @@ export class PersonalSettingsPage extends BasePage {
         mfaSetupModal.getters.saveButton().click();
       });
     },
-    disableMfa: () => {
+    disableMfa: (mfaCodeOrRecoveryCode: string) => {
       cy.visit(this.url);
       this.getters.disableMfaButton().click();
+      this.getters.mfaCodeOrMfaRecoveryCodeInput().type(mfaCodeOrRecoveryCode);
+      this.getters.mfaSaveButton().click();
     },
   };
 }
@@ -33,7 +33,7 @@ COPY docker/images/n8n/docker-entrypoint.sh /
 
 # Setup the Task Runner Launcher
 ARG TARGETPLATFORM
-ARG LAUNCHER_VERSION=0.6.0-rc
+ARG LAUNCHER_VERSION=0.7.0-rc
 COPY docker/images/n8n/n8n-task-runners.json /etc/n8n-task-runners.json
 # Download, verify, then extract the launcher binary
 RUN \
@@ -24,7 +24,7 @@ RUN set -eux; \
 
 # Setup the Task Runner Launcher
 ARG TARGETPLATFORM
-ARG LAUNCHER_VERSION=0.6.0-rc
+ARG LAUNCHER_VERSION=0.7.0-rc
 COPY n8n-task-runners.json /etc/n8n-task-runners.json
 # Download, verify, then extract the launcher binary
 RUN \
@@ -7,6 +7,7 @@
     "args": ["/usr/local/lib/node_modules/n8n/node_modules/@n8n/task-runner/dist/start.js"],
     "allowed-env": [
       "PATH",
+      "GENERIC_TIMEZONE",
       "N8N_RUNNERS_GRANT_TOKEN",
       "N8N_RUNNERS_N8N_URI",
       "N8N_RUNNERS_MAX_PAYLOAD",
@@ -1,6 +1,6 @@
 {
   "name": "n8n-monorepo",
-  "version": "1.69.0",
+  "version": "1.71.0",
   "private": true,
   "engines": {
     "node": ">=20.15",
@@ -62,7 +62,7 @@
     "ts-jest": "^29.1.1",
     "tsc-alias": "^1.8.10",
     "tsc-watch": "^6.2.0",
-    "turbo": "2.1.2",
+    "turbo": "2.3.3",
     "typescript": "*",
     "zx": "^8.1.4"
   },
@@ -80,7 +80,7 @@
     "tslib": "^2.6.2",
     "tsconfig-paths": "^4.2.0",
     "typescript": "^5.7.2",
-    "vue-tsc": "^2.1.6",
+    "vue-tsc": "^2.1.10",
     "ws": ">=8.17.1"
   },
   "patchedDependencies": {
@@ -90,7 +90,7 @@
     "@types/express-serve-static-core@4.17.43": "patches/@types__express-serve-static-core@4.17.43.patch",
     "@types/ws@8.5.4": "patches/@types__ws@8.5.4.patch",
     "@types/uuencode@0.0.3": "patches/@types__uuencode@0.0.3.patch",
-    "vue-tsc@2.1.6": "patches/vue-tsc@2.1.6.patch"
+    "vue-tsc@2.1.10": "patches/vue-tsc@2.1.10.patch"
     }
   }
 }
@@ -1,6 +1,6 @@
 {
   "name": "@n8n/api-types",
-  "version": "0.7.0",
+  "version": "0.9.0",
   "scripts": {
     "clean": "rimraf dist .turbo",
     "dev": "pnpm watch",
@@ -172,4 +172,5 @@ export interface FrontendSettings {
     blockFileAccessToN8nFiles: boolean;
   };
   betaFeatures: FrontendBetaFeatures[];
+  virtualSchemaView: boolean;
 }
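A hedged sketch of how a consumer might read the two fields added here; the import path and helper names are assumptions, while the field names and types come from the interface above:

import type { FrontendSettings } from '@n8n/api-types';

// True when the backend has enabled the canvas_v2 beta feature for this instance.
function hasCanvasV2(settings: FrontendSettings): boolean {
  return settings.betaFeatures.includes('canvas_v2');
}

// Mirrors the new boolean flag directly.
function hasVirtualSchemaView(settings: FrontendSettings): boolean {
  return settings.virtualSchemaView;
}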
@@ -1,6 +1,6 @@
 {
   "name": "@n8n/chat",
-  "version": "0.31.0",
+  "version": "0.32.0",
   "scripts": {
     "dev": "pnpm run storybook",
     "build": "pnpm build:vite && pnpm build:bundle",
@@ -46,11 +46,12 @@
   "devDependencies": {
     "@iconify-json/mdi": "^1.1.54",
     "@n8n/storybook": "workspace:*",
+    "@vitejs/plugin-vue": "catalog:frontend",
     "@vitest/coverage-v8": "catalog:frontend",
     "unplugin-icons": "^0.19.0",
     "vite": "catalog:frontend",
     "vitest": "catalog:frontend",
-    "vite-plugin-dts": "^4.2.3",
+    "vite-plugin-dts": "^4.3.0",
     "vue-tsc": "catalog:frontend"
   },
   "files": [
@@ -38,12 +38,12 @@ const isSubmitting = ref(false);
 const resizeObserver = ref<ResizeObserver | null>(null);
 
 const isSubmitDisabled = computed(() => {
-  return input.value === '' || waitingForResponse.value || options.disabled?.value === true;
+  return input.value === '' || unref(waitingForResponse) || options.disabled?.value === true;
 });
 
 const isInputDisabled = computed(() => options.disabled?.value === true);
 const isFileUploadDisabled = computed(
-  () => isFileUploadAllowed.value && waitingForResponse.value && !options.disabled?.value,
+  () => isFileUploadAllowed.value && unref(waitingForResponse) && !options.disabled?.value,
 );
 const isFileUploadAllowed = computed(() => unref(options.allowFileUploads) === true);
 const allowedFileTypes = computed(() => unref(options.allowedFilesMimeTypes));
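The change above swaps `waitingForResponse.value` for `unref(waitingForResponse)`, which reads the same whether the injected value is a `Ref<boolean>` or a plain boolean. A small self-contained sketch of that pattern; the names are illustrative and not taken from the component:

import { computed, ref, unref, type MaybeRef } from 'vue';

// unref(x) returns x.value for a Ref and x itself otherwise, so one computed
// covers both injection shapes.
function useSubmitDisabled(input: MaybeRef<string>, waitingForResponse: MaybeRef<boolean>) {
  return computed(() => unref(input) === '' || unref(waitingForResponse));
}

const fromRefs = useSubmitDisabled(ref('hello'), ref(false)); // refs
const fromPlain = useSubmitDisabled('', true); // plain values also work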
@@ -194,10 +194,13 @@ function adjustHeight(event: Event) {
 <template>
   <div class="chat-input" :style="styleVars" @keydown.stop="onKeyDown">
     <div class="chat-inputs">
+      <div v-if="$slots.leftPanel" class="chat-input-left-panel">
+        <slot name="leftPanel" />
+      </div>
       <textarea
         ref="chatTextArea"
-        data-test-id="chat-input"
         v-model="input"
+        data-test-id="chat-input"
         :disabled="isInputDisabled"
         :placeholder="t(props.placeholder)"
         @keydown.enter="onSubmitKeydown"
@@ -251,7 +254,7 @@
   width: 100%;
   display: flex;
   justify-content: center;
-  align-items: center;
+  align-items: flex-end;
 
   textarea {
     font-family: inherit;
@@ -259,8 +262,7 @@
     width: 100%;
     border: var(--chat--input--border, 0);
     border-radius: var(--chat--input--border-radius, 0);
-    padding: 0.8rem;
-    padding-right: calc(0.8rem + (var(--controls-count, 1) * var(--chat--textarea--height)));
+    padding: var(--chat--input--padding, 0.8rem);
     min-height: var(--chat--textarea--height, 2.5rem); // Set a smaller initial height
     max-height: var(--chat--textarea--max-height, 30rem);
     height: var(--chat--textarea--height, 2.5rem); // Set initial height same as min-height
@@ -271,6 +273,9 @@
     outline: none;
     line-height: var(--chat--input--line-height, 1.5);
 
+    &::placeholder {
+      font-size: var(--chat--input--placeholder--font-size, var(--chat--input--font-size, inherit));
+    }
     &:focus,
     &:hover {
       border-color: var(--chat--input--border-active, 0);
@@ -279,9 +284,6 @@
 }
 .chat-inputs-controls {
   display: flex;
-  position: absolute;
-  right: 0.5rem;
-  bottom: 0;
 }
 .chat-input-send-button,
 .chat-input-file-button {
@@ -340,4 +342,9 @@
   gap: 0.5rem;
   padding: var(--chat--files-spacing, 0.25rem);
 }
+
+.chat-input-left-panel {
+  width: var(--chat--input--left--panel--width, 2rem);
+  margin-left: 0.4rem;
+}
 </style>
@@ -136,7 +136,8 @@ onMounted(async () => {
   font-size: var(--chat--message--font-size, 1rem);
   padding: var(--chat--message--padding, var(--chat--spacing));
   border-radius: var(--chat--message--border-radius, var(--chat--border-radius));
-  scroll-margin: 100px;
+  scroll-margin: 3rem;
 
   .chat-message-actions {
     position: absolute;
     bottom: calc(100% - 0.5rem);
@@ -151,9 +152,6 @@
     left: auto;
     right: 0;
   }
-  &.chat-message-from-bot .chat-message-actions {
-    bottom: calc(100% - 1rem);
-  }
 
   &:hover {
     .chat-message-actions {
@@ -37,8 +37,7 @@ body {
     4. Prevent font size adjustment after orientation changes (IE, iOS)
     5. Prevent overflow from long words (all)
   */
-  font-size: 110%; /* 2 */
-  line-height: 1.6; /* 3 */
+  line-height: 1.4; /* 3 */
   -webkit-text-size-adjust: 100%; /* 4 */
   word-break: break-word; /* 5 */
 
@@ -407,7 +406,7 @@
   h4,
   h5,
   h6 {
-    margin: 3.2rem 0 0.8em;
+    margin: 2rem 0 0.8em;
   }
 
   /*
@@ -641,4 +640,15 @@
   body > a:first-child:focus {
     top: 1rem;
   }
+
+  // Lists
+  ul,
+  ol {
+    padding-left: 1.5rem;
+    margin-bottom: 1rem;
+
+    li {
+      margin-bottom: 0.5rem;
+    }
+  }
 }
@@ -1,6 +1,6 @@
 {
   "name": "@n8n/config",
-  "version": "1.19.0",
+  "version": "1.21.0",
   "scripts": {
     "clean": "rimraf dist .turbo",
     "dev": "pnpm watch",
@@ -7,5 +7,5 @@ export type FrontendBetaFeatures = 'canvas_v2';
 export class FrontendConfig {
   /** Which UI experiments to enable. Separate multiple values with a comma `,` */
   @Env('N8N_UI_BETA_FEATURES')
-  betaFeatures: StringArray<FrontendBetaFeatures> = [];
+  betaFeatures: StringArray<FrontendBetaFeatures> = ['canvas_v2'];
 }
@@ -6,10 +6,6 @@ export class WorkflowsConfig {
   @Env('WORKFLOWS_DEFAULT_NAME')
   defaultName: string = 'My workflow';
 
-  /** Show onboarding flow in new workflow */
-  @Env('N8N_ONBOARDING_FLOW_DISABLED')
-  onboardingFlowDisabled: boolean = false;
-
   /** Default option for which workflows may call the current workflow */
   @Env('N8N_WORKFLOW_CALLER_POLICY_DEFAULT_OPTION')
   callerPolicyDefaultOption: 'any' | 'none' | 'workflowsFromAList' | 'workflowsFromSameOwner' =
@@ -150,7 +150,6 @@ describe('GlobalConfig', () => {
       },
       workflows: {
         defaultName: 'My workflow',
-        onboardingFlowDisabled: false,
         callerPolicyDefaultOption: 'workflowsFromSameOwner',
       },
       endpoints: {
@@ -1,6 +1,6 @@
 {
   "name": "@n8n/imap",
-  "version": "0.7.0",
+  "version": "0.8.0",
   "scripts": {
     "clean": "rimraf dist .turbo",
     "dev": "pnpm watch",
@@ -20,7 +20,7 @@
     "dist/**/*"
   ],
   "dependencies": {
-    "iconv-lite": "0.6.3",
+    "iconv-lite": "catalog:",
     "imap": "0.8.19",
     "quoted-printable": "1.0.1",
     "utf8": "3.0.0",
@@ -1,6 +1,6 @@
 {
   "name": "@n8n/json-schema-to-zod",
-  "version": "1.1.0",
+  "version": "1.2.0",
   "description": "Converts JSON schema objects into Zod schemas",
   "types": "./dist/types/index.d.ts",
   "main": "./dist/cjs/index.js",
@@ -1,2 +1,2 @@
 export type * from './types';
-export { jsonSchemaToZod } from './json-schema-to-zod.js';
+export { jsonSchemaToZod } from './json-schema-to-zod';
@@ -1,8 +1,10 @@
-import type { ZodObjectAny } from '@langchain/core/dist/types/zod';
+import type { z } from 'zod';
 import type { BaseOutputParser } from '@langchain/core/output_parsers';
 import type { DynamicStructuredTool, Tool } from 'langchain/tools';
 import { NodeOperationError, type IExecuteFunctions, type INode } from 'n8n-workflow';

+type ZodObjectAny = z.ZodObject<any, any, any, any>;
+
 export async function extractParsedOutput(
   ctx: IExecuteFunctions,
   outputParser: BaseOutputParser<unknown>,
@@ -94,7 +94,7 @@ export class DocumentGithubLoader implements INodeType {
   };

   async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
-    console.log('Supplying data for Github Document Loader');
+    this.logger.debug('Supplying data for Github Document Loader');

     const repository = this.getNodeParameter('repository', itemIndex) as string;
     const branch = this.getNodeParameter('branch', itemIndex) as string;
@@ -1,6 +1,6 @@
 {
   "name": "@n8n/n8n-nodes-langchain",
-  "version": "1.69.0",
+  "version": "1.71.0",
   "description": "",
   "main": "index.js",
   "scripts": {
@@ -135,47 +135,47 @@
     "@getzep/zep-js": "0.9.0",
     "@google-ai/generativelanguage": "2.6.0",
     "@google-cloud/resource-manager": "5.3.0",
-    "@google/generative-ai": "0.19.0",
+    "@google/generative-ai": "0.21.0",
     "@huggingface/inference": "2.8.0",
-    "@langchain/anthropic": "0.3.7",
-    "@langchain/aws": "0.1.1",
+    "@langchain/anthropic": "0.3.8",
+    "@langchain/aws": "0.1.2",
     "@langchain/cohere": "0.3.1",
-    "@langchain/community": "0.3.11",
+    "@langchain/community": "0.3.15",
     "@langchain/core": "catalog:",
-    "@langchain/google-genai": "0.1.2",
-    "@langchain/google-vertexai": "0.1.0",
+    "@langchain/google-genai": "0.1.4",
+    "@langchain/google-vertexai": "0.1.3",
     "@langchain/groq": "0.1.2",
-    "@langchain/mistralai": "0.1.1",
-    "@langchain/ollama": "0.1.1",
-    "@langchain/openai": "0.3.11",
-    "@langchain/pinecone": "0.1.1",
-    "@langchain/qdrant": "0.1.0",
+    "@langchain/mistralai": "0.2.0",
+    "@langchain/ollama": "0.1.2",
+    "@langchain/openai": "0.3.14",
+    "@langchain/pinecone": "0.1.3",
+    "@langchain/qdrant": "0.1.1",
     "@langchain/redis": "0.1.0",
     "@langchain/textsplitters": "0.1.0",
     "@mozilla/readability": "0.5.0",
     "@n8n/json-schema-to-zod": "workspace:*",
     "@n8n/typeorm": "0.3.20-12",
     "@n8n/vm2": "3.9.25",
-    "@pinecone-database/pinecone": "3.0.3",
+    "@pinecone-database/pinecone": "4.0.0",
     "@qdrant/js-client-rest": "1.11.0",
     "@supabase/supabase-js": "2.45.4",
     "@xata.io/client": "0.28.4",
     "basic-auth": "catalog:",
     "cheerio": "1.0.0",
-    "cohere-ai": "7.13.2",
+    "cohere-ai": "7.14.0",
     "d3-dsv": "2.0.0",
     "epub2": "3.0.2",
     "form-data": "catalog:",
     "generate-schema": "2.6.0",
     "html-to-text": "9.0.5",
     "jsdom": "23.0.1",
-    "langchain": "0.3.5",
+    "langchain": "0.3.6",
     "lodash": "catalog:",
     "mammoth": "1.7.2",
     "mime-types": "2.1.35",
     "n8n-nodes-base": "workspace:*",
     "n8n-workflow": "workspace:*",
-    "openai": "4.69.0",
+    "openai": "4.73.1",
     "pdf-parse": "1.1.1",
     "pg": "8.12.0",
     "redis": "4.6.12",
@@ -66,7 +66,7 @@ export const inputSchemaField: INodeProperties = {
 };

 export const promptTypeOptions: INodeProperties = {
-  displayName: 'Prompt Source',
+  displayName: 'Prompt Source (User Message)',
   name: 'promptType',
   type: 'options',
   options: [
@@ -32,7 +32,9 @@ export class N8nStructuredOutputParser extends StructuredOutputParser<
       [{ json: { action: 'parse', text } }],
     ]);
     try {
-      const parsed = await super.parse(text);
+      const jsonString = text.includes('```') ? text.split(/```(?:json)?/)[1] : text;
+      const json = JSON.parse(jsonString.trim());
+      const parsed = await this.schema.parseAsync(json);

       const result = (get(parsed, [STRUCTURED_OUTPUT_KEY, STRUCTURED_OUTPUT_OBJECT_KEY]) ??
         get(parsed, [STRUCTURED_OUTPUT_KEY, STRUCTURED_OUTPUT_ARRAY_KEY]) ??
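The change above makes the structured output parser tolerant of model replies wrapped in a Markdown code fence: it cuts out the fenced body before running JSON.parse and then validates against the configured Zod schema. A minimal standalone sketch of that extraction step (the helper name and example schema are illustrative, not part of this commit):

import { z } from 'zod';

// Illustrative schema; in the node the schema comes from the parser configuration.
const exampleSchema = z.object({ output: z.string() });

async function parseLlmJson(text: string) {
  // Keep only the fenced body when the model wrapped its answer in ``` or ```json.
  const jsonString = text.includes('```') ? text.split(/```(?:json)?/)[1] : text;
  const json = JSON.parse(jsonString.trim());
  // Validate with Zod instead of delegating to the base parser.
  return await exampleSchema.parseAsync(json);
}

// parseLlmJson('```json\n{ "output": "hi" }\n```') resolves to { output: 'hi' }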
@@ -3,19 +3,19 @@
   "private": true,
   "version": "0.0.1",
   "devDependencies": {
-    "@chromatic-com/storybook": "^2.0.2",
-    "@storybook/addon-a11y": "^8.3.5",
-    "@storybook/addon-actions": "^8.3.5",
-    "@storybook/addon-docs": "^8.3.5",
-    "@storybook/addon-essentials": "^8.3.5",
-    "@storybook/addon-interactions": "^8.3.5",
-    "@storybook/addon-links": "^8.3.5",
-    "@storybook/addon-themes": "^8.3.5",
-    "@storybook/blocks": "^8.3.5",
-    "@storybook/test": "^8.3.5",
-    "@storybook/vue3": "^8.3.5",
-    "@storybook/vue3-vite": "^8.3.5",
-    "chromatic": "^11.10.2",
-    "storybook": "^8.3.5"
+    "@chromatic-com/storybook": "^3.2.2",
+    "@storybook/addon-a11y": "^8.4.6",
+    "@storybook/addon-actions": "^8.4.6",
+    "@storybook/addon-docs": "^8.4.6",
+    "@storybook/addon-essentials": "^8.4.6",
+    "@storybook/addon-interactions": "^8.4.6",
+    "@storybook/addon-links": "^8.4.6",
+    "@storybook/addon-themes": "^8.4.6",
+    "@storybook/blocks": "^8.4.6",
+    "@storybook/test": "^8.4.6",
+    "@storybook/vue3": "^8.4.6",
+    "@storybook/vue3-vite": "^8.4.6",
+    "chromatic": "^11.20.0",
+    "storybook": "^8.4.6"
   }
 }
@@ -1,6 +1,6 @@
 {
   "name": "@n8n/task-runner",
-  "version": "1.7.0",
+  "version": "1.9.0",
   "scripts": {
     "clean": "rimraf dist .turbo",
     "start": "node dist/start.js",
@@ -34,6 +34,9 @@ export class BaseRunnerConfig {
   @Env('N8N_RUNNERS_AUTO_SHUTDOWN_TIMEOUT')
   idleTimeout: number = 0;

+  @Env('GENERIC_TIMEZONE')
+  timezone: string = 'America/New_York';
+
   @Nested
   healthcheckServer!: HealthcheckServerConfig;
 }
@@ -1,5 +1,5 @@
 import { DateTime } from 'luxon';
-import type { CodeExecutionMode, IDataObject } from 'n8n-workflow';
+import { setGlobalState, type CodeExecutionMode, type IDataObject } from 'n8n-workflow';
 import fs from 'node:fs';
 import { builtinModules } from 'node:module';

@@ -326,6 +326,43 @@ describe('JsTaskRunner', () => {
     });
   });

+  describe('timezone', () => {
+    it('should use the specified timezone in the workflow', async () => {
+      const taskData = newDataRequestResponse(inputItems.map(wrapIntoJson), {});
+      taskData.workflow.settings = {
+        timezone: 'Europe/Helsinki',
+      };
+
+      const outcome = await execTaskWithParams({
+        task: newTaskWithSettings({
+          code: 'return { val: $now.toSeconds() }',
+          nodeMode: 'runOnceForAllItems',
+        }),
+        taskData,
+      });
+
+      const helsinkiTimeNow = DateTime.now().setZone('Europe/Helsinki').toSeconds();
+      expect(outcome.result[0].json.val).toBeCloseTo(helsinkiTimeNow, 1);
+    });
+
+    it('should use the default timezone', async () => {
+      setGlobalState({
+        defaultTimezone: 'Europe/Helsinki',
+      });
+
+      const outcome = await execTaskWithParams({
+        task: newTaskWithSettings({
+          code: 'return { val: $now.toSeconds() }',
+          nodeMode: 'runOnceForAllItems',
+        }),
+        taskData: newDataRequestResponse(inputItems.map(wrapIntoJson), {}),
+      });
+
+      const helsinkiTimeNow = DateTime.now().setZone('Europe/Helsinki').toSeconds();
+      expect(outcome.result[0].json.val).toBeCloseTo(helsinkiTimeNow, 1);
+    });
+  });
+
   it('should allow access to Node.js Buffers', async () => {
     const outcomeAll = await execTaskWithParams({
       task: newTaskWithSettings({
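The two new timezone tests pin down the precedence the task runner is expected to follow: a timezone set in the workflow settings wins, otherwise the global default installed via setGlobalState applies. A rough sketch of that resolution order, using illustrative names rather than the runner's internals:

import { DateTime } from 'luxon';

// Illustrative helper: workflow-level timezone overrides the global default.
function resolveTimezone(workflowTimezone: string | undefined, defaultTimezone: string): string {
  return workflowTimezone ?? defaultTimezone;
}

const zone = resolveTimezone(undefined, 'Europe/Helsinki'); // 'Europe/Helsinki'
const now = DateTime.now().setZone(zone); // roughly what $now reports in that zone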
@@ -17,7 +17,7 @@ describe('BuiltInsParser', () => {
   const parseAndExpectOk = (code: string) => {
     const result = parser.parseUsedBuiltIns(code);
     if (!result.ok) {
-      fail(result.error);
+      throw result.error;
     }

     return result.result;
@@ -151,6 +151,13 @@ describe('BuiltInsParser', () => {
     });
   });

+  describe('$item', () => {
+    it('should require all nodes and input when $item is used', () => {
+      const state = parseAndExpectOk('$item("0").$node["my node"].json["title"]');
+      expect(state).toEqual(new BuiltInsParserState({ needsAllNodes: true, needs$input: true }));
+    });
+  });
+
   describe('ECMAScript syntax', () => {
     describe('ES2020', () => {
       it('should parse optional chaining', () => {
@@ -125,6 +125,11 @@ export class BuiltInsParser {
   private visitIdentifier = (node: Identifier, state: BuiltInsParserState) => {
     if (node.name === '$env') {
       state.markEnvAsNeeded();
+    } else if (node.name === '$item') {
+      // $item is legacy syntax that is basically an alias for WorkflowDataProxy
+      // and allows accessing any data. We need to support it for backwards
+      // compatibility, but we're not gonna implement any optimizations
+      state.markNeedsAllNodes();
     } else if (
       node.name === '$input' ||
       node.name === '$json' ||
@@ -1,4 +1,4 @@
-import { ensureError } from 'n8n-workflow';
+import { ensureError, setGlobalState } from 'n8n-workflow';
 import Container from 'typedi';

 import { MainConfig } from './config/main-config';
@@ -44,6 +44,10 @@ function createSignalHandler(signal: string) {
 void (async function start() {
   const config = Container.get(MainConfig);

+  setGlobalState({
+    defaultTimezone: config.baseRunnerConfig.timezone,
+  });
+
   if (config.sentryConfig.sentryDsn) {
     const { ErrorReporter } = await import('@/error-reporter');
     errorReporter = new ErrorReporter(config.sentryConfig);
@@ -1,6 +1,6 @@
 {
   "name": "n8n",
-  "version": "1.69.0",
+  "version": "1.71.0",
   "description": "n8n Workflow Automation Tool",
   "main": "dist/index",
   "types": "dist/index.d.ts",
@@ -94,7 +94,7 @@
     "@n8n/permissions": "workspace:*",
     "@n8n/task-runner": "workspace:*",
     "@n8n/typeorm": "0.3.20-12",
-    "@n8n_io/ai-assistant-sdk": "1.10.3",
+    "@n8n_io/ai-assistant-sdk": "1.12.0",
     "@n8n_io/license-sdk": "2.13.1",
     "@oclif/core": "4.0.7",
     "@rudderstack/rudder-sdk-node": "2.0.9",
packages/cli/src/__tests__/error-reporting.test.ts (new file, 61 lines)
@@ -0,0 +1,61 @@
+import { GlobalConfig } from '@n8n/config';
+import type { ClientOptions, ErrorEvent } from '@sentry/types';
+import { strict as assert } from 'node:assert';
+import { Container } from 'typedi';
+
+import { InternalServerError } from '@/errors/response-errors/internal-server.error';
+
+const init = jest.fn();
+
+jest.mock('@sentry/integrations');
+jest.mock('@sentry/node', () => ({
+  init,
+  setTag: jest.fn(),
+  captureException: jest.fn(),
+  Integrations: {},
+}));
+
+jest.spyOn(process, 'on');
+
+describe('initErrorHandling', () => {
+  let beforeSend: ClientOptions['beforeSend'];
+
+  beforeAll(async () => {
+    Container.get(GlobalConfig).sentry.backendDsn = 'backend-dsn';
+    const errorReporting = require('@/error-reporting');
+    await errorReporting.initErrorHandling();
+    const options = (init.mock.calls[0] as [ClientOptions])[0];
+    beforeSend = options.beforeSend;
+  });
+
+  it('ignores errors with level warning', async () => {
+    const originalException = new InternalServerError('test');
+    originalException.level = 'warning';
+
+    const event = {} as ErrorEvent;
+
+    assert(beforeSend);
+    expect(await beforeSend(event, { originalException })).toEqual(null);
+  });
+
+  it('keeps events with a cause with error level', async () => {
+    const cause = new Error('cause-error');
+
+    const originalException = new InternalServerError('test', cause);
+    const event = {} as ErrorEvent;
+
+    assert(beforeSend);
+    expect(await beforeSend(event, { originalException })).toEqual(event);
+  });
+
+  it('ignores events with error cause with warning level', async () => {
+    const cause: Error & { level?: 'warning' } = new Error('cause-error');
+    cause.level = 'warning';
+
+    const originalException = new InternalServerError('test', cause);
+    const event = {} as ErrorEvent;
+
+    assert(beforeSend);
+    expect(await beforeSend(event, { originalException })).toEqual(null);
+  });
+});
packages/cli/src/__tests__/object-to-error.test.ts (new file, 43 lines)
@@ -0,0 +1,43 @@
+import { mock } from 'jest-mock-extended';
+import type { INode } from 'n8n-workflow';
+import { NodeOperationError, type Workflow } from 'n8n-workflow';
+
+import { objectToError } from '../workflow-execute-additional-data';
+
+describe('objectToError', () => {
+  describe('node error handling', () => {
+    it('should create `NodeOperationError` when node is found', () => {
+      const errorObject = {
+        message: 'Test error',
+        node: {
+          name: 'testNode',
+        },
+      };
+      const workflow = mock<Workflow>();
+      const node = mock<INode>();
+      workflow.getNode.mockReturnValue(node);
+
+      const result = objectToError(errorObject, workflow);
+
+      expect(workflow.getNode).toHaveBeenCalledWith('testNode');
+      expect(result).toBeInstanceOf(NodeOperationError);
+    });
+
+    it('should create `Error` when node is not found', () => {
+      const errorObject = {
+        message: 'Test error',
+        node: {
+          // missing `name`
+        },
+      };
+      const workflow = mock<Workflow>();
+
+      const result = objectToError(errorObject, workflow);
+
+      expect(workflow.getNode).not.toHaveBeenCalled();
+      expect(result).toBeInstanceOf(Error);
+      expect(result).not.toBeInstanceOf(NodeOperationError);
+      expect(result.message).toBe('Test error');
+    });
+  });
+});
@@ -1,33 +1,46 @@
 import { mock } from 'jest-mock-extended';
 import type { InstanceSettings } from 'n8n-core';
+import type { IWorkflowBase } from 'n8n-workflow';
+
+import type { Project } from '@/databases/entities/project';
 import type { ExecutionRepository } from '@/databases/repositories/execution.repository';
 import type { IExecutionResponse } from '@/interfaces';
 import type { MultiMainSetup } from '@/scaling/multi-main-setup.ee';
 import { OrchestrationService } from '@/services/orchestration.service';
+import type { OwnershipService } from '@/services/ownership.service';
 import { WaitTracker } from '@/wait-tracker';
+import type { WorkflowRunner } from '@/workflow-runner';
 import { mockLogger } from '@test/mocking';

 jest.useFakeTimers();

 describe('WaitTracker', () => {
+  const ownershipService = mock<OwnershipService>();
+  const workflowRunner = mock<WorkflowRunner>();
   const executionRepository = mock<ExecutionRepository>();
   const multiMainSetup = mock<MultiMainSetup>();
   const orchestrationService = new OrchestrationService(mock(), multiMainSetup, mock());
   const instanceSettings = mock<InstanceSettings>({ isLeader: true });

+  const project = mock<Project>({ id: 'projectId' });
   const execution = mock<IExecutionResponse>({
     id: '123',
+    finished: false,
     waitTill: new Date(Date.now() + 1000),
+    mode: 'manual',
+    data: mock({
+      pushRef: 'push_ref',
+    }),
   });
+  execution.workflowData = mock<IWorkflowBase>({ id: 'abcd' });
+
   let waitTracker: WaitTracker;
   beforeEach(() => {
     waitTracker = new WaitTracker(
       mockLogger(),
       executionRepository,
-      mock(),
-      mock(),
+      ownershipService,
+      workflowRunner,
       orchestrationService,
       instanceSettings,
     );
@@ -64,29 +77,31 @@ describe('WaitTracker', () => {
   });

   describe('if execution to start', () => {
-    it('if not enough time passed, should not start execution', async () => {
-      executionRepository.getWaitingExecutions.mockResolvedValue([execution]);
-      waitTracker.init();
+    let startExecutionSpy: jest.SpyInstance<Promise<void>, [executionId: string]>;
+
+    beforeEach(() => {
+      executionRepository.findSingleExecution.mockResolvedValue(execution);
+      executionRepository.getWaitingExecutions.mockResolvedValue([execution]);
+      ownershipService.getWorkflowProjectCached.mockResolvedValue(project);
+
+      startExecutionSpy = jest
+        .spyOn(waitTracker, 'startExecution')
+        .mockImplementation(async () => {});
+
+      waitTracker.init();
+    });

-      executionRepository.getWaitingExecutions.mockResolvedValue([execution]);
+    it('if not enough time passed, should not start execution', async () => {
       await waitTracker.getWaitingExecutions();

-      const startExecutionSpy = jest.spyOn(waitTracker, 'startExecution');
-
       jest.advanceTimersByTime(100);

       expect(startExecutionSpy).not.toHaveBeenCalled();
     });

     it('if enough time passed, should start execution', async () => {
-      executionRepository.getWaitingExecutions.mockResolvedValue([]);
-      waitTracker.init();
-
-      executionRepository.getWaitingExecutions.mockResolvedValue([execution]);
       await waitTracker.getWaitingExecutions();

-      const startExecutionSpy = jest.spyOn(waitTracker, 'startExecution');
-
       jest.advanceTimersByTime(2_000);

       expect(startExecutionSpy).toHaveBeenCalledWith(execution.id);
@@ -100,13 +115,27 @@ describe('WaitTracker', () => {
       waitTracker.init();

       executionRepository.findSingleExecution.mockResolvedValue(execution);
-      waitTracker.startExecution(execution.id);
-      jest.advanceTimersByTime(5);
+      ownershipService.getWorkflowProjectCached.mockResolvedValue(project);
+
+      await waitTracker.startExecution(execution.id);

       expect(executionRepository.findSingleExecution).toHaveBeenCalledWith(execution.id, {
         includeData: true,
         unflattenData: true,
       });
+
+      expect(workflowRunner.run).toHaveBeenCalledWith(
+        {
+          executionMode: execution.mode,
+          executionData: execution.data,
+          workflowData: execution.workflowData,
+          projectId: project.id,
+          pushRef: execution.data.pushRef,
+        },
+        false,
+        false,
+        execution.id,
+      );
     });
   });
@@ -135,8 +164,8 @@ describe('WaitTracker', () => {
     const waitTracker = new WaitTracker(
       mockLogger(),
       executionRepository,
-      mock(),
-      mock(),
+      ownershipService,
+      workflowRunner,
       orchestrationService,
       mock<InstanceSettings>({ isLeader: false }),
     );
@@ -1,4 +1,23 @@
-import { WorkflowHooks, type ExecutionError, type IWorkflowExecuteHooks } from 'n8n-workflow';
+import { mock } from 'jest-mock-extended';
+import { DirectedGraph, WorkflowExecute } from 'n8n-core';
+import * as core from 'n8n-core';
+import type {
+  IExecuteData,
+  INode,
+  IRun,
+  ITaskData,
+  IWaitingForExecution,
+  IWaitingForExecutionSource,
+  IWorkflowExecutionDataProcess,
+  StartNodeData,
+} from 'n8n-workflow';
+import {
+  Workflow,
+  WorkflowHooks,
+  type ExecutionError,
+  type IWorkflowExecuteHooks,
+} from 'n8n-workflow';
+import PCancelable from 'p-cancelable';
 import Container from 'typedi';

 import { ActiveExecutions } from '@/active-executions';
@@ -6,6 +25,7 @@ import config from '@/config';
 import type { User } from '@/databases/entities/user';
 import { ExecutionNotFoundError } from '@/errors/execution-not-found-error';
 import { Telemetry } from '@/telemetry';
+import { PermissionChecker } from '@/user-management/permission-checker';
 import { WorkflowRunner } from '@/workflow-runner';
 import { mockInstance } from '@test/mocking';
 import { createExecution } from '@test-integration/db/executions';
@@ -43,61 +63,138 @@ afterAll(() => {

 beforeEach(async () => {
   await testDb.truncate(['Workflow', 'SharedWorkflow']);
+  jest.clearAllMocks();
 });

+describe('processError', () => {
   test('processError should return early in Bull stalled edge case', async () => {
     const workflow = await createWorkflow({}, owner);
     const execution = await createExecution(
       {
         status: 'success',
         finished: true,
       },
       workflow,
     );
     config.set('executions.mode', 'queue');
     await runner.processError(
       new Error('test') as ExecutionError,
       new Date(),
       'webhook',
       execution.id,
       new WorkflowHooks(hookFunctions, 'webhook', execution.id, workflow),
     );
     expect(watchedWorkflowExecuteAfter).toHaveBeenCalledTimes(0);
   });

   test('processError should return early if the error is `ExecutionNotFoundError`', async () => {
     const workflow = await createWorkflow({}, owner);
     const execution = await createExecution({ status: 'success', finished: true }, workflow);
     await runner.processError(
       new ExecutionNotFoundError(execution.id),
       new Date(),
       'webhook',
       execution.id,
       new WorkflowHooks(hookFunctions, 'webhook', execution.id, workflow),
     );
     expect(watchedWorkflowExecuteAfter).toHaveBeenCalledTimes(0);
   });

   test('processError should process error', async () => {
     const workflow = await createWorkflow({}, owner);
     const execution = await createExecution(
       {
         status: 'success',
         finished: true,
       },
       workflow,
     );
     await Container.get(ActiveExecutions).add(
       { executionMode: 'webhook', workflowData: workflow },
       execution.id,
     );
     config.set('executions.mode', 'regular');
     await runner.processError(
       new Error('test') as ExecutionError,
       new Date(),
       'webhook',
       execution.id,
       new WorkflowHooks(hookFunctions, 'webhook', execution.id, workflow),
     );
     expect(watchedWorkflowExecuteAfter).toHaveBeenCalledTimes(1);
   });
+});

+describe('run', () => {
+  it('uses recreateNodeExecutionStack to create a partial execution if a triggerToStartFrom with data is sent', async () => {
+    // ARRANGE
+    const activeExecutions = Container.get(ActiveExecutions);
+    jest.spyOn(activeExecutions, 'add').mockResolvedValue('1');
+    jest.spyOn(activeExecutions, 'attachWorkflowExecution').mockReturnValueOnce();
+    const permissionChecker = Container.get(PermissionChecker);
+    jest.spyOn(permissionChecker, 'check').mockResolvedValueOnce();
+
+    jest.spyOn(WorkflowExecute.prototype, 'processRunExecutionData').mockReturnValueOnce(
+      new PCancelable(() => {
+        return mock<IRun>();
+      }),
+    );
+
+    jest.spyOn(Workflow.prototype, 'getNode').mockReturnValueOnce(mock<INode>());
+    jest.spyOn(DirectedGraph, 'fromWorkflow').mockReturnValueOnce(new DirectedGraph());
+    const recreateNodeExecutionStackSpy = jest
+      .spyOn(core, 'recreateNodeExecutionStack')
+      .mockReturnValueOnce({
+        nodeExecutionStack: mock<IExecuteData[]>(),
+        waitingExecution: mock<IWaitingForExecution>(),
+        waitingExecutionSource: mock<IWaitingForExecutionSource>(),
+      });
+
+    const data = mock<IWorkflowExecutionDataProcess>({
+      triggerToStartFrom: { name: 'trigger', data: mock<ITaskData>() },
+
+      workflowData: { nodes: [] },
+      executionData: undefined,
+      startNodes: [mock<StartNodeData>()],
+      destinationNode: undefined,
+    });
+
+    // ACT
+    await runner.run(data);
+
+    // ASSERT
+    expect(recreateNodeExecutionStackSpy).toHaveBeenCalled();
+  });
+
+  it('does not use recreateNodeExecutionStack to create a partial execution if a triggerToStartFrom without data is sent', async () => {
+    // ARRANGE
+    const activeExecutions = Container.get(ActiveExecutions);
+    jest.spyOn(activeExecutions, 'add').mockResolvedValue('1');
+    jest.spyOn(activeExecutions, 'attachWorkflowExecution').mockReturnValueOnce();
+    const permissionChecker = Container.get(PermissionChecker);
+    jest.spyOn(permissionChecker, 'check').mockResolvedValueOnce();
+
+    jest.spyOn(WorkflowExecute.prototype, 'processRunExecutionData').mockReturnValueOnce(
+      new PCancelable(() => {
+        return mock<IRun>();
+      }),
+    );
+
+    const recreateNodeExecutionStackSpy = jest.spyOn(core, 'recreateNodeExecutionStack');
+
+    const data = mock<IWorkflowExecutionDataProcess>({
+      triggerToStartFrom: { name: 'trigger', data: undefined },
+
+      workflowData: { nodes: [] },
+      executionData: undefined,
+      startNodes: [mock<StartNodeData>()],
+      destinationNode: undefined,
+    });
+
+    // ACT
+    await runner.run(data);
+
+    // ASSERT
+    expect(recreateNodeExecutionStackSpy).not.toHaveBeenCalled();
+  });
+});
@@ -4,7 +4,7 @@ import fs from 'fs';
 import { diff } from 'json-diff';
 import pick from 'lodash/pick';
 import type { IRun, ITaskData, IWorkflowExecutionDataProcess } from 'n8n-workflow';
-import { ApplicationError, jsonParse } from 'n8n-workflow';
+import { ApplicationError, jsonParse, ErrorReporterProxy } from 'n8n-workflow';
 import os from 'os';
 import { sep } from 'path';
 import { Container } from 'typedi';
@@ -822,6 +822,11 @@ export class ExecuteBatch extends BaseCommand {
           }
         }
       } catch (e) {
+        ErrorReporterProxy.error(e, {
+          extra: {
+            workflowId: workflowData.id,
+          },
+        });
         executionResult.error = `Workflow failed to execute: ${(e as Error).message}`;
         executionResult.executionStatus = 'error';
       }
@@ -405,4 +405,11 @@ export const schema = {
       doc: 'Set this to 1 to enable the new partial execution logic by default.',
     },
   },
+
+  virtualSchemaView: {
+    doc: 'Whether to display the virtualized schema view',
+    format: Boolean,
+    default: false,
+    env: 'N8N_VIRTUAL_SCHEMA_VIEW',
+  },
 };
@@ -1,6 +1,5 @@
 import type { AiAssistantSDK } from '@n8n_io/ai-assistant-sdk';
 import type { Response } from 'express';
-import { ErrorReporterProxy } from 'n8n-workflow';
 import { strict as assert } from 'node:assert';
 import { WritableStream } from 'node:stream/web';

@@ -33,8 +32,7 @@ export class AiController {
       }
     } catch (e) {
       assert(e instanceof Error);
-      ErrorReporterProxy.error(e);
-      throw new InternalServerError(`Something went wrong: ${e.message}`);
+      throw new InternalServerError(e.message, e);
     }
   }

@@ -46,8 +44,7 @@ export class AiController {
       return await this.aiService.applySuggestion(req.body, req.user);
     } catch (e) {
       assert(e instanceof Error);
-      ErrorReporterProxy.error(e);
-      throw new InternalServerError(`Something went wrong: ${e.message}`);
+      throw new InternalServerError(e.message, e);
     }
   }

@@ -57,8 +54,7 @@ export class AiController {
       return await this.aiService.askAi(req.body, req.user);
     } catch (e) {
       assert(e instanceof Error);
-      ErrorReporterProxy.error(e);
-      throw new InternalServerError(`Something went wrong: ${e.message}`);
+      throw new InternalServerError(e.message, e);
     }
   }
 }
@@ -41,7 +41,7 @@ export class AuthController {
   /** Log in a user */
   @Post('/login', { skipAuth: true, rateLimit: true })
   async login(req: LoginRequest, res: Response): Promise<PublicUser | undefined> {
-    const { email, password, mfaToken, mfaRecoveryCode } = req.body;
+    const { email, password, mfaCode, mfaRecoveryCode } = req.body;
     if (!email) throw new ApplicationError('Email is required to log in');
     if (!password) throw new ApplicationError('Password is required to log in');

@@ -75,16 +75,16 @@ export class AuthController {

     if (user) {
       if (user.mfaEnabled) {
-        if (!mfaToken && !mfaRecoveryCode) {
+        if (!mfaCode && !mfaRecoveryCode) {
           throw new AuthError('MFA Error', 998);
         }

-        const isMFATokenValid = await this.mfaService.validateMfa(
+        const isMfaCodeOrMfaRecoveryCodeValid = await this.mfaService.validateMfa(
           user.id,
-          mfaToken,
+          mfaCode,
           mfaRecoveryCode,
         );
-        if (!isMFATokenValid) {
+        if (!isMfaCodeOrMfaRecoveryCodeValid) {
           throw new AuthError('Invalid mfa token or recovery code');
         }
       }
@@ -201,7 +201,7 @@ export class CommunityPackagesController {
         error instanceof Error ? error.message : UNKNOWN_FAILURE_REASON,
       ].join(':');

-      throw new InternalServerError(message);
+      throw new InternalServerError(message, error);
     }

     // broadcast to connected frontends that node list has been updated
@@ -283,7 +283,7 @@ export class CommunityPackagesController {
         error instanceof Error ? error.message : UNKNOWN_FAILURE_REASON,
       ].join(':');

-      throw new InternalServerError(message);
+      throw new InternalServerError(message, error);
     }
   }
 }
@@ -68,8 +68,8 @@ export class MeController {
       throw new BadRequestError('Two-factor code is required to change email');
     }

-    const isMfaTokenValid = await this.mfaService.validateMfa(userId, payload.mfaCode, undefined);
-    if (!isMfaTokenValid) {
+    const isMfaCodeValid = await this.mfaService.validateMfa(userId, payload.mfaCode, undefined);
+    if (!isMfaCodeValid) {
       throw new InvalidMfaCodeError();
     }
   }
@@ -142,8 +142,8 @@ export class MeController {
       throw new BadRequestError('Two-factor code is required to change password.');
     }

-    const isMfaTokenValid = await this.mfaService.validateMfa(user.id, mfaCode, undefined);
-    if (!isMfaTokenValid) {
+    const isMfaCodeValid = await this.mfaService.validateMfa(user.id, mfaCode, undefined);
+    if (!isMfaCodeValid) {
       throw new InvalidMfaCodeError();
     }
   }
@@ -59,7 +59,7 @@ export class MFAController {

   @Post('/enable', { rateLimit: true })
   async activateMFA(req: MFA.Activate) {
-    const { token = null } = req.body;
+    const { mfaCode = null } = req.body;
     const { id, mfaEnabled } = req.user;

     await this.externalHooks.run('mfa.beforeSetup', [req.user]);
@@ -67,7 +67,7 @@ export class MFAController {
     const { decryptedSecret: secret, decryptedRecoveryCodes: recoveryCodes } =
       await this.mfaService.getSecretAndRecoveryCodes(id);

-    if (!token) throw new BadRequestError('Token is required to enable MFA feature');
+    if (!mfaCode) throw new BadRequestError('Token is required to enable MFA feature');

     if (mfaEnabled) throw new BadRequestError('MFA already enabled');

@@ -75,10 +75,10 @@ export class MFAController {
       throw new BadRequestError('Cannot enable MFA without generating secret and recovery codes');
     }

-    const verified = this.mfaService.totp.verifySecret({ secret, token, window: 10 });
+    const verified = this.mfaService.totp.verifySecret({ secret, mfaCode, window: 10 });

     if (!verified)
-      throw new BadRequestError('MFA token expired. Close the modal and enable MFA again', 997);
+      throw new BadRequestError('MFA code expired. Close the modal and enable MFA again', 997);

     await this.mfaService.enableMfa(id);
   }
@@ -86,27 +86,38 @@ export class MFAController {

   @Post('/disable', { rateLimit: true })
   async disableMFA(req: MFA.Disable) {
     const { id: userId } = req.user;
-    const { token = null } = req.body;
+    const { mfaCode, mfaRecoveryCode } = req.body;

-    if (typeof token !== 'string' || !token) {
-      throw new BadRequestError('Token is required to disable MFA feature');
+    const mfaCodeDefined = mfaCode && typeof mfaCode === 'string';
+
+    const mfaRecoveryCodeDefined = mfaRecoveryCode && typeof mfaRecoveryCode === 'string';
+
+    if (!mfaCodeDefined === !mfaRecoveryCodeDefined) {
+      throw new BadRequestError(
+        'Either MFA code or recovery code is required to disable MFA feature',
+      );
     }

-    await this.mfaService.disableMfa(userId, token);
+    if (mfaCodeDefined) {
+      await this.mfaService.disableMfaWithMfaCode(userId, mfaCode);
+    } else if (mfaRecoveryCodeDefined) {
+      await this.mfaService.disableMfaWithRecoveryCode(userId, mfaRecoveryCode);
+    }
   }

   @Post('/verify', { rateLimit: true })
   async verifyMFA(req: MFA.Verify) {
     const { id } = req.user;
-    const { token } = req.body;
+    const { mfaCode } = req.body;

     const { decryptedSecret: secret } = await this.mfaService.getSecretAndRecoveryCodes(id);

-    if (!token) throw new BadRequestError('Token is required to enable MFA feature');
+    if (!mfaCode) throw new BadRequestError('MFA code is required to enable MFA feature');

     if (!secret) throw new BadRequestError('No MFA secret se for this user');

-    const verified = this.mfaService.totp.verifySecret({ secret, token });
+    const verified = this.mfaService.totp.verifySecret({ secret, mfaCode });

     if (!verified) throw new BadRequestError('MFA secret could not be verified');
   }
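The reworked disable endpoint accepts either an MFA code or a recovery code, but not both and not neither; the `!mfaCodeDefined === !mfaRecoveryCodeDefined` comparison is a compact exactly-one-of check. A small illustrative sketch of the same pattern outside the controller (the function name is hypothetical):

// Returns true only when exactly one of the two credentials is supplied.
function exactlyOneProvided(mfaCode?: unknown, mfaRecoveryCode?: unknown): boolean {
  const codeDefined = typeof mfaCode === 'string' && mfaCode.length > 0;
  const recoveryDefined = typeof mfaRecoveryCode === 'string' && mfaRecoveryCode.length > 0;
  // !a === !b is true when both or neither are set, i.e. the invalid cases.
  return !(!codeDefined === !recoveryDefined);
}

// exactlyOneProvided('123456', undefined)   -> true
// exactlyOneProvided(undefined, undefined)  -> false
// exactlyOneProvided('123456', 'RECOVERY')  -> false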
@@ -120,7 +120,7 @@ export class PasswordResetController {
         publicApi: false,
       });
       if (error instanceof Error) {
-        throw new InternalServerError(`Please contact your administrator: ${error.message}`);
+        throw new InternalServerError(`Please contact your administrator: ${error.message}`, error);
       }
     }

@@ -171,7 +171,7 @@ export class PasswordResetController {
   */
   @Post('/change-password', { skipAuth: true })
   async changePassword(req: PasswordResetRequest.NewPassword, res: Response) {
-    const { token, password, mfaToken } = req.body;
+    const { token, password, mfaCode } = req.body;

     if (!token || !password) {
       this.logger.debug(
@@ -189,11 +189,11 @@ export class PasswordResetController {
     if (!user) throw new NotFoundError('');

     if (user.mfaEnabled) {
-      if (!mfaToken) throw new BadRequestError('If MFA enabled, mfaToken is required.');
+      if (!mfaCode) throw new BadRequestError('If MFA enabled, mfaCode is required.');

       const { decryptedSecret: secret } = await this.mfaService.getSecretAndRecoveryCodes(user.id);

-      const validToken = this.mfaService.totp.verifySecret({ secret, token: mfaToken });
+      const validToken = this.mfaService.totp.verifySecret({ secret, mfaCode });

       if (!validToken) throw new BadRequestError('Invalid MFA token.');
     }
@@ -54,7 +54,7 @@ export class TranslationController {
       // eslint-disable-next-line @typescript-eslint/no-unsafe-return
       return require(NODE_HEADERS_PATH);
     } catch (error) {
-      throw new InternalServerError('Failed to load headers file');
+      throw new InternalServerError('Failed to load headers file', error);
     }
   }
 }
@@ -38,7 +38,7 @@ export class PurgeInvalidWorkflowConnections1675940580449 implements Irreversibl

       // It filters out all connections that are connected to a node that cannot receive input
       outputConnection.forEach((outputConnectionItem, outputConnectionItemIdx) => {
-        outputConnection[outputConnectionItemIdx] = outputConnectionItem.filter(
+        outputConnection[outputConnectionItemIdx] = (outputConnectionItem ?? []).filter(
           (outgoingConnections) =>
             !nodesThatCannotReceiveInput.includes(outgoingConnections.node),
         );
@@ -90,6 +90,17 @@ export const initErrorHandling = async () => {
       if (tags) event.tags = { ...event.tags, ...tags };
     }

+    if (
+      originalException instanceof Error &&
+      'cause' in originalException &&
+      originalException.cause instanceof Error &&
+      'level' in originalException.cause &&
+      originalException.cause.level === 'warning'
+    ) {
+      // handle underlying errors propagating from dependencies like ai-assistant-sdk
+      return null;
+    }
+
     if (originalException instanceof Error && originalException.stack) {
       const eventHash = createHash('sha1').update(originalException.stack).digest('base64');
       if (seenErrors.has(eventHash)) return null;
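Together with the new error-reporting tests earlier in this commit, this hook drops any event whose originalException carries a cause marked with level 'warning' before it reaches Sentry. A hedged sketch of how calling code can rely on that (the surrounding setup is illustrative; only the InternalServerError signature comes from this commit):

import { InternalServerError } from '@/errors/response-errors/internal-server.error';

// A dependency error downgraded to a warning...
const sdkError: Error & { level?: 'warning' } = new Error('assistant unavailable');
sdkError.level = 'warning';

// ...is attached as `cause` via the new constructor parameter. The beforeSend filter
// above sees cause.level === 'warning' and returns null, so Sentry never receives it.
const reported = new InternalServerError('Something went wrong while contacting the assistant', sdkError);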
@@ -16,8 +16,9 @@ export abstract class ResponseError extends ApplicationError {
     readonly errorCode: number = httpStatusCode,
     // The error hint the response
     readonly hint: string | undefined = undefined,
+    cause?: unknown,
   ) {
-    super(message);
+    super(message, { cause });
     this.name = 'ResponseError';
   }
 }
@@ -1,7 +1,7 @@
 import { ResponseError } from './abstract/response.error';

 export class InternalServerError extends ResponseError {
-  constructor(message: string, errorCode = 500) {
-    super(message, 500, errorCode);
+  constructor(message: string, cause?: unknown) {
+    super(message, 500, 500, undefined, cause);
   }
 }
@@ -0,0 +1,7 @@
+import { ForbiddenError } from './forbidden.error';
+
+export class InvalidMfaRecoveryCodeError extends ForbiddenError {
+  constructor(hint?: string) {
+    super('Invalid MFA recovery code', hint);
+  }
+}
@@ -0,0 +1,24 @@
+import { readFileSync } from 'fs';
+import path from 'path';
+
+import { createPinData } from '../utils.ee';
+
+const wfUnderTestJson = JSON.parse(
+  readFileSync(path.join(__dirname, './mock-data/workflow.under-test.json'), { encoding: 'utf-8' }),
+);
+
+const executionDataJson = JSON.parse(
+  readFileSync(path.join(__dirname, './mock-data/execution-data.json'), { encoding: 'utf-8' }),
+);
+
+describe('createPinData', () => {
+  test('should create pin data from past execution data', () => {
+    const pinData = createPinData(wfUnderTestJson, executionDataJson);
+
+    expect(pinData).toEqual(
+      expect.objectContaining({
+        'When clicking ‘Test workflow’': expect.anything(),
+      }),
+    );
+  });
+});
@@ -0,0 +1,72 @@
+import { EvaluationMetrics } from '../evaluation-metrics.ee';
+
+describe('EvaluationMetrics', () => {
+  test('should aggregate metrics correctly', () => {
+    const testMetricNames = new Set(['metric1', 'metric2']);
+    const metrics = new EvaluationMetrics(testMetricNames);
+
+    metrics.addResults({ metric1: 1, metric2: 0 });
+    metrics.addResults({ metric1: 0.5, metric2: 0.2 });
+
+    const aggregatedMetrics = metrics.getAggregatedMetrics();
+
+    expect(aggregatedMetrics).toEqual({ metric1: 0.75, metric2: 0.1 });
+  });
+
+  test('should aggregate only numbers', () => {
+    const testMetricNames = new Set(['metric1', 'metric2']);
+    const metrics = new EvaluationMetrics(testMetricNames);
+
+    metrics.addResults({ metric1: 1, metric2: 0 });
+    metrics.addResults({ metric1: '0.5', metric2: 0.2 });
+    metrics.addResults({ metric1: 'not a number', metric2: [1, 2, 3] });
+
+    const aggregatedUpMetrics = metrics.getAggregatedMetrics();
+
+    expect(aggregatedUpMetrics).toEqual({ metric1: 1, metric2: 0.1 });
+  });
+
+  test('should handle missing values', () => {
+    const testMetricNames = new Set(['metric1', 'metric2']);
+    const metrics = new EvaluationMetrics(testMetricNames);
+
+    metrics.addResults({ metric1: 1 });
+    metrics.addResults({ metric2: 0.2 });
+
+    const aggregatedMetrics = metrics.getAggregatedMetrics();
+
+    expect(aggregatedMetrics).toEqual({ metric1: 1, metric2: 0.2 });
+  });
+
+  test('should handle empty metrics', () => {
+    const testMetricNames = new Set(['metric1', 'metric2']);
+    const metrics = new EvaluationMetrics(testMetricNames);
+
+    const aggregatedMetrics = metrics.getAggregatedMetrics();
+
+    expect(aggregatedMetrics).toEqual({});
+  });
+
+  test('should handle empty testMetrics', () => {
+    const metrics = new EvaluationMetrics(new Set());
+
+    metrics.addResults({ metric1: 1, metric2: 0 });
+    metrics.addResults({ metric1: 0.5, metric2: 0.2 });
+
+    const aggregatedMetrics = metrics.getAggregatedMetrics();
+
+    expect(aggregatedMetrics).toEqual({});
+  });
+
+  test('should ignore non-relevant values', () => {
+    const testMetricNames = new Set(['metric1']);
+    const metrics = new EvaluationMetrics(testMetricNames);
+
+    metrics.addResults({ metric1: 1, notRelevant: 0 });
+    metrics.addResults({ metric1: 0.5, notRelevant2: { foo: 'bar' } });
+
+    const aggregatedMetrics = metrics.getAggregatedMetrics();
+
+    expect(aggregatedMetrics).toEqual({ metric1: 0.75 });
+  });
+});
@@ -0,0 +1,40 @@
+import { readFileSync } from 'fs';
+import path from 'path';
+
+import { getPastExecutionStartNode } from '../utils.ee';
+
+const executionDataJson = JSON.parse(
+	readFileSync(path.join(__dirname, './mock-data/execution-data.json'), { encoding: 'utf-8' }),
+);
+
+const executionDataMultipleTriggersJson = JSON.parse(
+	readFileSync(path.join(__dirname, './mock-data/execution-data.multiple-triggers.json'), {
+		encoding: 'utf-8',
+	}),
+);
+
+const executionDataMultipleTriggersJson2 = JSON.parse(
+	readFileSync(path.join(__dirname, './mock-data/execution-data.multiple-triggers-2.json'), {
+		encoding: 'utf-8',
+	}),
+);
+
+describe('getPastExecutionStartNode', () => {
+	test('should return the start node of the past execution', () => {
+		const startNode = getPastExecutionStartNode(executionDataJson);
+
+		expect(startNode).toEqual('When clicking ‘Test workflow’');
+	});
+
+	test('should return the start node of the past execution with multiple triggers', () => {
+		const startNode = getPastExecutionStartNode(executionDataMultipleTriggersJson);
+
+		expect(startNode).toEqual('When clicking ‘Test workflow’');
+	});
+
+	test('should return the start node of the past execution with multiple triggers - chat trigger', () => {
+		const startNode = getPastExecutionStartNode(executionDataMultipleTriggersJson2);
+
+		expect(startNode).toEqual('When chat message received');
+	});
+});
@@ -0,0 +1,95 @@
+{
+  "startData": {},
+  "resultData": {
+    "runData": {
+      "When chat message received": [
+        {
+          "startTime": 1732882447976,
+          "executionTime": 0,
+          "executionStatus": "success",
+          "data": {
+            "main": [
+              [
+                {
+                  "json": {
+                    "sessionId": "192c5b3c0b0642d68eab1a747a59cb6e",
+                    "action": "sendMessage",
+                    "chatInput": "hey"
+                  }
+                }
+              ]
+            ]
+          },
+          "source": [null]
+        }
+      ],
+      "NoOp": [
+        {
+          "hints": [],
+          "startTime": 1732882448034,
+          "executionTime": 0,
+          "source": [
+            {
+              "previousNode": "When clicking ‘Test workflow’"
+            }
+          ],
+          "executionStatus": "success",
+          "data": {
+            "main": [
+              [
+                {
+                  "json": {
+                    "sessionId": "192c5b3c0b0642d68eab1a747a59cb6e",
+                    "action": "sendMessage",
+                    "chatInput": "hey"
+                  },
+                  "pairedItem": {
+                    "item": 0
+                  }
+                }
+              ]
+            ]
+          }
+        }
+      ],
+      "NoOp2": [
+        {
+          "hints": [],
+          "startTime": 1732882448037,
+          "executionTime": 0,
+          "source": [
+            {
+              "previousNode": "NoOp"
+            }
+          ],
+          "executionStatus": "success",
+          "data": {
+            "main": [
+              [
+                {
+                  "json": {
+                    "sessionId": "192c5b3c0b0642d68eab1a747a59cb6e",
+                    "action": "sendMessage",
+                    "chatInput": "hey"
+                  },
+                  "pairedItem": {
+                    "item": 0
+                  }
+                }
+              ]
+            ]
+          }
+        }
+      ]
+    },
+    "pinData": {},
+    "lastNodeExecuted": "NoOp2"
+  },
+  "executionData": {
+    "contextData": {},
+    "nodeExecutionStack": [],
+    "metadata": {},
+    "waitingExecution": {},
+    "waitingExecutionSource": {}
+  }
+}
@@ -0,0 +1,87 @@
+{
+  "startData": {},
+  "resultData": {
+    "runData": {
+      "When clicking ‘Test workflow’": [
+        {
+          "hints": [],
+          "startTime": 1732882424975,
+          "executionTime": 0,
+          "source": [],
+          "executionStatus": "success",
+          "data": {
+            "main": [
+              [
+                {
+                  "json": {},
+                  "pairedItem": {
+                    "item": 0
+                  }
+                }
+              ]
+            ]
+          }
+        }
+      ],
+      "NoOp": [
+        {
+          "hints": [],
+          "startTime": 1732882424977,
+          "executionTime": 1,
+          "source": [
+            {
+              "previousNode": "When clicking ‘Test workflow’"
+            }
+          ],
+          "executionStatus": "success",
+          "data": {
+            "main": [
+              [
+                {
+                  "json": {},
+                  "pairedItem": {
+                    "item": 0
+                  }
+                }
+              ]
+            ]
+          }
+        }
+      ],
+      "NoOp2": [
+        {
+          "hints": [],
+          "startTime": 1732882424978,
+          "executionTime": 0,
+          "source": [
+            {
+              "previousNode": "NoOp"
+            }
+          ],
+          "executionStatus": "success",
+          "data": {
+            "main": [
+              [
+                {
+                  "json": {},
+                  "pairedItem": {
+                    "item": 0
+                  }
+                }
+              ]
+            ]
+          }
+        }
+      ]
+    },
+    "pinData": {},
+    "lastNodeExecuted": "NoOp2"
+  },
+  "executionData": {
+    "contextData": {},
+    "nodeExecutionStack": [],
+    "metadata": {},
+    "waitingExecution": {},
+    "waitingExecutionSource": {}
+  }
+}
@@ -57,6 +57,12 @@
       "name": "success",
       "value": true,
       "type": "boolean"
+    },
+    {
+      "id": "877d1bf8-31a7-4571-9293-a6837b51d22b",
+      "name": "metric1",
+      "value": 0.1,
+      "type": "number"
     }
   ]
 },
@@ -0,0 +1,76 @@
+{
+  "name": "Multiple Triggers Workflow",
+  "nodes": [
+    {
+      "parameters": {},
+      "type": "n8n-nodes-base.manualTrigger",
+      "typeVersion": 1,
+      "position": [-20, -120],
+      "id": "19562c2d-d2c8-45c8-ae0a-1b1effe29817",
+      "name": "When clicking ‘Test workflow’"
+    },
+    {
+      "parameters": {
+        "options": {}
+      },
+      "type": "@n8n/n8n-nodes-langchain.chatTrigger",
+      "typeVersion": 1.1,
+      "position": [-20, 120],
+      "id": "9b4b833b-56f6-4099-9b7d-5e94b75a735c",
+      "name": "When chat message received",
+      "webhookId": "8aeccd03-d45f-48d2-a2c7-1fb8c53d2ad7"
+    },
+    {
+      "parameters": {},
+      "type": "n8n-nodes-base.noOp",
+      "typeVersion": 1,
+      "position": [260, -20],
+      "id": "d3ab7426-11e7-4f42-9a57-11b8de019783",
+      "name": "NoOp"
+    },
+    {
+      "parameters": {},
+      "type": "n8n-nodes-base.noOp",
+      "typeVersion": 1,
+      "position": [480, -20],
+      "id": "fb73bed6-ec2a-4283-b564-c96730b94889",
+      "name": "NoOp2"
+    }
+  ],
+  "connections": {
+    "When clicking ‘Test workflow’": {
+      "main": [
+        [
+          {
+            "node": "NoOp",
+            "type": "main",
+            "index": 0
+          }
+        ]
+      ]
+    },
+    "When chat message received": {
+      "main": [
+        [
+          {
+            "node": "NoOp",
+            "type": "main",
+            "index": 0
+          }
+        ]
+      ]
+    },
+    "NoOp": {
+      "main": [
+        [
+          {
+            "node": "NoOp2",
+            "type": "main",
+            "index": 0
+          }
+        ]
+      ]
+    }
+  },
+  "pinData": {}
+}
@@ -2,15 +2,17 @@ import type { SelectQueryBuilder } from '@n8n/typeorm';
 import { stringify } from 'flatted';
 import { readFileSync } from 'fs';
 import { mock, mockDeep } from 'jest-mock-extended';
-import type { IRun } from 'n8n-workflow';
+import type { GenericValue, IRun } from 'n8n-workflow';
 import path from 'path';
 
 import type { ActiveExecutions } from '@/active-executions';
 import type { ExecutionEntity } from '@/databases/entities/execution-entity';
 import type { TestDefinition } from '@/databases/entities/test-definition.ee';
+import type { TestMetric } from '@/databases/entities/test-metric.ee';
 import type { TestRun } from '@/databases/entities/test-run.ee';
 import type { User } from '@/databases/entities/user';
 import type { ExecutionRepository } from '@/databases/repositories/execution.repository';
+import type { TestMetricRepository } from '@/databases/repositories/test-metric.repository.ee';
 import type { TestRunRepository } from '@/databases/repositories/test-run.repository.ee';
 import type { WorkflowRepository } from '@/databases/repositories/workflow.repository';
 import type { WorkflowRunner } from '@/workflow-runner';
@@ -58,12 +60,38 @@ function mockExecutionData() {
 	});
 }
 
+function mockEvaluationExecutionData(metrics: Record<string, GenericValue>) {
+	return mock<IRun>({
+		data: {
+			resultData: {
+				lastNodeExecuted: 'lastNode',
+				runData: {
+					lastNode: [
+						{
+							data: {
+								main: [
+									[
+										{
+											json: metrics,
+										},
+									],
+								],
+							},
+						},
+					],
+				},
+			},
+		},
+	});
+}
+
 describe('TestRunnerService', () => {
 	const executionRepository = mock<ExecutionRepository>();
 	const workflowRepository = mock<WorkflowRepository>();
 	const workflowRunner = mock<WorkflowRunner>();
 	const activeExecutions = mock<ActiveExecutions>();
 	const testRunRepository = mock<TestRunRepository>();
+	const testMetricRepository = mock<TestMetricRepository>();
 
 	beforeEach(() => {
 		const executionsQbMock = mockDeep<SelectQueryBuilder<ExecutionEntity>>({
@@ -80,6 +108,11 @@ describe('TestRunnerService', () => {
 			.mockResolvedValueOnce(executionMocks[1]);
 
 		testRunRepository.createTestRun.mockResolvedValue(mock<TestRun>({ id: 'test-run-id' }));
+
+		testMetricRepository.find.mockResolvedValue([
+			mock<TestMetric>({ name: 'metric1' }),
+			mock<TestMetric>({ name: 'metric2' }),
+		]);
 	});
 
 	afterEach(() => {
@@ -97,6 +130,7 @@ describe('TestRunnerService', () => {
 			executionRepository,
 			activeExecutions,
 			testRunRepository,
+			testMetricRepository,
 		);
 
 		expect(testRunnerService).toBeInstanceOf(TestRunnerService);
@@ -109,6 +143,7 @@ describe('TestRunnerService', () => {
 			executionRepository,
 			activeExecutions,
 			testRunRepository,
+			testMetricRepository,
 		);
 
 		workflowRepository.findById.calledWith('workflow-under-test-id').mockResolvedValueOnce({
@@ -143,6 +178,7 @@ describe('TestRunnerService', () => {
 			executionRepository,
 			activeExecutions,
 			testRunRepository,
+			testMetricRepository,
 		);
 
 		workflowRepository.findById.calledWith('workflow-under-test-id').mockResolvedValueOnce({
@@ -166,17 +202,17 @@ describe('TestRunnerService', () => {
 			.mockResolvedValue(mockExecutionData());
 
 		activeExecutions.getPostExecutePromise
-			.calledWith('some-execution-id-2')
+			.calledWith('some-execution-id-3')
 			.mockResolvedValue(mockExecutionData());
 
 		// Mock executions of evaluation workflow
 		activeExecutions.getPostExecutePromise
-			.calledWith('some-execution-id-3')
-			.mockResolvedValue(mockExecutionData());
+			.calledWith('some-execution-id-2')
+			.mockResolvedValue(mockEvaluationExecutionData({ metric1: 1, metric2: 0 }));
 
 		activeExecutions.getPostExecutePromise
 			.calledWith('some-execution-id-4')
-			.mockResolvedValue(mockExecutionData());
+			.mockResolvedValue(mockEvaluationExecutionData({ metric1: 0.5 }));
 
 		await testRunnerService.runTest(
 			mock<User>(),
@@ -225,7 +261,8 @@ describe('TestRunnerService', () => {
 		expect(testRunRepository.markAsRunning).toHaveBeenCalledWith('test-run-id');
 		expect(testRunRepository.markAsCompleted).toHaveBeenCalledTimes(1);
 		expect(testRunRepository.markAsCompleted).toHaveBeenCalledWith('test-run-id', {
-			success: false,
+			metric1: 0.75,
+			metric2: 0,
 		});
 	});
 });
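The `mockEvaluationExecutionData` helper above builds just enough of an `IRun` for the service to read metrics from the first output item of the last executed node. A rough sketch of the shape it relies on (field access mirrors the mock; nothing beyond it is implied):

const run = mockEvaluationExecutionData({ metric1: 1, metric2: 0 });
const lastNode = run.data.resultData.lastNodeExecuted; // 'lastNode' in the mock
const firstItem = run.data.resultData.runData[lastNode!][0].data?.main?.[0]?.[0];
// firstItem?.json is { metric1: 1, metric2: 0 }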
@@ -0,0 +1,32 @@
+import type { IDataObject } from 'n8n-workflow';
+
+export class EvaluationMetrics {
+	private readonly rawMetricsByName = new Map<string, number[]>();
+
+	constructor(private readonly metricNames: Set<string>) {
+		for (const metricName of metricNames) {
+			this.rawMetricsByName.set(metricName, []);
+		}
+	}
+
+	addResults(result: IDataObject) {
+		for (const [metricName, metricValue] of Object.entries(result)) {
+			if (typeof metricValue === 'number' && this.metricNames.has(metricName)) {
+				this.rawMetricsByName.get(metricName)!.push(metricValue);
+			}
+		}
+	}
+
+	getAggregatedMetrics() {
+		const aggregatedMetrics: Record<string, number> = {};
+
+		for (const [metricName, metricValues] of this.rawMetricsByName.entries()) {
+			if (metricValues.length > 0) {
+				const metricSum = metricValues.reduce((acc, val) => acc + val, 0);
+				aggregatedMetrics[metricName] = metricSum / metricValues.length;
+			}
+		}
+
+		return aggregatedMetrics;
+	}
+}
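A short usage sketch of the class above, mirroring its unit tests: only metrics registered up front, and only numeric values, contribute to the averages (the metric names here are illustrative):

const metrics = new EvaluationMetrics(new Set(['accuracy', 'latency']));

metrics.addResults({ accuracy: 1, latency: 200, irrelevant: 'ignored' });
metrics.addResults({ accuracy: 0.5, latency: '300' }); // non-numeric latency value is skipped

metrics.getAggregatedMetrics(); // { accuracy: 0.75, latency: 200 }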
@@ -1,9 +1,9 @@
 import { parse } from 'flatted';
 import type {
 	IDataObject,
-	IPinData,
 	IRun,
 	IRunData,
+	IRunExecutionData,
 	IWorkflowExecutionDataProcess,
 } from 'n8n-workflow';
 import assert from 'node:assert';
@@ -15,12 +15,15 @@ import type { TestDefinition } from '@/databases/entities/test-definition.ee';
 import type { User } from '@/databases/entities/user';
 import type { WorkflowEntity } from '@/databases/entities/workflow-entity';
 import { ExecutionRepository } from '@/databases/repositories/execution.repository';
+import { TestMetricRepository } from '@/databases/repositories/test-metric.repository.ee';
 import { TestRunRepository } from '@/databases/repositories/test-run.repository.ee';
 import { WorkflowRepository } from '@/databases/repositories/workflow.repository';
-import type { IExecutionResponse } from '@/interfaces';
 import { getRunData } from '@/workflow-execute-additional-data';
 import { WorkflowRunner } from '@/workflow-runner';
 
+import { EvaluationMetrics } from './evaluation-metrics.ee';
+import { createPinData, getPastExecutionStartNode } from './utils.ee';
+
 /**
  * This service orchestrates the running of test cases.
  * It uses the test definitions to find
@@ -39,45 +42,33 @@ export class TestRunnerService {
 		private readonly executionRepository: ExecutionRepository,
 		private readonly activeExecutions: ActiveExecutions,
 		private readonly testRunRepository: TestRunRepository,
+		private readonly testMetricRepository: TestMetricRepository,
 	) {}
 
-	/**
-	 * Extracts the execution data from the past execution.
-	 * Creates a pin data object from the past execution data
-	 * for the given workflow.
-	 * For now, it only pins trigger nodes.
-	 */
-	private createTestDataFromExecution(workflow: WorkflowEntity, execution: ExecutionEntity) {
-		const executionData = parse(execution.executionData.data) as IExecutionResponse['data'];
-
-		const triggerNodes = workflow.nodes.filter((node) => /trigger$/i.test(node.type));
-
-		const pinData = {} as IPinData;
-
-		for (const triggerNode of triggerNodes) {
-			const triggerData = executionData.resultData.runData[triggerNode.name];
-			if (triggerData?.[0]?.data?.main?.[0]) {
-				pinData[triggerNode.name] = triggerData[0]?.data?.main?.[0];
-			}
-		}
-
-		return { pinData, executionData };
-	}
-
 	/**
 	 * Runs a test case with the given pin data.
 	 * Waits for the workflow under test to finish execution.
 	 */
 	private async runTestCase(
 		workflow: WorkflowEntity,
-		testCasePinData: IPinData,
+		pastExecutionData: IRunExecutionData,
 		userId: string,
 	): Promise<IRun | undefined> {
+		// Create pin data from the past execution data
+		const pinData = createPinData(workflow, pastExecutionData);
+
+		// Determine the start node of the past execution
+		const pastExecutionStartNode = getPastExecutionStartNode(pastExecutionData);
+
 		// Prepare the data to run the workflow
 		const data: IWorkflowExecutionDataProcess = {
+			destinationNode: pastExecutionData.startData?.destinationNode,
+			startNodes: pastExecutionStartNode
+				? [{ name: pastExecutionStartNode, sourceData: null }]
+				: undefined,
 			executionMode: 'evaluation',
 			runData: {},
-			pinData: testCasePinData,
+			pinData,
 			workflowData: workflow,
 			partialExecutionVersion: '-1',
 			userId,
@@ -125,6 +116,11 @@ export class TestRunnerService {
 		return await executePromise;
 	}
 
+	/**
+	 * Evaluation result is the first item in the output of the last node
+	 * executed in the evaluation workflow. Defaults to an empty object
+	 * in case the node doesn't produce any output items.
+	 */
 	private extractEvaluationResult(execution: IRun): IDataObject {
 		const lastNodeExecuted = execution.data.resultData.lastNodeExecuted;
 		assert(lastNodeExecuted, 'Could not find the last node executed in evaluation workflow');
@@ -136,6 +132,21 @@ export class TestRunnerService {
 		return mainConnectionData?.[0]?.json ?? {};
 	}
 
+	/**
+	 * Get the metrics to collect from the evaluation workflow execution results.
+	 */
+	private async getTestMetricNames(testDefinitionId: string) {
+		const metrics = await this.testMetricRepository.find({
+			where: {
+				testDefinition: {
+					id: testDefinitionId,
+				},
+			},
+		});
+
+		return new Set(metrics.map((m) => m.name));
+	}
+
 	/**
 	 * Creates a new test run for the given test definition.
 	 */
@@ -164,11 +175,15 @@ export class TestRunnerService {
 			.andWhere('execution.workflowId = :workflowId', { workflowId: test.workflowId })
 			.getMany();
 
+		// Get the metrics to collect from the evaluation workflow
+		const testMetricNames = await this.getTestMetricNames(test.id);
+
 		// 2. Run over all the test cases
 
 		await this.testRunRepository.markAsRunning(testRun.id);
 
-		const metrics = [];
+		// Object to collect the results of the evaluation workflow executions
+		const metrics = new EvaluationMetrics(testMetricNames);
 
 		for (const { id: pastExecutionId } of pastExecutions) {
 			// Fetch past execution with data
@@ -178,11 +193,10 @@ export class TestRunnerService {
 			});
 			assert(pastExecution, 'Execution not found');
 
-			const testData = this.createTestDataFromExecution(workflow, pastExecution);
-			const { pinData, executionData } = testData;
+			const executionData = parse(pastExecution.executionData.data) as IRunExecutionData;
 
 			// Run the test case and wait for it to finish
-			const testCaseExecution = await this.runTestCase(workflow, pinData, user.id);
+			const testCaseExecution = await this.runTestCase(workflow, executionData, user.id);
 
 			// In case of a permission check issue, the test case execution will be undefined.
 			// Skip them and continue with the next test case
@@ -205,12 +219,10 @@ export class TestRunnerService {
 			assert(evalExecution);
 
 			// Extract the output of the last node executed in the evaluation workflow
-			metrics.push(this.extractEvaluationResult(evalExecution));
+			metrics.addResults(this.extractEvaluationResult(evalExecution));
 		}
 
-		// TODO: 3. Aggregate the results
-		// Now we just set success to true if all the test cases passed
-		const aggregatedMetrics = { success: metrics.every((metric) => metric.success) };
+		const aggregatedMetrics = metrics.getAggregatedMetrics();
 
 		await this.testRunRepository.markAsCompleted(testRun.id, aggregatedMetrics);
 	}
packages/cli/src/evaluation/test-runner/utils.ee.ts (new file, 34 lines)
@@ -0,0 +1,34 @@
+import type { IRunExecutionData, IPinData } from 'n8n-workflow';
+
+import type { WorkflowEntity } from '@/databases/entities/workflow-entity';
+
+/**
+ * Extracts the execution data from the past execution
+ * and creates a pin data object from it for the given workflow.
+ * For now, it only pins trigger nodes.
+ */
+export function createPinData(workflow: WorkflowEntity, executionData: IRunExecutionData) {
+	const triggerNodes = workflow.nodes.filter((node) => /trigger$/i.test(node.type));
+
+	const pinData = {} as IPinData;
+
+	for (const triggerNode of triggerNodes) {
+		const triggerData = executionData.resultData.runData[triggerNode.name];
+		if (triggerData?.[0]?.data?.main?.[0]) {
+			pinData[triggerNode.name] = triggerData[0]?.data?.main?.[0];
+		}
+	}
+
+	return pinData;
+}
+
+/**
+ * Returns the start node of the past execution.
+ * The start node is the node that has no source and has run data.
+ */
+export function getPastExecutionStartNode(executionData: IRunExecutionData) {
+	return Object.keys(executionData.resultData.runData).find((nodeName) => {
+		const data = executionData.resultData.runData[nodeName];
+		return !data[0].source || data[0].source.length === 0 || data[0].source[0] === null;
+	});
+}
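A sketch of how the two helpers above are combined when replaying a past execution, as `runTestCase` does; `workflow` and `pastExecutionData` are placeholders for values loaded elsewhere:

// Pin every trigger node with the data it produced in the past execution,
// then restart from the node that originally had no source.
const pinData = createPinData(workflow, pastExecutionData);
const startNodeName = getPastExecutionStartNode(pastExecutionData);

const runOptions = {
	pinData,
	startNodes: startNodeName ? [{ name: startNodeName, sourceData: null }] : undefined,
};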
@@ -251,7 +251,7 @@ export class ExecutionService {
 				requestFilters = requestFiltersRaw as IGetExecutionsQueryFilter;
 			}
 		} catch (error) {
-			throw new InternalServerError('Parameter "filter" contained invalid JSON string.');
+			throw new InternalServerError('Parameter "filter" contained invalid JSON string.', error);
 		}
 	}
 
@@ -4,6 +4,7 @@ import { v4 as uuid } from 'uuid';
 
 import { AuthUserRepository } from '@/databases/repositories/auth-user.repository';
 import { InvalidMfaCodeError } from '@/errors/response-errors/invalid-mfa-code.error';
+import { InvalidMfaRecoveryCodeError } from '@/errors/response-errors/invalid-mfa-recovery-code-error';
 
 import { TOTPService } from './totp.service';
 
@@ -56,13 +57,13 @@ export class MfaService {
 
 	async validateMfa(
 		userId: string,
-		mfaToken: string | undefined,
+		mfaCode: string | undefined,
 		mfaRecoveryCode: string | undefined,
 	) {
 		const user = await this.authUserRepository.findOneByOrFail({ id: userId });
-		if (mfaToken) {
+		if (mfaCode) {
 			const decryptedSecret = this.cipher.decrypt(user.mfaSecret!);
-			return this.totp.verifySecret({ secret: decryptedSecret, token: mfaToken });
+			return this.totp.verifySecret({ secret: decryptedSecret, mfaCode });
 		}
 
 		if (mfaRecoveryCode) {
@@ -85,12 +86,27 @@ export class MfaService {
 		return await this.authUserRepository.save(user);
 	}
 
-	async disableMfa(userId: string, mfaToken: string) {
-		const isValidToken = await this.validateMfa(userId, mfaToken, undefined);
+	async disableMfaWithMfaCode(userId: string, mfaCode: string) {
+		const isValidToken = await this.validateMfa(userId, mfaCode, undefined);
 
 		if (!isValidToken) {
 			throw new InvalidMfaCodeError();
 		}
 
+		await this.disableMfaForUser(userId);
+	}
+
+	async disableMfaWithRecoveryCode(userId: string, recoveryCode: string) {
+		const isValidToken = await this.validateMfa(userId, undefined, recoveryCode);
+
+		if (!isValidToken) {
+			throw new InvalidMfaRecoveryCodeError();
+		}
+
+		await this.disableMfaForUser(userId);
+	}
+
+	private async disableMfaForUser(userId: string) {
 		await this.authUserRepository.update(userId, {
 			mfaEnabled: false,
 			mfaSecret: null,
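With the split above, callers choose the disable path explicitly. A rough sketch of the expected wiring; the surrounding controller and its variables are assumed, not part of this diff:

if (mfaCode) {
	await mfaService.disableMfaWithMfaCode(user.id, mfaCode); // throws InvalidMfaCodeError on mismatch
} else if (mfaRecoveryCode) {
	await mfaService.disableMfaWithRecoveryCode(user.id, mfaRecoveryCode); // throws InvalidMfaRecoveryCodeError
}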
@@ -23,10 +23,14 @@ export class TOTPService {
 		}).toString();
 	}
 
-	verifySecret({ secret, token, window = 2 }: { secret: string; token: string; window?: number }) {
+	verifySecret({
+		secret,
+		mfaCode,
+		window = 2,
+	}: { secret: string; mfaCode: string; window?: number }) {
 		return new OTPAuth.TOTP({
 			secret: OTPAuth.Secret.fromBase32(secret),
-		}).validate({ token, window }) === null
+		}).validate({ token: mfaCode, window }) === null
 			? false
 			: true;
 	}
@@ -228,7 +228,7 @@ export declare namespace PasswordResetRequest {
 	export type NewPassword = AuthlessRequest<
 		{},
 		{},
-		Pick<PublicUser, 'password'> & { token?: string; userId?: string; mfaToken?: string }
+		Pick<PublicUser, 'password'> & { token?: string; userId?: string; mfaCode?: string }
 	>;
 }
 
@@ -306,7 +306,7 @@ export type LoginRequest = AuthlessRequest<
 	{
 		email: string;
 		password: string;
-		mfaToken?: string;
+		mfaCode?: string;
 		mfaRecoveryCode?: string;
 	}
 >;
@@ -316,9 +316,9 @@ export type LoginRequest = AuthlessRequest<
 // ----------------------------------
 
 export declare namespace MFA {
-	type Verify = AuthenticatedRequest<{}, {}, { token: string }, {}>;
-	type Activate = AuthenticatedRequest<{}, {}, { token: string }, {}>;
-	type Disable = AuthenticatedRequest<{}, {}, { token: string }, {}>;
+	type Verify = AuthenticatedRequest<{}, {}, { mfaCode: string }, {}>;
+	type Activate = AuthenticatedRequest<{}, {}, { mfaCode: string }, {}>;
+	type Disable = AuthenticatedRequest<{}, {}, { mfaCode?: string; mfaRecoveryCode?: string }, {}>;
 	type Config = AuthenticatedRequest<{}, {}, { login: { enabled: boolean } }, {}>;
 	type ValidateRecoveryCode = AuthenticatedRequest<
 		{},
@@ -76,6 +76,7 @@ describe('TaskRunnerProcess', () => {
 		'N8N_VERSION',
 		'ENVIRONMENT',
 		'DEPLOYMENT_NAME',
+		'GENERIC_TIMEZONE',
 	])('should propagate %s from env as is', async (envVar) => {
 		jest.spyOn(authService, 'createGrantToken').mockResolvedValue('grantToken');
 		process.env[envVar] = 'custom value';
@@ -54,6 +54,7 @@ export class TaskRunnerProcess extends TypedEmitter<TaskRunnerProcessEventMap> {
 
 	private readonly passthroughEnvVars = [
 		'PATH',
+		'GENERIC_TIMEZONE',
 		'NODE_FUNCTION_ALLOW_BUILTIN',
 		'NODE_FUNCTION_ALLOW_EXTERNAL',
 		'N8N_SENTRY_DSN',
@@ -119,7 +119,7 @@ export class InstanceRiskReporter implements RiskReporter {
 		node: WorkflowEntity['nodes'][number];
 		workflow: WorkflowEntity;
 	}) {
-		const childNodeNames = workflow.connections[node.name]?.main[0].map((i) => i.node);
+		const childNodeNames = workflow.connections[node.name]?.main[0]?.map((i) => i.node);
 
 		if (!childNodeNames) return false;
 
@@ -231,6 +231,7 @@ export class FrontendService {
 				blockFileAccessToN8nFiles: this.securityConfig.blockFileAccessToN8nFiles,
 			},
 			betaFeatures: this.frontendConfig.betaFeatures,
+			virtualSchemaView: config.getEnv('virtualSchemaView'),
 		};
 	}
 
@@ -1,72 +0,0 @@
-// eslint-disable-next-line n8n-local-rules/misplaced-n8n-typeorm-import
-import { In } from '@n8n/typeorm';
-import { Service } from 'typedi';
-
-import type { User } from '@/databases/entities/user';
-import { SharedWorkflowRepository } from '@/databases/repositories/shared-workflow.repository';
-import { WorkflowRepository } from '@/databases/repositories/workflow.repository';
-import { UserService } from '@/services/user.service';
-
-@Service()
-export class UserOnboardingService {
-	constructor(
-		private readonly sharedWorkflowRepository: SharedWorkflowRepository,
-		private readonly workflowRepository: WorkflowRepository,
-		private readonly userService: UserService,
-	) {}
-
-	/**
-	 * Check if user owns more than 15 workflows or more than 2 workflows with at least 2 nodes.
-	 * If user does, set flag in its settings.
-	 */
-	async isBelowThreshold(user: User): Promise<boolean> {
-		let belowThreshold = true;
-		const skippedTypes = ['n8n-nodes-base.start', 'n8n-nodes-base.stickyNote'];
-
-		const ownedWorkflowsIds = await this.sharedWorkflowRepository
-			.find({
-				where: {
-					project: {
-						projectRelations: {
-							role: 'project:personalOwner',
-							userId: user.id,
-						},
-					},
-					role: 'workflow:owner',
-				},
-				select: ['workflowId'],
-			})
-			.then((ownedWorkflows) => ownedWorkflows.map(({ workflowId }) => workflowId));
-
-		if (ownedWorkflowsIds.length > 15) {
-			belowThreshold = false;
-		} else {
-			// just fetch workflows' nodes to keep memory footprint low
-			const workflows = await this.workflowRepository.find({
-				where: { id: In(ownedWorkflowsIds) },
-				select: ['nodes'],
-			});
-
-			// valid workflow: 2+ nodes without start node
-			const validWorkflowCount = workflows.reduce((counter, workflow) => {
-				if (counter <= 2 && workflow.nodes.length > 2) {
-					const nodes = workflow.nodes.filter((node) => !skippedTypes.includes(node.type));
-					if (nodes.length >= 2) {
-						return counter + 1;
-					}
-				}
-				return counter;
-			}, 0);
-
-			// more than 2 valid workflows required
-			belowThreshold = validWorkflowCount <= 2;
-		}
-
-		// user is above threshold --> set flag in settings
-		if (!belowThreshold) {
-			void this.userService.updateSettings(user.id, { isOnboarded: true });
-		}
-
-		return belowThreshold;
-	}
-}
@@ -1,5 +1,5 @@
 import type { IUserSettings } from 'n8n-workflow';
-import { ApplicationError, ErrorReporterProxy as ErrorReporter } from 'n8n-workflow';
+import { ApplicationError } from 'n8n-workflow';
 import { Service } from 'typedi';
 
 import type { User, AssignableRole } from '@/databases/entities/user';
@@ -213,9 +213,8 @@ export class UserService {
 				),
 			);
 		} catch (error) {
-			ErrorReporter.error(error);
 			this.logger.error('Failed to create user shells', { userShells: createdUsers });
-			throw new InternalServerError('An error occurred during user creation');
+			throw new InternalServerError('An error occurred during user creation', error);
 		}
 
 		pendingUsersToInvite.forEach(({ email, id }) => createdUsers.set(email, id));
@@ -125,7 +125,7 @@ export class UserManagementMailer {
 
 			const error = toError(e);
 
-			throw new InternalServerError(`Please contact your administrator: ${error.message}`);
+			throw new InternalServerError(`Please contact your administrator: ${error.message}`, e);
 		}
 	}
 
@@ -180,7 +180,7 @@ export class UserManagementMailer {
 
 			const error = toError(e);
 
-			throw new InternalServerError(`Please contact your administrator: ${error.message}`);
+			throw new InternalServerError(`Please contact your administrator: ${error.message}`, e);
 		}
 	}
 
@@ -58,7 +58,7 @@ export function isStringArray(value: unknown): value is string[] {
 
 export const isIntegerString = (value: string) => /^\d+$/.test(value);
 
-export function isObjectLiteral(item: unknown): item is { [key: string]: string } {
+export function isObjectLiteral(item: unknown): item is { [key: string]: unknown } {
 	return typeof item === 'object' && item !== null && !Array.isArray(item);
 }
 
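Loosening the guarded type to `unknown` values keeps the guard honest: callers must narrow each property before using it. A tiny sketch:

const value: unknown = JSON.parse('{"message": 42}');

if (isObjectLiteral(value)) {
	// value.message is `unknown` now, not `string`, so an explicit check is still required.
	if (typeof value.message === 'string') value.message.toUpperCase();
}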
@@ -1,9 +1,5 @@
 import { InstanceSettings } from 'n8n-core';
-import {
-	ApplicationError,
-	ErrorReporterProxy as ErrorReporter,
-	type IWorkflowExecutionDataProcess,
-} from 'n8n-workflow';
+import { ApplicationError, type IWorkflowExecutionDataProcess } from 'n8n-workflow';
 import { Service } from 'typedi';
 
 import { ExecutionRepository } from '@/databases/repositories/execution.repository';
@@ -88,7 +84,7 @@ export class WaitTracker {
 		this.waitingExecutions[executionId] = {
 			executionId,
 			timer: setTimeout(() => {
-				this.startExecution(executionId);
+				void this.startExecution(executionId);
 			}, triggerTime),
 		};
 	}
@@ -103,46 +99,40 @@ export class WaitTracker {
 		delete this.waitingExecutions[executionId];
 	}
 
-	startExecution(executionId: string) {
+	async startExecution(executionId: string) {
 		this.logger.debug(`Resuming execution ${executionId}`, { executionId });
 		delete this.waitingExecutions[executionId];
 
-		(async () => {
-			// Get the data to execute
-			const fullExecutionData = await this.executionRepository.findSingleExecution(executionId, {
-				includeData: true,
-				unflattenData: true,
-			});
-
-			if (!fullExecutionData) {
-				throw new ApplicationError('Execution does not exist.', { extra: { executionId } });
-			}
-			if (fullExecutionData.finished) {
-				throw new ApplicationError('The execution did succeed and can so not be started again.');
-			}
-
-			if (!fullExecutionData.workflowData.id) {
-				throw new ApplicationError('Only saved workflows can be resumed.');
-			}
-			const workflowId = fullExecutionData.workflowData.id;
-			const project = await this.ownershipService.getWorkflowProjectCached(workflowId);
-
-			const data: IWorkflowExecutionDataProcess = {
-				executionMode: fullExecutionData.mode,
-				executionData: fullExecutionData.data,
-				workflowData: fullExecutionData.workflowData,
-				projectId: project.id,
-			};
-
-			// Start the execution again
-			await this.workflowRunner.run(data, false, false, executionId);
-		})().catch((error: Error) => {
-			ErrorReporter.error(error);
-			this.logger.error(
-				`There was a problem starting the waiting execution with id "${executionId}": "${error.message}"`,
-				{ executionId },
-			);
-		});
+		// Get the data to execute
+		const fullExecutionData = await this.executionRepository.findSingleExecution(executionId, {
+			includeData: true,
+			unflattenData: true,
+		});
+
+		if (!fullExecutionData) {
+			throw new ApplicationError('Execution does not exist.', { extra: { executionId } });
+		}
+		if (fullExecutionData.finished) {
+			throw new ApplicationError('The execution did succeed and can so not be started again.');
+		}
+
+		if (!fullExecutionData.workflowData.id) {
+			throw new ApplicationError('Only saved workflows can be resumed.');
+		}
+
+		const workflowId = fullExecutionData.workflowData.id;
+		const project = await this.ownershipService.getWorkflowProjectCached(workflowId);
+
+		const data: IWorkflowExecutionDataProcess = {
+			executionMode: fullExecutionData.mode,
+			executionData: fullExecutionData.data,
+			workflowData: fullExecutionData.workflowData,
+			projectId: project.id,
+			pushRef: fullExecutionData.data.pushRef,
+		};
+
+		// Start the execution again
+		await this.workflowRunner.run(data, false, false, executionId);
 	}
 
 	stopTracking() {
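Since `startExecution` is now `async` and the old `.catch` wrapper is gone, the timer fires it with `void`; a caller that wants the failure has to await it. A minimal sketch, assuming a `waitTracker` instance and a `logger` are in scope (not shown in this diff):

try {
	await waitTracker.startExecution(executionId);
} catch (error) {
	logger.error(`Could not resume waiting execution ${executionId}`, { error });
}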
@@ -1,6 +1,11 @@
 import type * as express from 'express';
 import { mock } from 'jest-mock-extended';
-import type { IWebhookData, IWorkflowExecuteAdditionalData, Workflow } from 'n8n-workflow';
+import type { ITaskData } from 'n8n-workflow';
+import {
+	type IWebhookData,
+	type IWorkflowExecuteAdditionalData,
+	type Workflow,
+} from 'n8n-workflow';
 import { v4 as uuid } from 'uuid';
 
 import { generateNanoId } from '@/databases/utils/generators';
@@ -43,12 +48,16 @@ describe('TestWebhooks', () => {
 		jest.useFakeTimers();
 	});
 
+	beforeEach(() => {
+		jest.clearAllMocks();
+	});
+
 	describe('needsWebhook()', () => {
-		const args: Parameters<typeof testWebhooks.needsWebhook> = [
+		const args: Parameters<typeof testWebhooks.needsWebhook>[0] = {
 			userId,
 			workflowEntity,
-			mock<IWorkflowExecuteAdditionalData>(),
-		];
+			additionalData: mock<IWorkflowExecuteAdditionalData>(),
+		};
 
 		test('if webhook is needed, should register then create webhook and return true', async () => {
 			const workflow = mock<Workflow>();
@@ -56,7 +65,7 @@ describe('TestWebhooks', () => {
 			jest.spyOn(testWebhooks, 'toWorkflow').mockReturnValueOnce(workflow);
 			jest.spyOn(WebhookHelpers, 'getWorkflowWebhooks').mockReturnValue([webhook]);
 
-			const needsWebhook = await testWebhooks.needsWebhook(...args);
+			const needsWebhook = await testWebhooks.needsWebhook(args);
 
 			const [registerOrder] = registrations.register.mock.invocationCallOrder;
 			const [createOrder] = workflow.createWebhookIfNotExists.mock.invocationCallOrder;
@@ -72,7 +81,7 @@ describe('TestWebhooks', () => {
 			jest.spyOn(registrations, 'register').mockRejectedValueOnce(new Error(msg));
 			registrations.getAllRegistrations.mockResolvedValue([]);
 
-			const needsWebhook = testWebhooks.needsWebhook(...args);
+			const needsWebhook = testWebhooks.needsWebhook(args);
 
 			await expect(needsWebhook).rejects.toThrowError(msg);
 		});
@@ -81,10 +90,55 @@ describe('TestWebhooks', () => {
 			webhook.webhookDescription.restartWebhook = true;
 			jest.spyOn(WebhookHelpers, 'getWorkflowWebhooks').mockReturnValue([webhook]);
 
-			const result = await testWebhooks.needsWebhook(...args);
+			const result = await testWebhooks.needsWebhook(args);
 
 			expect(result).toBe(false);
 		});
+
+		test('returns false if a triggerToStartFrom with triggerData is given', async () => {
+			const workflow = mock<Workflow>();
+			jest.spyOn(testWebhooks, 'toWorkflow').mockReturnValueOnce(workflow);
+			jest.spyOn(WebhookHelpers, 'getWorkflowWebhooks').mockReturnValue([webhook]);
+
+			const needsWebhook = await testWebhooks.needsWebhook({
+				...args,
+				triggerToStartFrom: {
+					name: 'trigger',
+					data: mock<ITaskData>(),
+				},
+			});
+
+			expect(needsWebhook).toBe(false);
+		});
+
+		test('returns true, registers and then creates webhook if triggerToStartFrom is given with no triggerData', async () => {
+			// ARRANGE
+			const workflow = mock<Workflow>();
+			const webhook2 = mock<IWebhookData>({
+				node: 'trigger',
+				httpMethod,
+				path,
+				workflowId: workflowEntity.id,
+				userId,
+			});
+			jest.spyOn(testWebhooks, 'toWorkflow').mockReturnValueOnce(workflow);
+			jest.spyOn(WebhookHelpers, 'getWorkflowWebhooks').mockReturnValue([webhook, webhook2]);
+
+			// ACT
+			const needsWebhook = await testWebhooks.needsWebhook({
+				...args,
+				triggerToStartFrom: { name: 'trigger' },
+			});
+
+			// ASSERT
+			const [registerOrder] = registrations.register.mock.invocationCallOrder;
+			const [createOrder] = workflow.createWebhookIfNotExists.mock.invocationCallOrder;
+
+			expect(registerOrder).toBeLessThan(createOrder);
+			expect(registrations.register.mock.calls[0][0].webhook.node).toBe(webhook2.node);
+			expect(workflow.createWebhookIfNotExists.mock.calls[0][0].node).toBe(webhook2.node);
+			expect(needsWebhook).toBe(true);
+		});
 	});
 
 	describe('executeWebhook()', () => {
@@ -23,6 +23,7 @@ import type { TestWebhookRegistration } from '@/webhooks/test-webhook-registrati
 import { TestWebhookRegistrationsService } from '@/webhooks/test-webhook-registrations.service';
 import * as WebhookHelpers from '@/webhooks/webhook-helpers';
 import * as WorkflowExecuteAdditionalData from '@/workflow-execute-additional-data';
+import type { WorkflowRequest } from '@/workflows/workflow.request';
 import type {
   IWebhookResponseCallbackData,
@@ -218,25 +219,48 @@ export class TestWebhooks implements IWebhookManager {
   * Return whether activating a workflow requires listening for webhook calls.
   * For every webhook call to listen for, also activate the webhook.
   */
-  async needsWebhook(
-    userId: string,
-    workflowEntity: IWorkflowDb,
-    additionalData: IWorkflowExecuteAdditionalData,
-    runData?: IRunData,
-    pushRef?: string,
-    destinationNode?: string,
-  ) {
+  async needsWebhook(options: {
+    userId: string;
+    workflowEntity: IWorkflowDb;
+    additionalData: IWorkflowExecuteAdditionalData;
+    runData?: IRunData;
+    pushRef?: string;
+    destinationNode?: string;
+    triggerToStartFrom?: WorkflowRequest.ManualRunPayload['triggerToStartFrom'];
+  }) {
+    const {
+      userId,
+      workflowEntity,
+      additionalData,
+      runData,
+      pushRef,
+      destinationNode,
+      triggerToStartFrom,
+    } = options;
+
     if (!workflowEntity.id) throw new WorkflowMissingIdError(workflowEntity);

     const workflow = this.toWorkflow(workflowEntity);

-    const webhooks = WebhookHelpers.getWorkflowWebhooks(
+    let webhooks = WebhookHelpers.getWorkflowWebhooks(
       workflow,
       additionalData,
       destinationNode,
       true,
     );

+    // If we have a preferred trigger with data, we don't have to listen for a
+    // webhook.
+    if (triggerToStartFrom?.data) {
+      return false;
+    }
+
+    // If we have a preferred trigger without data we only want to listen for
+    // that trigger, not the other ones.
+    if (triggerToStartFrom) {
+      webhooks = webhooks.filter((w) => w.node === triggerToStartFrom.name);
+    }
+
     if (!webhooks.some((w) => w.webhookDescription.restartWebhook !== true)) {
       return false; // no webhooks found to start a workflow
     }
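A minimal, self-contained TypeScript sketch of the decision logic the reworked needsWebhook() applies to triggerToStartFrom. The types and the final length check are simplified stand-ins (the real method also ignores webhooks whose description sets restartWebhook); only the early return and the narrowing to the named trigger are modelled here.

// Sketch only: simplified stand-ins for n8n's ITaskData and IWebhookData shapes.
interface TriggerToStartFrom {
  name: string;
  data?: unknown; // stands in for ITaskData
}

interface WebhookLike {
  node: string;
}

function needsWebhookSketch(
  webhooks: WebhookLike[],
  triggerToStartFrom?: TriggerToStartFrom,
): boolean {
  // A preferred trigger that already carries data: nothing to listen for.
  if (triggerToStartFrom?.data) return false;

  // A preferred trigger without data: only listen for that trigger node.
  const relevant = triggerToStartFrom
    ? webhooks.filter((w) => w.node === triggerToStartFrom.name)
    : webhooks;

  return relevant.length > 0;
}

console.log(needsWebhookSketch([{ node: 'trigger' }, { node: 'other' }], { name: 'trigger' })); // true
console.log(needsWebhookSketch([{ node: 'trigger' }], { name: 'trigger', data: {} })); // false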
@@ -762,7 +762,7 @@ export async function executeWebhook(
         );
       }

-      const internalServerError = new InternalServerError(e.message);
+      const internalServerError = new InternalServerError(e.message, e);
       if (e instanceof ExecutionCancelledError) internalServerError.level = 'warning';
       throw internalServerError;
     });
@@ -52,7 +52,7 @@ import type { IWorkflowErrorData, UpdateExecutionPayload } from '@/interfaces';
 import { NodeTypes } from '@/node-types';
 import { Push } from '@/push';
 import { WorkflowStatisticsService } from '@/services/workflow-statistics.service';
-import { findSubworkflowStart, isWorkflowIdValid } from '@/utils';
+import { findSubworkflowStart, isObjectLiteral, isWorkflowIdValid } from '@/utils';
 import * as WorkflowHelpers from '@/workflow-helpers';

 import { WorkflowRepository } from './databases/repositories/workflow.repository';
@@ -80,11 +80,20 @@ export function objectToError(errorObject: unknown, workflow: Workflow): Error {
   if (errorObject instanceof Error) {
     // If it's already an Error instance, return it as is.
     return errorObject;
-  } else if (errorObject && typeof errorObject === 'object' && 'message' in errorObject) {
+  } else if (
+    isObjectLiteral(errorObject) &&
+    'message' in errorObject &&
+    typeof errorObject.message === 'string'
+  ) {
     // If it's an object with a 'message' property, create a new Error instance.
     let error: Error | undefined;
-    if ('node' in errorObject) {
-      const node = workflow.getNode((errorObject.node as { name: string }).name);
+    if (
+      'node' in errorObject &&
+      isObjectLiteral(errorObject.node) &&
+      typeof errorObject.node.name === 'string'
+    ) {
+      const node = workflow.getNode(errorObject.node.name);

       if (node) {
         error = new NodeOperationError(
           node,
@@ -95,7 +104,7 @@ export function objectToError(errorObject: unknown, workflow: Workflow): Error {
     }

     if (error === undefined) {
-      error = new Error(errorObject.message as string);
+      error = new Error(errorObject.message);
     }

     if ('description' in errorObject) {
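The guards above narrow the unknown errorObject before its message or node.name is read, which is what allows the `as string` and `as { name: string }` casts to be dropped. A small self-contained sketch of the same narrowing pattern; isObjectLiteral here is a local stand-in for the utility imported from @/utils, not its actual implementation.

// Local stand-in for the imported helper; illustrative only.
function isObjectLiteral(value: unknown): value is Record<string, unknown> {
  return typeof value === 'object' && value !== null && !Array.isArray(value);
}

function toErrorSketch(errorObject: unknown): Error {
  if (errorObject instanceof Error) return errorObject;

  if (
    isObjectLiteral(errorObject) &&
    'message' in errorObject &&
    typeof errorObject.message === 'string'
  ) {
    // `message` is known to be a string here, so no cast is needed.
    return new Error(errorObject.message);
  }

  return new Error('Unknown error');
}

console.log(toErrorSketch({ message: 'boom' }).message); // "boom"
console.log(toErrorSketch(42).message); // "Unknown error"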
@@ -2,7 +2,14 @@
 /* eslint-disable @typescript-eslint/no-unsafe-member-access */
 /* eslint-disable @typescript-eslint/no-shadow */
 /* eslint-disable @typescript-eslint/no-unsafe-assignment */
-import { InstanceSettings, WorkflowExecute } from 'n8n-core';
+import * as a from 'assert/strict';
+import {
+  DirectedGraph,
+  InstanceSettings,
+  WorkflowExecute,
+  filterDisabledNodes,
+  recreateNodeExecutionStack,
+} from 'n8n-core';
 import type {
   ExecutionError,
   IDeferredPromise,
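The new assert/strict import is used further down to guarantee that looked-up start nodes exist before they are used. A minimal sketch of that assertion-and-narrowing pattern; the node shape and lookup function are placeholders, the real code calls workflow.getNode().

import * as a from 'assert/strict';

// Placeholder node shape and lookup for the sketch.
type NodeSketch = { name: string };
const getNodeSketch = (name: string): NodeSketch | null =>
  name === 'trigger' ? { name } : null;

const node = getNodeSketch('trigger');
// a.ok() throws an AssertionError when the value is falsy and, because it is
// typed as an assertion function, narrows `node` to non-null afterwards.
a.ok(node, 'Could not find a node named "trigger" in the workflow.');
console.log(node.name); // safe to access: node is NodeSketch here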
@@ -12,6 +19,7 @@ import type {
   WorkflowExecuteMode,
   WorkflowHooks,
   IWorkflowExecutionDataProcess,
+  IRunExecutionData,
 } from 'n8n-workflow';
 import {
   ErrorReporterProxy as ErrorReporter,
@@ -203,6 +211,7 @@ export class WorkflowRunner {
   }

   /** Run the workflow in current process */
+  // eslint-disable-next-line complexity
   private async runMainProcess(
     executionId: string,
     data: IWorkflowExecutionDataProcess,
@@ -286,12 +295,50 @@ export class WorkflowRunner {
           data.executionData,
         );
         workflowExecution = workflowExecute.processRunExecutionData(workflow);
+      } else if (data.triggerToStartFrom?.data && data.startNodes && !data.destinationNode) {
+        this.logger.debug(
+          `Execution ID ${executionId} had triggerToStartFrom. Starting from that trigger.`,
+          { executionId },
+        );
+        const startNodes = data.startNodes.map((data) => {
+          const node = workflow.getNode(data.name);
+          a.ok(node, `Could not find a node named "${data.name}" in the workflow.`);
+          return node;
+        });
+        const runData = { [data.triggerToStartFrom.name]: [data.triggerToStartFrom.data] };
+
+        const { nodeExecutionStack, waitingExecution, waitingExecutionSource } =
+          recreateNodeExecutionStack(
+            filterDisabledNodes(DirectedGraph.fromWorkflow(workflow)),
+            new Set(startNodes),
+            runData,
+            data.pinData ?? {},
+          );
+        const executionData: IRunExecutionData = {
+          resultData: { runData, pinData },
+          executionData: {
+            contextData: {},
+            metadata: {},
+            nodeExecutionStack,
+            waitingExecution,
+            waitingExecutionSource,
+          },
+        };
+
+        const workflowExecute = new WorkflowExecute(additionalData, 'manual', executionData);
+        workflowExecution = workflowExecute.processRunExecutionData(workflow);
       } else if (
         data.runData === undefined ||
         data.startNodes === undefined ||
         data.startNodes.length === 0
       ) {
         // Full Execution
+        // TODO: When the old partial execution logic is removed this block can
+        // be removed and the previous one can be merged into
+        // `workflowExecute.runPartialWorkflow2`.
+        // Partial executions then require either a destination node from which
+        // everything else can be derived, or a triggerToStartFrom with
+        // triggerData.
         this.logger.debug(`Execution ID ${executionId} will run executing all nodes.`, {
           executionId,
         });
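A small self-contained sketch of the run-data object this new branch derives from triggerToStartFrom before handing it to recreateNodeExecutionStack(); the task-data shape is a simplified placeholder for ITaskData and the node name is hypothetical.

// Illustrative shapes only; IRunData and ITaskData are richer in n8n-workflow.
type TaskDataSketch = { startTime: number; executionTime: number };
type RunDataSketch = Record<string, TaskDataSketch[]>;

const triggerToStartFrom = {
  name: 'Webhook Trigger', // hypothetical trigger node name
  data: { startTime: Date.now(), executionTime: 0 } as TaskDataSketch,
};

// Mirrors: const runData = { [data.triggerToStartFrom.name]: [data.triggerToStartFrom.data] };
const runData: RunDataSketch = {
  [triggerToStartFrom.name]: [triggerToStartFrom.data],
};

// The branch seeds resultData.runData with this object so the execution engine
// treats the trigger as already executed and resumes from its downstream nodes.
console.log(Object.keys(runData)); // ['Webhook Trigger']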
@@ -317,8 +364,9 @@ export class WorkflowRunner {
         workflowExecution = workflowExecute.runPartialWorkflow2(
           workflow,
           data.runData,
-          data.destinationNode,
           data.pinData,
+          data.dirtyNodeNames,
+          data.destinationNode,
         );
       } else {
         workflowExecution = workflowExecute.runPartialWorkflow(
@@ -89,7 +89,14 @@ export class WorkflowExecutionService {
   }

   async executeManually(
-    { workflowData, runData, startNodes, destinationNode }: WorkflowRequest.ManualRunPayload,
+    {
+      workflowData,
+      runData,
+      startNodes,
+      destinationNode,
+      dirtyNodeNames,
+      triggerToStartFrom,
+    }: WorkflowRequest.ManualRunPayload,
     user: User,
     pushRef?: string,
     partialExecutionVersion?: string,
@@ -111,14 +118,15 @@ export class WorkflowExecutionService {
     ) {
       const additionalData = await WorkflowExecuteAdditionalData.getBase(user.id);

-      const needsWebhook = await this.testWebhooks.needsWebhook(
-        user.id,
-        workflowData,
+      const needsWebhook = await this.testWebhooks.needsWebhook({
+        userId: user.id,
+        workflowEntity: workflowData,
         additionalData,
         runData,
         pushRef,
         destinationNode,
-      );
+        triggerToStartFrom,
+      });

       if (needsWebhook) return { waitingForWebhook: true };
     }
@@ -137,6 +145,8 @@ export class WorkflowExecutionService {
       workflowData,
       userId: user.id,
       partialExecutionVersion: partialExecutionVersion ?? '0',
+      dirtyNodeNames,
+      triggerToStartFrom,
     };

     const hasRunData = (node: INode) => runData !== undefined && !!runData[node.name];
@@ -1,4 +1,11 @@
-import type { INode, IConnections, IWorkflowSettings, IRunData, StartNodeData } from 'n8n-workflow';
+import type {
+  INode,
+  IConnections,
+  IWorkflowSettings,
+  IRunData,
+  StartNodeData,
+  ITaskData,
+} from 'n8n-workflow';

 import type { IWorkflowDb } from '@/interfaces';
 import type { AuthenticatedRequest, ListQuery } from '@/requests';
@@ -22,6 +29,11 @@ export declare namespace WorkflowRequest {
     runData: IRunData;
     startNodes?: StartNodeData[];
     destinationNode?: string;
+    dirtyNodeNames?: string[];
+    triggerToStartFrom?: {
+      name: string;
+      data?: ITaskData;
+    };
   };

   type Create = AuthenticatedRequest<{}, {}, CreateUpdatePayload>;
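An example of what a manual-run payload can now carry, written against a local copy of the extended fields; the workflow and node names are made up, StartNodeData is reduced to just a name, and ITaskData is reduced to unknown for the sketch.

// Local mirror of the extended ManualRunPayload fields; illustrative only.
interface ManualRunPayloadSketch {
  workflowData: { id: string; name: string };
  runData: Record<string, unknown[]>;
  startNodes?: Array<{ name: string }>;
  destinationNode?: string;
  dirtyNodeNames?: string[];
  triggerToStartFrom?: { name: string; data?: unknown };
}

const payload: ManualRunPayloadSketch = {
  workflowData: { id: 'abc123', name: 'Example workflow' },
  runData: {},
  startNodes: [{ name: 'Set' }],
  dirtyNodeNames: ['Set'],
  // Without `data`, the backend listens only for this trigger;
  // with `data`, it starts executing from it immediately.
  triggerToStartFrom: { name: 'Webhook Trigger' },
};

console.log(payload.triggerToStartFrom?.name); // 'Webhook Trigger'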
@@ -33,7 +33,6 @@ import * as ResponseHelper from '@/response-helper';
 import { NamingService } from '@/services/naming.service';
 import { ProjectService } from '@/services/project.service';
 import { TagService } from '@/services/tag.service';
-import { UserOnboardingService } from '@/services/user-onboarding.service';
 import { UserManagementMailer } from '@/user-management/email';
 import * as utils from '@/utils';
 import * as WorkflowHelpers from '@/workflow-helpers';
@@ -55,7 +54,6 @@ export class WorkflowsController {
     private readonly workflowHistoryService: WorkflowHistoryService,
     private readonly tagService: TagService,
     private readonly namingService: NamingService,
-    private readonly userOnboardingService: UserOnboardingService,
     private readonly workflowRepository: WorkflowRepository,
     private readonly workflowService: WorkflowService,
     private readonly workflowExecutionService: WorkflowExecutionService,
@@ -213,13 +211,7 @@ export class WorkflowsController {
     const requestedName = req.query.name ?? this.globalConfig.workflows.defaultName;

     const name = await this.namingService.getUniqueWorkflowName(requestedName);
-
-    const onboardingFlowEnabled =
-      !this.globalConfig.workflows.onboardingFlowDisabled &&
-      !req.user.settings?.isOnboarded &&
-      (await this.userOnboardingService.isBelowThreshold(req.user));
-
-    return { name, onboardingFlowEnabled };
+    return { name };
   }

   @Get('/from-url')
@@ -5,7 +5,7 @@
   <meta name='viewport' content='width=device-width, initial-scale=1.0' />
   <link rel='icon' type='image/png' href='https://n8n.io/favicon.ico' />
   <link
-    href='http://fonts.googleapis.com/css?family=Open+Sans'
+    href='https://fonts.googleapis.com/css?family=Open+Sans'
     rel='stylesheet'
     type='text/css'
   />
@@ -83,4 +83,4 @@
     </div>
   </body>

-</html>
+</html>
@@ -5,7 +5,7 @@
   <meta name='viewport' content='width=device-width, initial-scale=1.0' />
   <link rel='icon' type='image/png' href='https://n8n.io/favicon.ico' />
   <link
-    href='http://fonts.googleapis.com/css?family=Open+Sans'
+    href='https://fonts.googleapis.com/css?family=Open+Sans'
     rel='stylesheet'
     type='text/css'
   />
@@ -71,4 +71,4 @@
     </div>
   </body>

-</html>
+</html>
@@ -5,7 +5,7 @@
   <meta name='viewport' content='width=device-width, initial-scale=1.0' />
   <link rel='icon' type='image/png' href='https://n8n.io/favicon.ico' />
   <link
-    href='http://fonts.googleapis.com/css?family=Open+Sans'
+    href='https://fonts.googleapis.com/css?family=Open+Sans'
     rel='stylesheet'
     type='text/css'
   />
@@ -71,4 +71,4 @@
     </div>
   </body>

-</html>
+</html>
@@ -4,7 +4,7 @@
   <meta name='viewport' content='width=device-width, initial-scale=1.0' />
   <link rel='icon' type='image/png' href='https://n8n.io/favicon.ico' />
   <link
-    href='http://fonts.googleapis.com/css?family=Open+Sans'
+    href='https://fonts.googleapis.com/css?family=Open+Sans'
     rel='stylesheet'
     type='text/css'
   />
@@ -5,7 +5,7 @@
   <meta name='viewport' content='width=device-width, initial-scale=1.0' />
   <link rel='icon' type='image/png' href='https://n8n.io/favicon.ico' />
   <link
-    href='http://fonts.googleapis.com/css?family=Open+Sans'
+    href='https://fonts.googleapis.com/css?family=Open+Sans'
     rel='stylesheet'
     type='text/css'
   />
@@ -70,4 +70,4 @@
     </div>
   </body>

-</html>
+</html>
@@ -89,7 +89,7 @@ describe('POST /login', () => {
     const response = await testServer.authlessAgent.post('/login').send({
       email: owner.email,
       password: ownerPassword,
-      mfaToken: mfaService.totp.generateTOTP(secret),
+      mfaCode: mfaService.totp.generateTOTP(secret),
     });

     expect(response.statusCode).toBe(200);
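A minimal sketch of the request body shape after the rename from mfaToken to mfaCode; the credentials, TOTP value, and local endpoint URL are placeholders shown only to illustrate the payload.

// Illustrative only: the login body now carries `mfaCode` instead of `mfaToken`.
interface LoginBodySketch {
  email: string;
  password: string;
  mfaCode?: string;
}

const body: LoginBodySketch = {
  email: 'owner@example.com', // placeholder credentials
  password: 'super-secret',
  mfaCode: '123456', // current TOTP value
};

// Hypothetical local endpoint; shown only for the payload shape.
void fetch('http://localhost:5678/rest/login', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify(body),
});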
@@ -55,8 +55,8 @@ describe('Enable MFA setup', () => {
       secondCall.body.data.recoveryCodes.join(''),
     );

-    const token = new TOTPService().generateTOTP(firstCall.body.data.secret);
-    await testServer.authAgentFor(owner).post('/mfa/disable').send({ token }).expect(200);
+    const mfaCode = new TOTPService().generateTOTP(firstCall.body.data.secret);
+    await testServer.authAgentFor(owner).post('/mfa/disable').send({ mfaCode }).expect(200);

     const thirdCall = await testServer.authAgentFor(owner).get('/mfa/qr').expect(200);

@@ -84,22 +84,22 @@ describe('Enable MFA setup', () => {
     await testServer.authlessAgent.post('/mfa/verify').expect(401);
   });

-  test('POST /verify should fail due to invalid MFA token', async () => {
-    await testServer.authAgentFor(owner).post('/mfa/verify').send({ token: '123' }).expect(400);
+  test('POST /verify should fail due to invalid MFA code', async () => {
+    await testServer.authAgentFor(owner).post('/mfa/verify').send({ mfaCode: '123' }).expect(400);
   });

-  test('POST /verify should fail due to missing token parameter', async () => {
+  test('POST /verify should fail due to missing mfaCode parameter', async () => {
     await testServer.authAgentFor(owner).get('/mfa/qr').expect(200);
-    await testServer.authAgentFor(owner).post('/mfa/verify').send({ token: '' }).expect(400);
+    await testServer.authAgentFor(owner).post('/mfa/verify').send({ mfaCode: '' }).expect(400);
   });

-  test('POST /verify should validate MFA token', async () => {
+  test('POST /verify should validate MFA code', async () => {
     const response = await testServer.authAgentFor(owner).get('/mfa/qr').expect(200);

     const { secret } = response.body.data;
-    const token = new TOTPService().generateTOTP(secret);
+    const mfaCode = new TOTPService().generateTOTP(secret);

-    await testServer.authAgentFor(owner).post('/mfa/verify').send({ token }).expect(200);
+    await testServer.authAgentFor(owner).post('/mfa/verify').send({ mfaCode }).expect(200);
   });
 });

@@ -108,13 +108,13 @@ describe('Enable MFA setup', () => {
     await testServer.authlessAgent.post('/mfa/enable').expect(401);
   });

-  test('POST /verify should fail due to missing token parameter', async () => {
-    await testServer.authAgentFor(owner).post('/mfa/verify').send({ token: '' }).expect(400);
+  test('POST /verify should fail due to missing mfaCode parameter', async () => {
+    await testServer.authAgentFor(owner).post('/mfa/verify').send({ mfaCode: '' }).expect(400);
   });

-  test('POST /enable should fail due to invalid MFA token', async () => {
+  test('POST /enable should fail due to invalid MFA code', async () => {
     await testServer.authAgentFor(owner).get('/mfa/qr').expect(200);
-    await testServer.authAgentFor(owner).post('/mfa/enable').send({ token: '123' }).expect(400);
+    await testServer.authAgentFor(owner).post('/mfa/enable').send({ mfaCode: '123' }).expect(400);
   });

   test('POST /enable should fail due to empty secret and recovery codes', async () => {
@@ -125,10 +125,10 @@ describe('Enable MFA setup', () => {
     const response = await testServer.authAgentFor(owner).get('/mfa/qr').expect(200);

     const { secret } = response.body.data;
-    const token = new TOTPService().generateTOTP(secret);
+    const mfaCode = new TOTPService().generateTOTP(secret);

-    await testServer.authAgentFor(owner).post('/mfa/verify').send({ token }).expect(200);
-    await testServer.authAgentFor(owner).post('/mfa/enable').send({ token }).expect(200);
+    await testServer.authAgentFor(owner).post('/mfa/verify').send({ mfaCode }).expect(200);
+    await testServer.authAgentFor(owner).post('/mfa/enable').send({ mfaCode }).expect(200);

     const user = await Container.get(AuthUserRepository).findOneOrFail({
       where: {},
@@ -145,13 +145,13 @@ describe('Enable MFA setup', () => {
     const response = await testServer.authAgentFor(owner).get('/mfa/qr').expect(200);

     const { secret } = response.body.data;
-    const token = new TOTPService().generateTOTP(secret);
+    const mfaCode = new TOTPService().generateTOTP(secret);

-    await testServer.authAgentFor(owner).post('/mfa/verify').send({ token }).expect(200);
+    await testServer.authAgentFor(owner).post('/mfa/verify').send({ mfaCode }).expect(200);

     externalHooks.run.mockRejectedValue(new BadRequestError('Error message'));

-    await testServer.authAgentFor(owner).post('/mfa/enable').send({ token }).expect(400);
+    await testServer.authAgentFor(owner).post('/mfa/enable').send({ mfaCode }).expect(400);

     const user = await Container.get(AuthUserRepository).findOneOrFail({
       where: {},
@@ -165,13 +165,13 @@ describe('Enable MFA setup', () => {
 describe('Disable MFA setup', () => {
   test('POST /disable should disable login with MFA', async () => {
     const { user, rawSecret } = await createUserWithMfaEnabled();
-    const token = new TOTPService().generateTOTP(rawSecret);
+    const mfaCode = new TOTPService().generateTOTP(rawSecret);

     await testServer
       .authAgentFor(user)
       .post('/mfa/disable')
       .send({
-        token,
+        mfaCode,
       })
       .expect(200);

@@ -184,21 +184,39 @@ describe('Disable MFA setup', () => {
     expect(dbUser.mfaRecoveryCodes.length).toBe(0);
   });

-  test('POST /disable should fail if invalid token is given', async () => {
+  test('POST /disable should fail if invalid MFA recovery code is given', async () => {
     const { user } = await createUserWithMfaEnabled();

     await testServer
       .authAgentFor(user)
       .post('/mfa/disable')
       .send({
-        token: 'invalid token',
+        mfaRecoveryCode: 'invalid token',
       })
       .expect(403);
   });

+  test('POST /disable should fail if invalid MFA code is given', async () => {
+    const { user } = await createUserWithMfaEnabled();
+
+    await testServer
+      .authAgentFor(user)
+      .post('/mfa/disable')
+      .send({
+        mfaCode: 'invalid token',
+      })
+      .expect(403);
+  });
+
+  test('POST /disable should fail if neither MFA code nor recovery code is sent', async () => {
+    const { user } = await createUserWithMfaEnabled();
+
+    await testServer.authAgentFor(user).post('/mfa/disable').send({ anotherParam: '' }).expect(400);
+  });
 });

 describe('Change password with MFA enabled', () => {
-  test('POST /change-password should fail due to missing MFA token', async () => {
+  test('POST /change-password should fail due to missing MFA code', async () => {
     await createUserWithMfaEnabled();

     const newPassword = randomValidPassword();
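The new tests above pin down that POST /mfa/disable accepts either a current MFA code or a recovery code, and rejects a request that sends neither. A small sketch of the accepted body shapes; the union type and values are illustrative, not the server's actual validation schema.

// Illustrative request bodies for POST /mfa/disable after the rename.
type DisableMfaBodySketch =
  | { mfaCode: string } // current TOTP value
  | { mfaRecoveryCode: string }; // one of the stored recovery codes

const withCode: DisableMfaBodySketch = { mfaCode: '123456' };
const withRecoveryCode: DisableMfaBodySketch = { mfaRecoveryCode: 'abcd-efgh-ijkl' };

// Sending neither field is rejected with 400 in the tests above.
console.log(JSON.stringify(withCode), JSON.stringify(withRecoveryCode));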
@@ -210,7 +228,7 @@ describe('Change password with MFA enabled', () => {
       .expect(404);
   });

-  test('POST /change-password should fail due to invalid MFA token', async () => {
+  test('POST /change-password should fail due to invalid MFA code', async () => {
     await createUserWithMfaEnabled();

     const newPassword = randomValidPassword();
@@ -221,7 +239,7 @@ describe('Change password with MFA enabled', () => {
       .send({
         password: newPassword,
         token: resetPasswordToken,
-        mfaToken: randomInt(10),
+        mfaCode: randomInt(10),
       })
       .expect(404);
   });
@@ -235,14 +253,14 @@ describe('Change password with MFA enabled', () => {

     const resetPasswordToken = Container.get(AuthService).generatePasswordResetToken(user);

-    const mfaToken = new TOTPService().generateTOTP(rawSecret);
+    const mfaCode = new TOTPService().generateTOTP(rawSecret);

     await testServer.authlessAgent
       .post('/change-password')
       .send({
         password: newPassword,
         token: resetPasswordToken,
-        mfaToken,
+        mfaCode,
       })
       .expect(200);

@@ -252,7 +270,7 @@ describe('Change password with MFA enabled', () => {
       .send({
         email: user.email,
         password: newPassword,
-        mfaToken: new TOTPService().generateTOTP(rawSecret),
+        mfaCode: new TOTPService().generateTOTP(rawSecret),
       })
       .expect(200);

@@ -315,7 +333,7 @@ describe('Login', () => {

     await testServer.authlessAgent
       .post('/login')
-      .send({ email: user.email, password: rawPassword, mfaToken: 'wrongvalue' })
+      .send({ email: user.email, password: rawPassword, mfaCode: 'wrongvalue' })
       .expect(401);
   });

@@ -337,7 +355,7 @@ describe('Login', () => {

     const response = await testServer.authlessAgent
       .post('/login')
-      .send({ email: user.email, password: rawPassword, mfaToken: token })
+      .send({ email: user.email, password: rawPassword, mfaCode: token })
       .expect(200);

     const data = response.body.data;