Mirror of https://github.com/n8n-io/n8n.git (synced 2024-11-09 22:24:05 -08:00)

Commit 9ecf8062d6: Merge branch 'master' into patch-1
@@ -17,9 +17,9 @@ jobs:
       - uses: actions/checkout@v4.1.1

       - run: corepack enable
-      - uses: actions/setup-node@v4.0.1
+      - uses: actions/setup-node@v4.0.2
         with:
-          node-version: 18.x
+          node-version: 20.x
           cache: 'pnpm'

       - name: Install dependencies

.github/workflows/check-pr-title.yml (vendored, 4 changes)

@@ -19,9 +19,9 @@ jobs:
         uses: actions/checkout@v4.1.1

       - run: corepack enable
-      - uses: actions/setup-node@v4.0.1
+      - uses: actions/setup-node@v4.0.2
         with:
-          node-version: 18.x
+          node-version: 20.x
           cache: 'pnpm'

       - name: Install dependencies

.github/workflows/check-tests.yml (vendored, 4 changes)

@@ -20,9 +20,9 @@ jobs:
           fetch-depth: 0

       - name: Use Node.js
-        uses: actions/setup-node@v4.0.1
+        uses: actions/setup-node@v4.0.2
         with:
-          node-version: 18.x
+          node-version: 20.x

       - run: npm install --prefix=.github/scripts --no-package-lock

.github/workflows/chromatic.yml (vendored, 4 changes)

@@ -15,9 +15,9 @@ jobs:
         with:
           fetch-depth: 0
       - run: corepack enable
-      - uses: actions/setup-node@v4.0.1
+      - uses: actions/setup-node@v4.0.2
         with:
-          node-version: 18.x
+          node-version: 20.x
           cache: 'pnpm'
       - run: pnpm install --frozen-lockfile
.github/workflows/ci-master.yml (vendored, 27 changes)

@@ -9,20 +9,15 @@ jobs:
   install-and-build:
     runs-on: ubuntu-latest
-    timeout-minutes: 30
-    strategy:
-      matrix:
-        node-version: [18.x, 20.x]
+    timeout-minutes: 10

     steps:
       - uses: actions/checkout@v4.1.1

       - run: corepack enable
-      - name: Use Node.js ${{ matrix.node-version }}
-        uses: actions/setup-node@v4.0.1
+      - uses: actions/setup-node@v4.0.2
         with:
-          node-version: ${{ matrix.node-version }}
+          node-version: 20.x
           cache: pnpm

       - name: Install dependencies

@@ -35,7 +30,7 @@ jobs:
         uses: actions/cache/save@v4.0.0
         with:
           path: ./packages/**/dist
-          key: ${{ github.sha }}-base:${{ matrix.node-version }}-test-lint
+          key: ${{ github.sha }}-base:build

   unit-test:
     name: Unit tests

@@ -43,20 +38,17 @@ jobs:
     needs: install-and-build
     strategy:
       matrix:
-        node-version: [18.x, 20.x]
+        node-version: [18.x, 20.x, 22.x]
     with:
       ref: ${{ inputs.branch }}
       nodeVersion: ${{ matrix.node-version }}
-      cacheKey: ${{ github.sha }}-base:${{ matrix.node-version }}-test-lint
+      cacheKey: ${{ github.sha }}-base:build
       collectCoverage: true

   lint:
     name: Lint changes
     runs-on: ubuntu-latest
     needs: install-and-build
-    strategy:
-      matrix:
-        node-version: [18.x, 20.x]
     steps:
       - uses: actions/checkout@v4.1.1
         with:

@@ -64,10 +56,9 @@ jobs:
           ref: ${{ inputs.branch }}

       - run: corepack enable
-      - name: Use Node.js ${{ matrix.node-version }}
-        uses: actions/setup-node@v4.0.1
+      - uses: actions/setup-node@v4.0.2
         with:
-          node-version: ${{ matrix.node-version }}
+          node-version: 20.x
           cache: pnpm

       - name: Install dependencies

@@ -77,7 +68,7 @@ jobs:
         uses: actions/cache/restore@v4.0.0
         with:
           path: ./packages/**/dist
-          key: ${{ github.sha }}-base:${{ matrix.node-version }}-test-lint
+          key: ${{ github.sha }}-base:build

       - name: Lint
         env:
.github/workflows/ci-postgres-mysql.yml (vendored, 17 changes)

@@ -20,9 +20,9 @@ jobs:
     steps:
       - uses: actions/checkout@v4.1.1
       - run: corepack enable
-      - uses: actions/setup-node@v4.0.1
+      - uses: actions/setup-node@v4.0.2
         with:
-          node-version: 18.x
+          node-version: 20.x
           cache: 'pnpm'
       - run: pnpm install --frozen-lockfile

@@ -46,9 +46,9 @@ jobs:
     steps:
      - uses: actions/checkout@v4.1.1
       - run: corepack enable
-      - uses: actions/setup-node@v4.0.1
+      - uses: actions/setup-node@v4.0.2
         with:
-          node-version: 18.x
+          node-version: 20.x
           cache: 'pnpm'
       - run: pnpm install --frozen-lockfile

@@ -72,9 +72,9 @@ jobs:
     steps:
       - uses: actions/checkout@v4.1.1
       - run: corepack enable
-      - uses: actions/setup-node@v4.0.1
+      - uses: actions/setup-node@v4.0.2
         with:
-          node-version: 18.x
+          node-version: 20.x
           cache: 'pnpm'
       - run: pnpm install --frozen-lockfile

@@ -102,12 +102,13 @@ jobs:
     timeout-minutes: 20
     env:
       DB_POSTGRESDB_PASSWORD: password
+      DB_POSTGRESDB_POOL_SIZE: 1 # Detect connection pooling deadlocks
     steps:
       - uses: actions/checkout@v4.1.1
       - run: corepack enable
-      - uses: actions/setup-node@v4.0.1
+      - uses: actions/setup-node@v4.0.2
         with:
-          node-version: 18.x
+          node-version: 20.x
           cache: 'pnpm'
       - run: pnpm install --frozen-lockfile
.github/workflows/ci-pull-requests.yml (vendored, 16 changes)

@@ -13,10 +13,9 @@ jobs:
           ref: refs/pull/${{ github.event.pull_request.number }}/merge

       - run: corepack enable
-      - name: Use Node.js 18
-        uses: actions/setup-node@v4.0.1
+      - uses: actions/setup-node@v4.0.2
         with:
-          node-version: 18.x
+          node-version: 20.x
           cache: pnpm

       - name: Install dependencies

@@ -29,7 +28,7 @@ jobs:
         uses: actions/cache/save@v4.0.0
         with:
           path: ./packages/**/dist
-          key: ${{ github.sha }}-base:18-test-lint
+          key: ${{ github.sha }}-base:build

   unit-test:
     name: Unit tests

@@ -37,7 +36,7 @@ jobs:
     needs: install
     with:
       ref: refs/pull/${{ github.event.pull_request.number }}/merge
-      cacheKey: ${{ github.sha }}-base:18-test-lint
+      cacheKey: ${{ github.sha }}-base:build

   lint:
     name: Lint changes

@@ -50,10 +49,9 @@ jobs:
           ref: refs/pull/${{ github.event.pull_request.number }}/merge

       - run: corepack enable
-      - name: Use Node.js 18
-        uses: actions/setup-node@v4.0.1
+      - uses: actions/setup-node@v4.0.2
         with:
-          node-version: 18.x
+          node-version: 20.x
           cache: pnpm

       - name: Install dependencies

@@ -63,7 +61,7 @@ jobs:
         uses: actions/cache/restore@v4.0.0
         with:
           path: ./packages/**/dist
-          key: ${{ github.sha }}-base:18-test-lint
+          key: ${{ github.sha }}-base:build

       - name: Lint
         run: pnpm lint
.github/workflows/docker-base-image.yml (vendored, 3 changes)

@@ -7,10 +7,11 @@ on:
         description: 'Node.js version to build this image with.'
         type: choice
         required: true
-        default: '18'
+        default: '20'
         options:
           - '18'
           - '20'
+          - '22'

 jobs:
   build:

.github/workflows/release-create-pr.yml (vendored, 4 changes)

@@ -36,9 +36,9 @@ jobs:
           ref: ${{ github.event.inputs.base-branch }}

       - run: corepack enable
-      - uses: actions/setup-node@v4.0.1
+      - uses: actions/setup-node@v4.0.2
         with:
-          node-version: 18.x
+          node-version: 20.x

       - run: npm install --prefix=.github/scripts --no-package-lock

.github/workflows/release-publish.yml (vendored, 4 changes)

@@ -24,9 +24,9 @@ jobs:
           fetch-depth: 0

       - run: corepack enable
-      - uses: actions/setup-node@v4.0.1
+      - uses: actions/setup-node@v4.0.2
         with:
-          node-version: 18.x
+          node-version: 20.x
           cache: 'pnpm'
       - run: pnpm install --frozen-lockfile

@@ -22,9 +22,9 @@ jobs:
     runs-on: ubuntu-latest
     timeout-minutes: 5
     steps:
-      - uses: actions/setup-node@v4.0.1
+      - uses: actions/setup-node@v4.0.2
         with:
-          node-version: 18.x
+          node-version: 20.x
       - run: |
           echo "//registry.npmjs.org/:_authToken=${{ secrets.NPM_TOKEN }}" > ~/.npmrc
           npm dist-tag add n8n@${{ github.event.inputs.version }} ${{ github.event.inputs.release-channel }}

.github/workflows/test-workflows.yml (vendored, 4 changes)

@@ -26,9 +26,9 @@ jobs:
       - run: corepack enable
         working-directory: n8n

-      - uses: actions/setup-node@v4.0.1
+      - uses: actions/setup-node@v4.0.2
         with:
-          node-version: 18.x
+          node-version: 20.x
           cache: 'pnpm'
           cache-dependency-path: 'n8n/pnpm-lock.yaml'

.github/workflows/units-tests-reusable.yml (vendored, 4 changes)

@@ -12,7 +12,7 @@ on:
         description: 'Version of node to use.'
         required: false
         type: string
-        default: '18.x'
+        default: 20.x
       cacheKey:
         description: 'Cache key for modules and build artifacts.'
         required: false

@@ -37,7 +37,7 @@ jobs:

       - run: corepack enable
       - name: Use Node.js ${{ inputs.nodeVersion }}
-        uses: actions/setup-node@v4.0.1
+        uses: actions/setup-node@v4.0.2
         with:
           node-version: ${{ inputs.nodeVersion }}
           cache: pnpm
CHANGELOG.md (52 changes)

@@ -1,3 +1,55 @@
+# [1.43.0](https://github.com/n8n-io/n8n/compare/n8n@1.42.0...n8n@1.43.0) (2024-05-22)
+
+
+### Bug Fixes
+
+* **core:** Account for retry of execution aborted by pre-execute hook ([#9474](https://github.com/n8n-io/n8n/issues/9474)) ([a217866](https://github.com/n8n-io/n8n/commit/a217866cef6caaef9244f3d16d90f7027adc0c12))
+* **core:** Add an option to disable STARTTLS for SMTP connections ([#9415](https://github.com/n8n-io/n8n/issues/9415)) ([0d73588](https://github.com/n8n-io/n8n/commit/0d7358807b4244be574060726388bd49fc90dc64))
+* **core:** Do not allow admins to delete the instance owner ([#9489](https://github.com/n8n-io/n8n/issues/9489)) ([fc83005](https://github.com/n8n-io/n8n/commit/fc83005ba0876ebea70f93de700adbd6e3095c96))
+* **core:** Do not allow admins to generate password-reset links for instance owner ([#9488](https://github.com/n8n-io/n8n/issues/9488)) ([88b9a40](https://github.com/n8n-io/n8n/commit/88b9a4070b7df943c3ba22047c0656a5d0a2111c))
+* **core:** Fix 431 for large dynamic node parameters ([#9384](https://github.com/n8n-io/n8n/issues/9384)) ([d21ad15](https://github.com/n8n-io/n8n/commit/d21ad15c1f12739af6a28983a6469347c26f1e08))
+* **core:** Handle credential in body for oauth2 refresh token ([#9179](https://github.com/n8n-io/n8n/issues/9179)) ([c9855e3](https://github.com/n8n-io/n8n/commit/c9855e3dce42f8830636914458d1061668a466a8))
+* **core:** Remove excess args from routing error ([#9377](https://github.com/n8n-io/n8n/issues/9377)) ([b1f977e](https://github.com/n8n-io/n8n/commit/b1f977ebd084ab3a8fb1d13109063de7d2a15296))
+* **core:** Retry before continue on fail ([#9395](https://github.com/n8n-io/n8n/issues/9395)) ([9b2ce81](https://github.com/n8n-io/n8n/commit/9b2ce819d42c4a541ae94956aaab608a989ec588))
+* **editor:** Emit change events from filter component on update ([#9479](https://github.com/n8n-io/n8n/issues/9479)) ([62df433](https://github.com/n8n-io/n8n/commit/62df4331d448dfdabd51db33560a87dd5d805a13))
+* **editor:** Fix blank Public API page ([#9409](https://github.com/n8n-io/n8n/issues/9409)) ([14fe9f2](https://github.com/n8n-io/n8n/commit/14fe9f268feeb0ca106ddaaa94c69cb356011524))
+* **editor:** Fix i18n translation addition ([#9451](https://github.com/n8n-io/n8n/issues/9451)) ([04dd476](https://github.com/n8n-io/n8n/commit/04dd4760e173bfc8a938413a5915d63291da8afe))
+* **editor:** Fix node execution errors showing undefined ([#9487](https://github.com/n8n-io/n8n/issues/9487)) ([62ee796](https://github.com/n8n-io/n8n/commit/62ee79689569b5d2c9823afac238e66e4c645d9b))
+* **editor:** Fix outdated roles in variables labels ([#9411](https://github.com/n8n-io/n8n/issues/9411)) ([38b498e](https://github.com/n8n-io/n8n/commit/38b498e73a71a9ca8b10a89e498aa8330acf2626))
+* **editor:** Fix project settings layout ([#9475](https://github.com/n8n-io/n8n/issues/9475)) ([96cf41f](https://github.com/n8n-io/n8n/commit/96cf41f8516881f0ba15b0b01dda7712f1edc845))
+* **editor:** Fix type errors in `components/executions/workflow` ([#9448](https://github.com/n8n-io/n8n/issues/9448)) ([9c768a0](https://github.com/n8n-io/n8n/commit/9c768a0443520f0c031d4d807d955d7778a00997))
+* **editor:** Fix type errors in i18n plugin ([#9441](https://github.com/n8n-io/n8n/issues/9441)) ([a7d3e59](https://github.com/n8n-io/n8n/commit/a7d3e59aef36dd65429ad0b2ea4696b107620eeb))
+* **editor:** Fix workflow history TS errors ([#9433](https://github.com/n8n-io/n8n/issues/9433)) ([bc05faf](https://github.com/n8n-io/n8n/commit/bc05faf0a6a0913013e4d46eefb1e45abc390883))
+* **editor:** Secondary button in dark mode ([#9401](https://github.com/n8n-io/n8n/issues/9401)) ([aad43d8](https://github.com/n8n-io/n8n/commit/aad43d8cdcc9621fbd864fbe0235c9ff4ddbfe3e))
+* **Email Trigger (IMAP) Node:** Handle attachments correctly ([#9410](https://github.com/n8n-io/n8n/issues/9410)) ([68a6c81](https://github.com/n8n-io/n8n/commit/68a6c8172973091e8474a9f173fa4a5e97284f18))
+* Fix color picker type errors ([#9436](https://github.com/n8n-io/n8n/issues/9436)) ([2967df2](https://github.com/n8n-io/n8n/commit/2967df2fe098278dd20126dc033b03cbb4b903ce))
+* Fix type errors in community nodes components ([#9445](https://github.com/n8n-io/n8n/issues/9445)) ([aac19d3](https://github.com/n8n-io/n8n/commit/aac19d328564bfecda53b338e2c56e5e30e5c0c1))
+* **Gmail Trigger Node:** Fetching duplicate emails ([#9424](https://github.com/n8n-io/n8n/issues/9424)) ([3761537](https://github.com/n8n-io/n8n/commit/3761537880f53d9e54b0200a63b067dc3d154787))
+* **HTML Node:** Fix typo preventing row attributes from being set in tables ([#9440](https://github.com/n8n-io/n8n/issues/9440)) ([28e3e21](https://github.com/n8n-io/n8n/commit/28e3e211771fd73a88e34b81858188156fca5fbb))
+* **HubSpot Trigger Node:** Fix issue with ticketId not being set ([#9403](https://github.com/n8n-io/n8n/issues/9403)) ([b5c7c06](https://github.com/n8n-io/n8n/commit/b5c7c061b7e854a06bd725f7905a7f3ac8dfedc2))
+* **Mattermost Node:** Change loadOptions to fetch all items ([#9413](https://github.com/n8n-io/n8n/issues/9413)) ([1377e21](https://github.com/n8n-io/n8n/commit/1377e212c709bc9ca6586c030ec083e89a3d8c37))
+* **Microsoft OneDrive Trigger Node:** Fix issue with test run failing ([#9386](https://github.com/n8n-io/n8n/issues/9386)) ([92a1d65](https://github.com/n8n-io/n8n/commit/92a1d65c4b00683cc334c70f183e5f8c99bfae65))
+* **RSS Feed Trigger Node:** Use newest date instead of first item for new items ([#9182](https://github.com/n8n-io/n8n/issues/9182)) ([7236a55](https://github.com/n8n-io/n8n/commit/7236a558b945c69fa5680e42c538af7c5276cc31))
+* Update operations to run per item ([#8967](https://github.com/n8n-io/n8n/issues/8967)) ([ef9d4ab](https://github.com/n8n-io/n8n/commit/ef9d4aba90c92f9b72a17de242a4ffeb7c034802))
+
+
+### Features
+
+* Add Slack trigger node ([#9190](https://github.com/n8n-io/n8n/issues/9190)) ([bf54930](https://github.com/n8n-io/n8n/commit/bf549301df541c43931fe4493b4bad7905fb0c8a))
+* **Custom n8n Workflow Tool Node:** Add support for tool input schema ([#9470](https://github.com/n8n-io/n8n/issues/9470)) ([2fa46b6](https://github.com/n8n-io/n8n/commit/2fa46b6faac5618a10403066c3dddf4ea9def12c))
+* **editor:** Add examples for Luxon DateTime expression methods ([#9361](https://github.com/n8n-io/n8n/issues/9361)) ([40bce7f](https://github.com/n8n-io/n8n/commit/40bce7f44332042bf8dba0442044acd76cc9bf21))
+* **editor:** Add examples for root expression methods ([#9373](https://github.com/n8n-io/n8n/issues/9373)) ([a591f63](https://github.com/n8n-io/n8n/commit/a591f63e3ff51c19fe48185144725e881c418b23))
+* **editor:** Expand supported Unicode range for expressions ([#9420](https://github.com/n8n-io/n8n/issues/9420)) ([2118236](https://github.com/n8n-io/n8n/commit/211823650ba298aac899ff944819290f0bd4654a))
+* **editor:** Update Node Details View header tabs structure ([#9425](https://github.com/n8n-io/n8n/issues/9425)) ([2782534](https://github.com/n8n-io/n8n/commit/2782534d78e9613bda41675b4574c8016b10b0a4))
+* **Extract from File Node:** Add option to set encoding for CSV files ([#9392](https://github.com/n8n-io/n8n/issues/9392)) ([f13dbc9](https://github.com/n8n-io/n8n/commit/f13dbc9cc31fba20b4cb0bedf11e56e16079f946))
+* **Linear Node:** Add identifier to outputs ([#9469](https://github.com/n8n-io/n8n/issues/9469)) ([ffe034c](https://github.com/n8n-io/n8n/commit/ffe034c72e07346cdbea4dda96c7e2c38ea73c45))
+* **OpenAI Node:** Use v2 assistants API and add support for memory ([#9406](https://github.com/n8n-io/n8n/issues/9406)) ([ce3eb12](https://github.com/n8n-io/n8n/commit/ce3eb12a6ba325d3785d54d90ff5a32152afd4c0))
+* RBAC ([#8922](https://github.com/n8n-io/n8n/issues/8922)) ([596c472](https://github.com/n8n-io/n8n/commit/596c472ecc756bf934c51e7efae0075fb23313b4))
+* **Strava Node:** Update to use sport type ([#9462](https://github.com/n8n-io/n8n/issues/9462)) ([9da9368](https://github.com/n8n-io/n8n/commit/9da93680c28f9191eac7edc452e5123749e5c148))
+* **Telegram Node:** Add support for local bot api server ([#8437](https://github.com/n8n-io/n8n/issues/8437)) ([87f965e](https://github.com/n8n-io/n8n/commit/87f965e9055904486f5fd815c060abb4376296a0))
+
+
+
 # [1.42.0](https://github.com/n8n-io/n8n/compare/n8n@1.41.0...n8n@1.42.0) (2024-05-15)
@@ -64,7 +64,7 @@ dependencies are installed and the packages get linked correctly. Here's a short

 #### Node.js

-[Node.js](https://nodejs.org/en/) version 16.9 or newer is required for development purposes.
+[Node.js](https://nodejs.org/en/) version 18.10 or newer is required for development purposes.

 #### pnpm
@@ -34,6 +34,27 @@ describe('User Management', { disableAutoLogin: true }, () => {
 		cy.enableFeature('sharing');
 	});

+	it.only('should login and logout', () => {
+		cy.visit('/');
+		cy.get('input[name="email"]').type(INSTANCE_OWNER.email);
+		cy.get('input[name="password"]').type(INSTANCE_OWNER.password);
+		cy.getByTestId('form-submit-button').click();
+		mainSidebar.getters.logo().should('be.visible');
+		mainSidebar.actions.goToSettings();
+		settingsSidebar.getters.users().should('be.visible');
+
+		mainSidebar.actions.closeSettings();
+		mainSidebar.actions.openUserMenu();
+		cy.getByTestId('user-menu-item-logout').click();
+
+		cy.get('input[name="email"]').type(INSTANCE_MEMBERS[0].email);
+		cy.get('input[name="password"]').type(INSTANCE_MEMBERS[0].password);
+		cy.getByTestId('form-submit-button').click();
+		mainSidebar.getters.logo().should('be.visible');
+		mainSidebar.actions.goToSettings();
+		cy.getByTestId('menu-item').filter('#settings-users').should('not.exist');
+	});
+
 	it('should prevent non-owners to access UM settings', () => {
 		usersSettingsPage.actions.loginAndVisit(
 			INSTANCE_MEMBERS[0].email,
@@ -42,39 +42,6 @@ describe('Credentials', () => {
 		credentialsPage.getters.credentialCards().should('have.length', 1);
 	});

-	it.skip('should create a new credential using Add Credential button', () => {
-		credentialsPage.getters.createCredentialButton().click();
-
-		credentialsModal.getters.newCredentialModal().should('be.visible');
-		credentialsModal.getters.newCredentialTypeSelect().should('be.visible');
-		credentialsModal.getters.newCredentialTypeOption('Airtable API').click();
-
-		credentialsModal.getters.newCredentialTypeButton().click();
-		credentialsModal.getters.editCredentialModal().should('be.visible');
-		credentialsModal.getters.connectionParameter('API Key').type('1234567890');
-
-		credentialsModal.actions.setName('Airtable Account');
-		credentialsModal.actions.save();
-		credentialsModal.actions.close();
-
-		credentialsPage.getters.credentialCards().should('have.length', 2);
-	});
-
-	it.skip('should search credentials', () => {
-		// Search by name
-		credentialsPage.actions.search('Notion');
-		credentialsPage.getters.credentialCards().should('have.length', 1);
-
-		// Search by Credential type
-		credentialsPage.actions.search('Airtable API');
-		credentialsPage.getters.credentialCards().should('have.length', 1);
-
-		// No results
-		credentialsPage.actions.search('Google');
-		credentialsPage.getters.credentialCards().should('have.length', 0);
-		credentialsPage.getters.emptyList().should('be.visible');
-	});
-
 	it('should sort credentials', () => {
 		credentialsPage.actions.search('');
 		credentialsPage.actions.sortBy('nameDesc');
@@ -71,6 +71,45 @@ describe('Current Workflow Executions', () => {
 		cy.wait(executionsRefreshInterval);
 		cy.url().should('not.include', '/executions');
 	});

+	it.only('should auto load more items if there is space and auto scroll', () => {
+		cy.viewport(1280, 960);
+		executionsTab.actions.createManualExecutions(24);
+
+		cy.intercept('GET', '/rest/executions?filter=*').as('getExecutions');
+		cy.intercept('GET', '/rest/executions/*').as('getExecution');
+		executionsTab.actions.switchToExecutionsTab();
+
+		cy.wait(['@getExecutions']);
+		executionsTab.getters.executionListItems().its('length').should('be.gte', 10);
+
+		cy.getByTestId('current-executions-list').scrollTo('bottom');
+		cy.wait(['@getExecutions']);
+		executionsTab.getters.executionListItems().should('have.length', 24);
+
+		executionsTab.getters.executionListItems().eq(14).click();
+		cy.wait(['@getExecution']);
+		cy.reload();
+
+		cy.wait(['@getExecutions']);
+		executionsTab.getters.executionListItems().eq(14).should('not.be.visible');
+		executionsTab.getters.executionListItems().should('have.length', 24);
+		executionsTab.getters.executionListItems().first().should('not.be.visible');
+		cy.getByTestId('current-executions-list').scrollTo(0, 0);
+		executionsTab.getters.executionListItems().first().should('be.visible');
+		executionsTab.getters.executionListItems().eq(14).should('not.be.visible');
+
+		executionsTab.actions.switchToEditorTab();
+		executionsTab.actions.switchToExecutionsTab();
+
+		cy.wait(['@getExecutions']);
+		executionsTab.getters.executionListItems().eq(14).should('not.be.visible');
+		executionsTab.getters.executionListItems().should('have.length', 24);
+		executionsTab.getters.executionListItems().first().should('not.be.visible');
+		cy.getByTestId('current-executions-list').scrollTo(0, 0);
+		executionsTab.getters.executionListItems().first().should('be.visible');
+		executionsTab.getters.executionListItems().eq(14).should('not.be.visible');
+	});
 });

 const createMockExecutions = () => {
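The new spec exercises lazy loading by aliasing the list endpoint, scrolling, and asserting that the list grows. A minimal sketch of that pattern in TypeScript, with an illustrative item selector and count that are not taken from the suite:

```ts
// Sketch only: alias the list request, scroll, then assert on the grown list.
it('loads more executions when the list is scrolled', () => {
	cy.intercept('GET', '/rest/executions?filter=*').as('getExecutions');

	cy.getByTestId('current-executions-list').scrollTo('bottom');
	cy.wait('@getExecutions'); // the scroll should trigger one more page fetch

	cy.getByTestId('current-executions-list')
		.find('[data-test-id="execution-item"]') // hypothetical item selector
		.should('have.length.greaterThan', 10);
});
```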
@@ -1,118 +0,0 @@
import { WorkflowsPage as WorkflowsPageClass } from '../pages/workflows';
import { WorkflowPage as WorkflowPageClass } from '../pages/workflow';

import { MainSidebar } from '../pages';
import { INSTANCE_OWNER } from '../constants';

const WorkflowsPage = new WorkflowsPageClass();
const WorkflowPages = new WorkflowPageClass();
const mainSidebar = new MainSidebar();

describe.skip('Workflow filters', () => {
	before(() => {
		cy.enableFeature('sharing', true);
	});

	beforeEach(() => {
		cy.visit(WorkflowsPage.url);
	});

	it('Should filter by tags', () => {
		cy.visit(WorkflowsPage.url);
		WorkflowsPage.getters.newWorkflowButtonCard().click();
		cy.createFixtureWorkflow('Test_workflow_1.json', `Workflow 1`);
		cy.visit(WorkflowsPage.url);
		WorkflowsPage.getters.createWorkflowButton().click();
		cy.createFixtureWorkflow('Test_workflow_2.json', `Workflow 2`);
		cy.visit(WorkflowsPage.url);

		WorkflowsPage.getters.workflowFilterButton().click();
		WorkflowsPage.getters.workflowTagsDropdown().click();
		WorkflowsPage.getters.workflowTagItem('other-tag-1').click();
		cy.get('body').click(0, 0);

		WorkflowsPage.getters.workflowCards().should('have.length', 1);
		WorkflowsPage.getters.workflowCard('Workflow 2').should('contain.text', 'Workflow 2');
		mainSidebar.actions.goToSettings();
		cy.go('back');

		WorkflowsPage.getters.workflowCards().should('have.length', 1);
		WorkflowsPage.getters.workflowCard('Workflow 2').should('contain.text', 'Workflow 2');
		WorkflowsPage.getters.workflowResetFilters().click();

		WorkflowsPage.getters.workflowCards().each(($el) => {
			const workflowName = $el.find('[data-test-id="workflow-card-name"]').text();

			WorkflowsPage.getters.workflowCardActions(workflowName).click();
			WorkflowsPage.getters.workflowDeleteButton().click();

			cy.get('button').contains('delete').click();
		});
	});

	it('Should filter by status', () => {
		cy.visit(WorkflowsPage.url);
		WorkflowsPage.getters.newWorkflowButtonCard().click();
		cy.createFixtureWorkflow('Test_workflow_1.json', `Workflow 1`);
		cy.visit(WorkflowsPage.url);
		WorkflowsPage.getters.createWorkflowButton().click();
		cy.createFixtureWorkflow('Test_workflow_3.json', `Workflow 3`);
		WorkflowPages.getters.activatorSwitch().click();
		cy.visit(WorkflowsPage.url);

		WorkflowsPage.getters.workflowFilterButton().click();
		WorkflowsPage.getters.workflowStatusDropdown().click();
		WorkflowsPage.getters.workflowStatusItem('Active').click();
		cy.get('body').click(0, 0);

		WorkflowsPage.getters.workflowCards().should('have.length', 1);
		WorkflowsPage.getters.workflowCard('Workflow 3').should('contain.text', 'Workflow 3');
		mainSidebar.actions.goToSettings();
		cy.go('back');

		WorkflowsPage.getters.workflowCards().should('have.length', 1);
		WorkflowsPage.getters.workflowCard('Workflow 3').should('contain.text', 'Workflow 3');
		WorkflowsPage.getters.workflowResetFilters().click();

		WorkflowsPage.getters.workflowCards().each(($el) => {
			const workflowName = $el.find('[data-test-id="workflow-card-name"]').text();

			WorkflowsPage.getters.workflowCardActions(workflowName).click();
			WorkflowsPage.getters.workflowDeleteButton().click();

			cy.get('button').contains('delete').click();
		});
	});

	it('Should filter by owned by', () => {
		cy.visit(WorkflowsPage.url);

		WorkflowsPage.getters.newWorkflowButtonCard().click();
		cy.createFixtureWorkflow('Test_workflow_1.json', `Workflow 1`);
		cy.visit(WorkflowsPage.url);
		WorkflowsPage.getters.createWorkflowButton().click();
		cy.createFixtureWorkflow('Test_workflow_3.json', `Workflow 3`);
		WorkflowPages.getters.activatorSwitch().click();
		cy.visit(WorkflowsPage.url);

		WorkflowsPage.getters.workflowFilterButton().click();
		WorkflowsPage.getters.workflowOwnershipDropdown().realClick();
		WorkflowsPage.getters.workflowOwner(INSTANCE_OWNER.email).click();
		cy.get('body').click(0, 0);

		WorkflowsPage.getters.workflowCards().should('have.length', 2);
		mainSidebar.actions.goToSettings();
		cy.go('back');

		WorkflowsPage.getters.workflowResetFilters().click();

		WorkflowsPage.getters.workflowCards().each(($el) => {
			const workflowName = $el.find('[data-test-id="workflow-card-name"]').text();

			WorkflowsPage.getters.workflowCardActions(workflowName).click();
			WorkflowsPage.getters.workflowDeleteButton().click();

			cy.get('button').contains('delete').click();
		});
	});
});
@@ -1,147 +0,0 @@
import { TemplatesPage } from '../pages/templates';
import { WorkflowPage } from '../pages/workflow';
import { TemplateWorkflowPage } from '../pages/template-workflow';
import OnboardingWorkflow from '../fixtures/Onboarding_workflow.json';
import WorkflowTemplate from '../fixtures/Workflow_template_write_http_query.json';

const templatesPage = new TemplatesPage();
const workflowPage = new WorkflowPage();
const templateWorkflowPage = new TemplateWorkflowPage();

describe.skip('In-app templates repository', () => {
	beforeEach(() => {
		cy.intercept('GET', '**/api/templates/search?page=1&rows=20&category=&search=', { fixture: 'templates_search/all_templates_search_response.json' }).as('searchRequest');
		cy.intercept('GET', '**/api/templates/search?page=1&rows=20&category=Sales*', { fixture: 'templates_search/sales_templates_search_response.json' }).as('categorySearchRequest');
		cy.intercept('GET', '**/api/templates/workflows/*', { fixture: 'templates_search/test_template_preview.json' }).as('singleTemplateRequest');
		cy.intercept('GET', '**/api/workflows/templates/*', { fixture: 'templates_search/test_template_import.json' }).as('singleTemplateRequest');
		cy.intercept('GET', '**/rest/settings', (req) => {
			// Disable cache
			delete req.headers['if-none-match']
			req.reply((res) => {
				if (res.body.data) {
					// Enable in-app templates by setting a custom host
					res.body.data.templates = { enabled: true, host: 'https://api-staging.n8n.io/api/' };
				}
			});
		}).as('settingsRequest');
	});

	it('can open onboarding flow', () => {
		templatesPage.actions.openOnboardingFlow(1, OnboardingWorkflow.name, OnboardingWorkflow, 'https://api-staging.n8n.io');
		cy.url().then(($url) => {
			expect($url).to.match(/.*\/workflow\/.*?onboardingId=1$/);
		})

		workflowPage.actions.shouldHaveWorkflowName(`Demo: ${name}`);

		workflowPage.getters.canvasNodes().should('have.length', 4);
		workflowPage.getters.stickies().should('have.length', 1);
		workflowPage.getters.canvasNodes().first().should('have.descendants', '.node-pin-data-icon');
	});

	it('can import template', () => {
		templatesPage.actions.importTemplate(1, OnboardingWorkflow.name, OnboardingWorkflow, 'https://api-staging.n8n.io');

		cy.url().then(($url) => {
			expect($url).to.include('/workflow/new?templateId=1');
		});

		workflowPage.getters.canvasNodes().should('have.length', 4);
		workflowPage.getters.stickies().should('have.length', 1);
		workflowPage.actions.shouldHaveWorkflowName(OnboardingWorkflow.name);
	});

	it('should save template id with the workflow', () => {
		cy.visit(templatesPage.url);
		cy.get('.el-skeleton.n8n-loading').should('not.exist');
		templatesPage.getters.firstTemplateCard().should('exist');
		templatesPage.getters.templatesLoadingContainer().should('not.exist');
		templatesPage.getters.firstTemplateCard().click();
		cy.url().should('include', '/templates/');

		cy.url().then(($url) => {
			const templateId = $url.split('/').pop();

			templatesPage.getters.useTemplateButton().click();
			cy.url().should('include', '/workflow/new');
			workflowPage.actions.saveWorkflowOnButtonClick();

			workflowPage.actions.selectAll();
			workflowPage.actions.hitCopy();

			cy.grantBrowserPermissions('clipboardReadWrite', 'clipboardSanitizedWrite');
			// Check workflow JSON by copying it to clipboard
			cy.readClipboard().then((workflowJSON) => {
				expect(workflowJSON).to.contain(`"templateId": "${templateId}"`);
			});
		});
	});

	it('can open template with images and hides workflow screenshots', () => {
		templateWorkflowPage.actions.openTemplate(WorkflowTemplate, 'https://api-staging.n8n.io');

		templateWorkflowPage.getters.description().find('img').should('have.length', 1);
	});

	it('renders search elements correctly', () => {
		cy.visit(templatesPage.url);
		templatesPage.getters.searchInput().should('exist');
		templatesPage.getters.allCategoriesFilter().should('exist');
		templatesPage.getters.categoryFilters().should('have.length.greaterThan', 1);
		templatesPage.getters.templateCards().should('have.length.greaterThan', 0);
	});

	it('can filter templates by category', () => {
		cy.visit(templatesPage.url);
		templatesPage.getters.templatesLoadingContainer().should('not.exist');
		templatesPage.getters.categoryFilter('sales').should('exist');
		let initialTemplateCount = 0;
		let initialCollectionCount = 0;

		templatesPage.getters.templateCountLabel().then(($el) => {
			initialTemplateCount = parseInt($el.text().replace(/\D/g, ''), 10);
			templatesPage.getters.collectionCountLabel().then(($el) => {
				initialCollectionCount = parseInt($el.text().replace(/\D/g, ''), 10);

				templatesPage.getters.categoryFilter('sales').click();
				templatesPage.getters.templatesLoadingContainer().should('not.exist');

				// Should have less templates and collections after selecting a category
				templatesPage.getters.templateCountLabel().should(($el) => {
					expect(parseInt($el.text().replace(/\D/g, ''), 10)).to.be.lessThan(initialTemplateCount);
				});
				templatesPage.getters.collectionCountLabel().should(($el) => {
					expect(parseInt($el.text().replace(/\D/g, ''), 10)).to.be.lessThan(initialCollectionCount);
				});
			});
		});
	});

	it('should preserve search query in URL', () => {
		cy.visit(templatesPage.url);
		templatesPage.getters.templatesLoadingContainer().should('not.exist');
		templatesPage.getters.categoryFilter('sales').should('exist');
		templatesPage.getters.categoryFilter('sales').click();
		templatesPage.getters.searchInput().type('auto');

		cy.url().should('include', '?categories=');
		cy.url().should('include', '&search=');

		cy.reload();

		// Should preserve search query in URL
		cy.url().should('include', '?categories=');
		cy.url().should('include', '&search=');

		// Sales category should still be selected
		templatesPage.getters.categoryFilter('sales').find('label').should('have.class', 'is-checked');
		// Search input should still have the search query
		templatesPage.getters.searchInput().should('have.value', 'auto');
		// Sales checkbox should be pushed to the top
		templatesPage.getters.categoryFilters().eq(1).then(($el) => {
			expect($el.text()).to.equal('Sales');
		});
	});
});
@@ -1,22 +1,30 @@
 import { INSTANCE_ADMIN, INSTANCE_MEMBERS } from '../constants';
-import { WorkflowsPage, WorkflowPage, CredentialsModal, CredentialsPage } from '../pages';
+import {
+	WorkflowsPage,
+	WorkflowPage,
+	CredentialsModal,
+	CredentialsPage,
+	WorkflowExecutionsTab,
+} from '../pages';
 import * as projects from '../composables/projects';

 const workflowsPage = new WorkflowsPage();
 const workflowPage = new WorkflowPage();
 const credentialsPage = new CredentialsPage();
 const credentialsModal = new CredentialsModal();
+const executionsTab = new WorkflowExecutionsTab();

 describe('Projects', () => {
-	beforeEach(() => {
+	before(() => {
 		cy.resetDatabase();
 		cy.enableFeature('sharing');
 		cy.enableFeature('advancedPermissions');
 		cy.enableFeature('projectRole:admin');
 		cy.enableFeature('projectRole:editor');
+		cy.changeQuota('maxTeamProjects', -1);
 	});

-	it('should handle workflows and credentials', () => {
+	it('should handle workflows and credentials and menu items', () => {
 		cy.signin(INSTANCE_ADMIN);
 		cy.visit(workflowsPage.url);
 		workflowsPage.getters.workflowCards().should('not.have.length');

@@ -126,9 +134,9 @@ describe('Projects', () => {
 		});

 		projects.getMenuItems().last().click();
-		cy.intercept('GET', '/rest/credentials*').as('credentialsList');
+		cy.intercept('GET', '/rest/credentials*').as('credentialsListProjectId');
 		projects.getProjectTabCredentials().click();
-		cy.wait('@credentialsList').then((interception) => {
+		cy.wait('@credentialsListProjectId').then((interception) => {
 			const url = new URL(interception.request.url);
 			const queryParams = new URLSearchParams(url.search);
 			const filter = queryParams.get('filter');

@@ -142,10 +150,82 @@ describe('Projects', () => {
 		projects.getHomeButton().click();
 		workflowsPage.getters.workflowCards().should('have.length', 2);

-		cy.intercept('GET', '/rest/credentials*').as('credentialsList');
+		cy.intercept('GET', '/rest/credentials*').as('credentialsListFilterless');
 		projects.getProjectTabCredentials().click();
-		cy.wait('@credentialsList').then((interception) => {
+		cy.wait('@credentialsListFilterless').then((interception) => {
 			expect(interception.request.url).not.to.contain('filter');
 		});
+
+		let menuItems = cy.getByTestId('menu-item');
+
+		menuItems.filter('[class*=active_]').should('have.length', 1);
+		menuItems.filter(':contains("Home")[class*=active_]').should('exist');
+
+		projects.getMenuItems().first().click();
+
+		menuItems = cy.getByTestId('menu-item');
+
+		menuItems.filter('[class*=active_]').should('have.length', 1);
+		menuItems.filter(':contains("Development")[class*=active_]').should('exist');
+
+		cy.intercept('GET', '/rest/workflows/*').as('loadWorkflow');
+		workflowsPage.getters.workflowCards().first().click();
+
+		cy.wait('@loadWorkflow');
+		menuItems = cy.getByTestId('menu-item');
+
+		menuItems.filter('[class*=active_]').should('have.length', 1);
+		menuItems.filter(':contains("Development")[class*=active_]').should('exist');
+
+		cy.intercept('GET', '/rest/executions*').as('loadExecutions');
+		executionsTab.actions.switchToExecutionsTab();
+
+		cy.wait('@loadExecutions');
+		menuItems = cy.getByTestId('menu-item');
+
+		menuItems.filter('[class*=active_]').should('have.length', 1);
+		menuItems.filter(':contains("Development")[class*=active_]').should('exist');
+
+		executionsTab.actions.switchToEditorTab();
+
+		menuItems = cy.getByTestId('menu-item');
+
+		menuItems.filter('[class*=active_]').should('have.length', 1);
+		menuItems.filter(':contains("Development")[class*=active_]').should('exist');
+
+		cy.getByTestId('menu-item').filter(':contains("Variables")').click();
+		cy.getByTestId('unavailable-resources-list').should('be.visible');
+
+		menuItems = cy.getByTestId('menu-item');
+
+		menuItems.filter('[class*=active_]').should('have.length', 1);
+		menuItems.filter(':contains("Variables")[class*=active_]').should('exist');
+
+		projects.getHomeButton().click();
+		menuItems = cy.getByTestId('menu-item');
+
+		menuItems.filter('[class*=active_]').should('have.length', 1);
+		menuItems.filter(':contains("Home")[class*=active_]').should('exist');
+
+		workflowsPage.getters.workflowCards().should('have.length', 2).first().click();
+
+		cy.wait('@loadWorkflow');
+		cy.getByTestId('execute-workflow-button').should('be.visible');
+
+		menuItems = cy.getByTestId('menu-item');
+		menuItems.filter(':contains("Home")[class*=active_]').should('not.exist');
+
+		menuItems = cy.getByTestId('menu-item');
+		menuItems.filter('[class*=active_]').should('have.length', 1);
+		menuItems.filter(':contains("Development")[class*=active_]').should('exist');
 	});

 	it('should not show project add button and projects to a member if not invited to any project', () => {
 		cy.signout();
 		cy.signin(INSTANCE_MEMBERS[1]);
 		cy.visit(workflowsPage.url);

 		projects.getAddProjectButton().should('not.exist');
 		projects.getMenuItems().should('not.exist');
 	});
 });
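One detail worth noting in this spec: each `cy.intercept` on the same `/rest/credentials*` route now gets its own alias, so a later `cy.wait` cannot accidentally resolve against an earlier, already fulfilled request. A condensed sketch of the idea, reusing the calls from the diff:

```ts
// Sketch: two intercepts on the same route, distinguished by alias.
cy.intercept('GET', '/rest/credentials*').as('credentialsListProjectId');
projects.getProjectTabCredentials().click();
cy.wait('@credentialsListProjectId'); // waits for the request made by this click

// ...navigate elsewhere, then register a fresh alias for the next request...
cy.intercept('GET', '/rest/credentials*').as('credentialsListFilterless');
projects.getProjectTabCredentials().click();
cy.wait('@credentialsListFilterless').then((interception) => {
	expect(interception.request.url).not.to.contain('filter');
});
```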
@@ -24,6 +24,14 @@ describe('NDV', () => {
 		ndv.getters.container().should('not.be.visible');
 	});

+	it('should show input panel when node is not connected', () => {
+		workflowPage.actions.addInitialNodeToCanvas('Manual');
+		workflowPage.actions.deselectAll();
+		workflowPage.actions.addNodeToCanvas('Set');
+		workflowPage.getters.canvasNodes().last().dblclick();
+		ndv.getters.container().should('be.visible').should('contain', 'Wire me up');
+	});
+
 	it('should test webhook node', () => {
 		workflowPage.actions.addInitialNodeToCanvas('Webhook');
 		workflowPage.getters.canvasNodes().first().dblclick();

@@ -105,13 +113,26 @@ describe('NDV', () => {
 	});

 	it('should show all validation errors when opening pasted node', () => {
-		cy.fixture('Test_workflow_ndv_errors.json').then((data) => {
-			cy.get('body').paste(JSON.stringify(data));
+		cy.createFixtureWorkflow('Test_workflow_ndv_errors.json', 'Validation errors');
 		workflowPage.getters.canvasNodes().should('have.have.length', 1);
 		workflowPage.actions.openNode('Airtable');
-		cy.get('.has-issues').should('have.length', 3);
+		cy.get('[class*=hasIssues]').should('have.length', 1);
 	});

+	it('should render run errors correctly', () => {
+		cy.createFixtureWorkflow('Test_workflow_ndv_run_error.json', 'Run error');
+		workflowPage.actions.openNode('Error');
+		ndv.actions.execute();
+		ndv.getters
+			.nodeRunErrorMessage()
+			.should('have.text', 'Info for expression missing from previous node');
+		ndv.getters
+			.nodeRunErrorDescription()
+			.should(
+				'contains.text',
+				"An expression here won't work because it uses .item and n8n can't figure out the matching item.",
+			);
+	});
+
 	it('should save workflow using keyboard shortcut from NDV', () => {

@@ -395,7 +416,11 @@ describe('NDV', () => {
 	});

 	it('should not retrieve remote options when a parameter value changes', () => {
-		cy.intercept('/rest/dynamic-node-parameters/options?**', cy.spy().as('fetchParameterOptions'));
+		cy.intercept(
+			'POST',
+			'/rest/dynamic-node-parameters/options',
+			cy.spy().as('fetchParameterOptions'),
+		);
 		workflowPage.actions.addInitialNodeToCanvas('E2e Test', { action: 'Remote Options' });
 		// Type something into the field
 		ndv.actions.typeIntoParameterInput('otherField', 'test');
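The updated intercept pins both the HTTP method and the exact path and passes `cy.spy()` as the handler, so the test can assert how many times the endpoint was actually hit. A small sketch of that pattern; the expected call count here is illustrative, not taken from the spec:

```ts
// Sketch: use a spy as the intercept handler and assert on its call count.
cy.intercept('POST', '/rest/dynamic-node-parameters/options', cy.spy().as('fetchParameterOptions'));

// ...interact with the UI in a way that should NOT refetch the remote options...

cy.get('@fetchParameterOptions').should('have.been.calledOnce');
```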
cypress/fixtures/Test_workflow_ndv_run_error.json (new file, 162 lines)

@@ -0,0 +1,162 @@
{
	"name": "My workflow 52",
	"nodes": [
		{
			"parameters": { "jsCode": "\nreturn [\n {\n \"field\": \"the same\"\n }\n];" },
			"id": "38c14c4a-7af1-4b04-be76-f8e474c95569",
			"name": "Break pairedItem chain",
			"type": "n8n-nodes-base.code",
			"typeVersion": 2,
			"position": [240, 1020]
		},
		{
			"parameters": { "options": {} },
			"id": "78c4964a-c4e8-47e5-81f3-89ba778feb8b",
			"name": "Edit Fields",
			"type": "n8n-nodes-base.set",
			"typeVersion": 3.2,
			"position": [40, 1020]
		},
		{
			"parameters": {},
			"id": "4f4c6527-d565-448a-96bd-8f5414caf8cc",
			"name": "When clicking \"Test workflow\"",
			"type": "n8n-nodes-base.manualTrigger",
			"typeVersion": 1,
			"position": [-180, 1020]
		},
		{
			"parameters": {
				"fields": { "values": [{ "stringValue": "={{ $('Edit Fields').item.json.name }}" }] },
				"options": {}
			},
			"id": "44f4e5da-bfe9-4dc3-8d1f-f38e9f364754",
			"name": "Error",
			"type": "n8n-nodes-base.set",
			"typeVersion": 3.2,
			"position": [460, 1020]
		}
	],
	"pinData": {
		"Edit Fields": [
			{ "json": { "id": "23423532", "name": "Jay Gatsby", "email": "gatsby@west-egg.com", "notes": "Keeps asking about a green light??", "country": "US", "created": "1925-04-10" } },
			{ "json": { "id": "23423533", "name": "José Arcadio Buendía", "email": "jab@macondo.co", "notes": "Lots of people named after him. Very confusing", "country": "CO", "created": "1967-05-05" } },
			{ "json": { "id": "23423534", "name": "Max Sendak", "email": "info@in-and-out-of-weeks.org", "notes": "Keeps rolling his terrible eyes", "country": "US", "created": "1963-04-09" } },
			{ "json": { "id": "23423535", "name": "Zaphod Beeblebrox", "email": "captain@heartofgold.com", "notes": "Felt like I was talking to more than one person", "country": null, "created": "1979-10-12" } },
			{ "json": { "id": "23423536", "name": "Edmund Pevensie", "email": "edmund@narnia.gov", "notes": "Passionate sailor", "country": "UK", "created": "1950-10-16" } }
		]
	},
	"connections": {
		"Break pairedItem chain": { "main": [[{ "node": "Error", "type": "main", "index": 0 }]] },
		"Edit Fields": { "main": [[{ "node": "Break pairedItem chain", "type": "main", "index": 0 }]] },
		"When clicking \"Test workflow\"": { "main": [[{ "node": "Edit Fields", "type": "main", "index": 0 }]] }
	},
	"active": false,
	"settings": { "executionOrder": "v1" },
	"versionId": "ca53267f-4eb4-481d-9e09-ecb97f6b09e2",
	"meta": {
		"templateCredsSetupCompleted": true,
		"instanceId": "27cc9b56542ad45b38725555722c50a1c3fee1670bbb67980558314ee08517c4"
	},
	"id": "6fr8GiRyMlZCiDQW",
	"tags": []
}
@@ -124,6 +124,8 @@ export class NDV extends BasePage {
 		codeEditorFullscreen: () => this.getters.codeEditorDialog().find('.cm-content'),
 		nodeRunSuccessIndicator: () => cy.getByTestId('node-run-info-success'),
 		nodeRunErrorIndicator: () => cy.getByTestId('node-run-info-danger'),
+		nodeRunErrorMessage: () => cy.getByTestId('node-error-message'),
+		nodeRunErrorDescription: () => cy.getByTestId('node-error-description'),
 	};

 	actions = {
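These two getters are what the new 'should render run errors correctly' spec relies on. In a test they read roughly like this, with the expected strings taken from that spec:

```ts
// Sketch: asserting on the NDV error panel through the new page-object getters.
ndv.actions.execute();
ndv.getters.nodeRunErrorMessage().should('have.text', 'Info for expression missing from previous node');
ndv.getters.nodeRunErrorDescription().should('contain.text', "An expression here won't work because it uses .item");
```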
@@ -7,6 +7,7 @@ export class MainSidebar extends BasePage {
 	getters = {
 		menuItem: (id: string) => cy.getByTestId('menu-item').get('#' + id),
 		settings: () => this.getters.menuItem('settings'),
+		settingsBack: () => cy.getByTestId('settings-back'),
 		templates: () => this.getters.menuItem('templates'),
 		workflows: () => this.getters.menuItem('workflows'),
 		credentials: () => this.getters.menuItem('credentials'),

@@ -30,8 +31,8 @@ export class MainSidebar extends BasePage {
 		openUserMenu: () => {
 			this.getters.userMenu().click();
 		},
-		openUserMenu: () => {
-			this.getters.userMenu().click();
+		closeSettings: () => {
+			this.getters.settingsBack().click();
 		},
 		signout: () => {
 			const workflowsPage = new WorkflowsPage();
@@ -1,4 +1,4 @@
-ARG NODE_VERSION=18
+ARG NODE_VERSION=20

 # 1. Use a builder step to download various dependencies
 FROM node:${NODE_VERSION}-alpine as builder

@@ -1,4 +1,4 @@
-ARG NODE_VERSION=18
+ARG NODE_VERSION=20

 # 1. Create an image to build n8n
 FROM --platform=linux/amd64 n8nio/base:${NODE_VERSION} as builder

@@ -1,4 +1,4 @@
-ARG NODE_VERSION=18
+ARG NODE_VERSION=20
 FROM n8nio/base:${NODE_VERSION}

 ARG N8N_VERSION

@@ -1,6 +1,6 @@
 {
 	"name": "n8n-monorepo",
-	"version": "1.42.0",
+	"version": "1.43.0",
 	"private": true,
 	"homepage": "https://n8n.io",
 	"engines": {

@@ -1,6 +1,6 @@
 {
 	"name": "@n8n/chat",
-	"version": "0.14.0",
+	"version": "0.15.0",
 	"scripts": {
 		"dev": "pnpm run storybook",
 		"build": "pnpm type-check && pnpm build:vite && pnpm run build:individual && npm run build:prepare",

@@ -1,6 +1,6 @@
 {
 	"name": "@n8n/client-oauth2",
-	"version": "0.14.0",
+	"version": "0.15.0",
 	"scripts": {
 		"clean": "rimraf dist .turbo",
 		"dev": "pnpm watch",

@@ -1,6 +1,6 @@
 {
 	"name": "@n8n/imap",
-	"version": "0.2.0",
+	"version": "0.3.0",
 	"scripts": {
 		"clean": "rimraf dist .turbo",
 		"dev": "pnpm watch",

@@ -149,5 +149,11 @@ module.exports = {
 			'n8n-nodes-base/node-param-type-options-password-missing': 'error',
 		},
 	},
+	{
+		files: ['**/*.test.ts', '**/test/**/*.ts'],
+		rules: {
+			'import/no-extraneous-dependencies': 'off',
+		},
+	},
 	],
 };
@@ -46,7 +46,7 @@ function getInputs(
 		[NodeConnectionType.AiOutputParser]: 'Output Parser',
 	};

-	return inputs.map(({ type, filter, required }) => {
+	return inputs.map(({ type, filter }) => {
 		const input: INodeInputConfiguration = {
 			type,
 			displayName: type in displayNames ? displayNames[type] : undefined,

@@ -370,13 +370,13 @@ export class Agent implements INodeType {
 		if (agentType === 'conversationalAgent') {
 			return await conversationalAgentExecute.call(this, nodeVersion);
 		} else if (agentType === 'toolsAgent') {
-			return await toolsAgentExecute.call(this, nodeVersion);
+			return await toolsAgentExecute.call(this);
 		} else if (agentType === 'openAiFunctionsAgent') {
 			return await openAiFunctionsAgentExecute.call(this, nodeVersion);
 		} else if (agentType === 'reActAgent') {
 			return await reActAgentAgentExecute.call(this, nodeVersion);
 		} else if (agentType === 'sqlAgent') {
-			return await sqlAgentAgentExecute.call(this, nodeVersion);
+			return await sqlAgentAgentExecute.call(this);
 		} else if (agentType === 'planAndExecuteAgent') {
 			return await planAndExecuteAgentExecute.call(this, nodeVersion);
 		}
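The dispatch change only touches the two executors that no longer read `nodeVersion`; the executor binds `this` to the node's execution context, so dropping the parameter from the signature also drops it at every call site. A minimal sketch under that assumption (the body is elided, `returnData` stands in for the collected items):

```ts
import type { IExecuteFunctions, INodeExecutionData } from 'n8n-workflow';

// Sketch: signature after the change, with `this` bound to the node context.
export async function toolsAgentExecute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
	this.logger.verbose('Executing Tools Agent');
	const returnData: INodeExecutionData[] = [];
	// ...run the agent per input item and push results into returnData...
	return [returnData];
}

// Call site inside the Agent node's execute() dispatch:
// return await toolsAgentExecute.call(this);
```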
@@ -13,6 +13,7 @@ import {
 	getConnectedTools,
 } from '../../../../../utils/helpers';
 import { getTracingConfig } from '../../../../../utils/tracing';
+import { throwIfToolSchema } from '../../../../../utils/schemaParsing';

 export async function conversationalAgentExecute(
 	this: IExecuteFunctions,

@@ -111,6 +112,8 @@ export async function conversationalAgentExecute(

 			returnData.push({ json: response });
 		} catch (error) {
+			throwIfToolSchema(this, error);
+
 			if (this.continueOnFail()) {
 				returnData.push({ json: { error: error.message }, pairedItem: { item: itemIndex } });
 				continue;

@@ -120,5 +123,5 @@ export async function conversationalAgentExecute(
 		}
 	}

-	return await this.prepareOutputData(returnData);
+	return [returnData];
 }
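The same two-part pattern repeats across the agent executors below: a `throwIfToolSchema` guard runs before the generic continue-on-fail handling, and the function returns the collected items directly instead of going through `prepareOutputData`. A condensed sketch of the per-item error path; the final rethrow is assumed rather than shown in the hunk:

```ts
try {
	// ...run the agent for this item and collect its output...
	returnData.push({ json: response });
} catch (error) {
	throwIfToolSchema(this, error); // surfaces tool-schema problems with a clearer error

	if (this.continueOnFail()) {
		returnData.push({ json: { error: error.message }, pairedItem: { item: itemIndex } });
		continue;
	}
	throw error; // assumed: errors propagate when continue-on-fail is off
}
// ...after the loop, a single output branch replaces prepareOutputData(returnData):
return [returnData];
```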
|
@ -125,5 +125,5 @@ export async function openAiFunctionsAgentExecute(
|
|||
}
|
||||
}
|
||||
|
||||
return await this.prepareOutputData(returnData);
|
||||
return [returnData];
|
||||
}
|
||||
|
|
|
@ -16,6 +16,7 @@ import {
|
|||
getPromptInputByType,
|
||||
} from '../../../../../utils/helpers';
|
||||
import { getTracingConfig } from '../../../../../utils/tracing';
|
||||
import { throwIfToolSchema } from '../../../../../utils/schemaParsing';
|
||||
|
||||
export async function planAndExecuteAgentExecute(
|
||||
this: IExecuteFunctions,
|
||||
|
@ -91,6 +92,7 @@ export async function planAndExecuteAgentExecute(
|
|||
|
||||
returnData.push({ json: response });
|
||||
} catch (error) {
|
||||
throwIfToolSchema(this, error);
|
||||
if (this.continueOnFail()) {
|
||||
returnData.push({ json: { error: error.message }, pairedItem: { item: itemIndex } });
|
||||
continue;
|
||||
|
@ -100,5 +102,5 @@ export async function planAndExecuteAgentExecute(
|
|||
}
|
||||
}
|
||||
|
||||
return await this.prepareOutputData(returnData);
|
||||
return [returnData];
|
||||
}
|
||||
|
|
|
@ -6,7 +6,7 @@ import {
|
|||
} from 'n8n-workflow';
|
||||
|
||||
import { AgentExecutor, ChatAgent, ZeroShotAgent } from 'langchain/agents';
|
||||
import type { BaseLanguageModel } from 'langchain/base_language';
|
||||
import type { BaseLanguageModel } from '@langchain/core/language_models/base';
|
||||
import type { BaseOutputParser } from '@langchain/core/output_parsers';
|
||||
import { PromptTemplate } from '@langchain/core/prompts';
|
||||
import { CombiningOutputParser } from 'langchain/output_parsers';
|
||||
|
@ -18,6 +18,7 @@ import {
|
|||
isChatInstance,
|
||||
} from '../../../../../utils/helpers';
|
||||
import { getTracingConfig } from '../../../../../utils/tracing';
|
||||
import { throwIfToolSchema } from '../../../../../utils/schemaParsing';
|
||||
|
||||
export async function reActAgentAgentExecute(
|
||||
this: IExecuteFunctions,
|
||||
|
@ -112,6 +113,7 @@ export async function reActAgentAgentExecute(
|
|||
|
||||
returnData.push({ json: response });
|
||||
} catch (error) {
|
||||
throwIfToolSchema(this, error);
|
||||
if (this.continueOnFail()) {
|
||||
returnData.push({ json: { error: error.message }, pairedItem: { item: itemIndex } });
|
||||
continue;
|
||||
|
@ -121,5 +123,5 @@ export async function reActAgentAgentExecute(
|
|||
}
|
||||
}
|
||||
|
||||
return await this.prepareOutputData(returnData);
|
||||
return [returnData];
|
||||
}
|
||||
|
|
|
@ -28,7 +28,6 @@ const parseTablesString = (tablesString: string) =>

export async function sqlAgentAgentExecute(
this: IExecuteFunctions,
nodeVersion: number,
): Promise<INodeExecutionData[][]> {
this.logger.verbose('Executing SQL Agent');

@ -152,5 +151,5 @@ export async function sqlAgentAgentExecute(
}
}

return await this.prepareOutputData(returnData);
return [returnData];
}

@ -39,10 +39,7 @@ function getOutputParserSchema(outputParser: BaseOutputParser): ZodObject<any, a
return schema;
}

export async function toolsAgentExecute(
this: IExecuteFunctions,
nodeVersion: number,
): Promise<INodeExecutionData[][]> {
export async function toolsAgentExecute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
this.logger.verbose('Executing Tools Agent');
const model = await this.getInputConnectionData(NodeConnectionType.AiLanguageModel, 0);

@ -185,5 +182,5 @@ export async function toolsAgentExecute(
}
}

return await this.prepareOutputData(returnData);
return [returnData];
}
@ -392,6 +392,6 @@ export class OpenAiAssistant implements INodeType {
}
}

return await this.prepareOutputData(returnData);
return [returnData];
}
}

@ -189,6 +189,6 @@ export class ChainRetrievalQa implements INodeType {
throw error;
}
}
return await this.prepareOutputData(returnData);
return [returnData];
}
}

@ -258,6 +258,6 @@ export class ChainSummarizationV1 implements INodeType {
returnData.push({ json: { response } });
}

return await this.prepareOutputData(returnData);
return [returnData];
}
}

@ -11,8 +11,8 @@ import type {
import { loadSummarizationChain } from 'langchain/chains';
import type { BaseLanguageModel } from '@langchain/core/language_models/base';
import type { Document } from '@langchain/core/documents';
import type { TextSplitter } from 'langchain/text_splitter';
import { RecursiveCharacterTextSplitter } from 'langchain/text_splitter';
import type { TextSplitter } from '@langchain/textsplitters';
import { RecursiveCharacterTextSplitter } from '@langchain/textsplitters';
import { N8nJsonLoader } from '../../../../utils/N8nJsonLoader';
import { N8nBinaryLoader } from '../../../../utils/N8nBinaryLoader';
import { getTemplateNoticeField } from '../../../../utils/sharedFields';

@ -425,6 +425,6 @@ export class ChainSummarizationV2 implements INodeType {
}
}

return await this.prepareOutputData(returnData);
return [returnData];
}
}
@ -92,6 +92,8 @@ function getSandbox(
// eslint-disable-next-line @typescript-eslint/unbound-method
context.executeWorkflow = this.executeWorkflow;
// eslint-disable-next-line @typescript-eslint/unbound-method
context.getWorkflowDataProxy = this.getWorkflowDataProxy;
// eslint-disable-next-line @typescript-eslint/unbound-method
context.logger = this.logger;

if (options?.addItems) {
@ -7,7 +7,7 @@ import {
|
|||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
import type { TextSplitter } from 'langchain/text_splitter';
|
||||
import type { TextSplitter } from '@langchain/textsplitters';
|
||||
|
||||
import { logWrapper } from '../../../utils/logWrapper';
|
||||
import { N8nBinaryLoader } from '../../../utils/N8nBinaryLoader';
|
||||
|
|
|
@ -7,7 +7,7 @@ import {
|
|||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
import type { TextSplitter } from 'langchain/text_splitter';
|
||||
import type { TextSplitter } from '@langchain/textsplitters';
|
||||
import { logWrapper } from '../../../utils/logWrapper';
|
||||
import { N8nBinaryLoader } from '../../../utils/N8nBinaryLoader';
|
||||
import { metadataFilterField } from '../../../utils/sharedFields';
|
||||
|
|
|
@ -6,8 +6,8 @@ import {
|
|||
type INodeTypeDescription,
|
||||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
import { GithubRepoLoader } from 'langchain/document_loaders/web/github';
|
||||
import type { CharacterTextSplitter } from 'langchain/text_splitter';
|
||||
import { GithubRepoLoader } from '@langchain/community/document_loaders/web/github';
|
||||
import type { CharacterTextSplitter } from '@langchain/textsplitters';
|
||||
import { logWrapper } from '../../../utils/logWrapper';
|
||||
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
|
||||
|
||||
|
@ -116,7 +116,9 @@ export class DocumentGithubLoader implements INodeType {
|
|||
accessToken: (credentials.accessToken as string) || '',
|
||||
});
|
||||
|
||||
const loadedDocs = textSplitter ? await docs.loadAndSplit(textSplitter) : await docs.load();
|
||||
const loadedDocs = textSplitter
|
||||
? await textSplitter.splitDocuments(await docs.load())
|
||||
: await docs.load();
|
||||
|
||||
return {
|
||||
response: logWrapper(loadedDocs, this),
|
||||
|
|
|
@ -7,7 +7,7 @@ import {
|
|||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
import type { TextSplitter } from 'langchain/text_splitter';
|
||||
import type { TextSplitter } from '@langchain/textsplitters';
|
||||
import { logWrapper } from '../../../utils/logWrapper';
|
||||
import { N8nJsonLoader } from '../../../utils/N8nJsonLoader';
|
||||
import { getConnectionHintNoticeField, metadataFilterField } from '../../../utils/sharedFields';
|
||||
|
|
|
@ -98,7 +98,7 @@ export class MemoryChatRetriever implements INodeType {
|
|||
const messages = await memory?.chatHistory.getMessages();
|
||||
|
||||
if (simplifyOutput && messages) {
|
||||
return await this.prepareOutputData(simplifyMessages(messages));
|
||||
return [simplifyMessages(messages)];
|
||||
}
|
||||
|
||||
const serializedMessages =
|
||||
|
@ -107,6 +107,6 @@ export class MemoryChatRetriever implements INodeType {
|
|||
return { json: serializedMessage as unknown as IDataObject };
|
||||
}) ?? [];
|
||||
|
||||
return await this.prepareOutputData(serializedMessages);
|
||||
return [serializedMessages];
|
||||
}
|
||||
}
|
||||
|
|
|
@ -13,11 +13,15 @@ import type { JSONSchema7 } from 'json-schema';
|
|||
import { StructuredOutputParser } from 'langchain/output_parsers';
|
||||
import { OutputParserException } from '@langchain/core/output_parsers';
|
||||
import get from 'lodash/get';
|
||||
import { getSandboxContext } from 'n8n-nodes-base/dist/nodes/Code/Sandbox';
|
||||
import { JavaScriptSandbox } from 'n8n-nodes-base/dist/nodes/Code/JavaScriptSandbox';
|
||||
import { makeResolverFromLegacyOptions } from '@n8n/vm2';
|
||||
import type { JavaScriptSandbox } from 'n8n-nodes-base/dist/nodes/Code/JavaScriptSandbox';
|
||||
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
|
||||
import { logWrapper } from '../../../utils/logWrapper';
|
||||
import { generateSchema, getSandboxWithZod } from '../../../utils/schemaParsing';
|
||||
import {
|
||||
inputSchemaField,
|
||||
jsonSchemaExampleField,
|
||||
schemaTypeField,
|
||||
} from '../../../utils/descriptions';
|
||||
|
||||
const STRUCTURED_OUTPUT_KEY = '__structured__output';
|
||||
const STRUCTURED_OUTPUT_OBJECT_KEY = '__structured__output__object';
|
||||
|
@ -87,8 +91,8 @@ export class OutputParserStructured implements INodeType {
|
|||
name: 'outputParserStructured',
|
||||
icon: 'fa:code',
|
||||
group: ['transform'],
|
||||
version: [1, 1.1],
|
||||
defaultVersion: 1.1,
|
||||
version: [1, 1.1, 1.2],
|
||||
defaultVersion: 1.2,
|
||||
description: 'Return data in a defined JSON format',
|
||||
defaults: {
|
||||
name: 'Structured Output Parser',
|
||||
|
@ -115,6 +119,33 @@ export class OutputParserStructured implements INodeType {
|
|||
outputNames: ['Output Parser'],
|
||||
properties: [
|
||||
getConnectionHintNoticeField([NodeConnectionType.AiChain, NodeConnectionType.AiAgent]),
|
||||
{ ...schemaTypeField, displayOptions: { show: { '@version': [{ _cnd: { gte: 1.2 } }] } } },
|
||||
{
|
||||
...jsonSchemaExampleField,
|
||||
default: `{
|
||||
"state": "California",
|
||||
"cities": ["Los Angeles", "San Francisco", "San Diego"]
|
||||
}`,
|
||||
},
|
||||
{
|
||||
...inputSchemaField,
|
||||
displayName: 'JSON Schema',
|
||||
description: 'JSON Schema to structure and validate the output against',
|
||||
default: `{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"state": {
|
||||
"type": "string"
|
||||
},
|
||||
"cities": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
}`,
|
||||
},
|
||||
{
|
||||
displayName: 'JSON Schema',
|
||||
name: 'jsonSchema',
|
||||
|
@ -138,6 +169,11 @@ export class OutputParserStructured implements INodeType {
|
|||
rows: 10,
|
||||
},
|
||||
required: true,
|
||||
displayOptions: {
|
||||
show: {
|
||||
'@version': [{ _cnd: { lte: 1.1 } }],
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
displayName:
|
||||
|
@ -145,72 +181,36 @@ export class OutputParserStructured implements INodeType {
|
|||
name: 'notice',
|
||||
type: 'notice',
|
||||
default: '',
|
||||
displayOptions: {
|
||||
hide: {
|
||||
schemaType: ['fromJson'],
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
const schema = this.getNodeParameter('jsonSchema', itemIndex) as string;
|
||||
const schemaType = this.getNodeParameter('schemaType', itemIndex, '') as 'fromJson' | 'manual';
|
||||
// We initialize these even though one of them will always be empty
|
||||
// it makes it easer to navigate the ternary operator
|
||||
const jsonExample = this.getNodeParameter('jsonSchemaExample', itemIndex, '') as string;
|
||||
let inputSchema: string;
|
||||
|
||||
let itemSchema: JSONSchema7;
|
||||
try {
|
||||
itemSchema = jsonParse<JSONSchema7>(schema);
|
||||
|
||||
// If the type is not defined, we assume it's an object
|
||||
if (itemSchema.type === undefined) {
|
||||
itemSchema = {
|
||||
type: 'object',
|
||||
properties: itemSchema.properties ?? (itemSchema as { [key: string]: JSONSchema7 }),
|
||||
};
|
||||
}
|
||||
} catch (error) {
|
||||
throw new NodeOperationError(this.getNode(), 'Error during parsing of JSON Schema.');
|
||||
if (this.getNode().typeVersion <= 1.1) {
|
||||
inputSchema = this.getNodeParameter('jsonSchema', itemIndex, '') as string;
|
||||
} else {
|
||||
inputSchema = this.getNodeParameter('inputSchema', itemIndex, '') as string;
|
||||
}
|
||||
|
||||
const vmResolver = makeResolverFromLegacyOptions({
|
||||
external: {
|
||||
modules: ['json-schema-to-zod', 'zod'],
|
||||
transitive: false,
|
||||
},
|
||||
resolve(moduleName, parentDirname) {
|
||||
if (moduleName === 'json-schema-to-zod') {
|
||||
return require.resolve(
|
||||
'@n8n/n8n-nodes-langchain/node_modules/json-schema-to-zod/dist/cjs/jsonSchemaToZod.js',
|
||||
{
|
||||
paths: [parentDirname],
|
||||
},
|
||||
);
|
||||
}
|
||||
if (moduleName === 'zod') {
|
||||
return require.resolve('@n8n/n8n-nodes-langchain/node_modules/zod.cjs', {
|
||||
paths: [parentDirname],
|
||||
});
|
||||
}
|
||||
return;
|
||||
},
|
||||
builtin: [],
|
||||
});
|
||||
const context = getSandboxContext.call(this, itemIndex);
|
||||
// Make sure to remove the description from root schema
|
||||
const { description, ...restOfSchema } = itemSchema;
|
||||
const sandboxedSchema = new JavaScriptSandbox(
|
||||
context,
|
||||
`
|
||||
const { z } = require('zod');
|
||||
const { parseSchema } = require('json-schema-to-zod');
|
||||
const zodSchema = parseSchema(${JSON.stringify(restOfSchema)});
|
||||
const itemSchema = new Function('z', 'return (' + zodSchema + ')')(z)
|
||||
return itemSchema
|
||||
`,
|
||||
itemIndex,
|
||||
this.helpers,
|
||||
{ resolver: vmResolver },
|
||||
);
|
||||
const jsonSchema =
|
||||
schemaType === 'fromJson' ? generateSchema(jsonExample) : jsonParse<JSONSchema7>(inputSchema);
|
||||
|
||||
const zodSchemaSandbox = getSandboxWithZod(this, jsonSchema, 0);
|
||||
const nodeVersion = this.getNode().typeVersion;
|
||||
try {
|
||||
const parser = await N8nStructuredOutputParser.fromZedJsonSchema(
|
||||
sandboxedSchema,
|
||||
zodSchemaSandbox,
|
||||
nodeVersion,
|
||||
);
|
||||
return {
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
import type { IExecuteFunctions, IWorkflowDataProxyData } from 'n8n-workflow';
|
||||
import type { IExecuteFunctions, INode, IWorkflowDataProxyData } from 'n8n-workflow';
|
||||
import { mock } from 'jest-mock-extended';
|
||||
import { normalizeItems } from 'n8n-core';
|
||||
import type { z } from 'zod';
|
||||
|
@ -12,7 +12,7 @@ describe('OutputParserStructured', () => {
|
|||
});
|
||||
const workflowDataProxy = mock<IWorkflowDataProxyData>({ $input: mock() });
|
||||
thisArg.getWorkflowDataProxy.mockReturnValue(workflowDataProxy);
|
||||
thisArg.getNode.mockReturnValue({ typeVersion: 1.1 });
|
||||
thisArg.getNode.mockReturnValue(mock<INode>({ typeVersion: 1.1 }));
|
||||
thisArg.addInputData.mockReturnValue({ index: 0 });
|
||||
thisArg.addOutputData.mockReturnValue();
|
||||
|
||||
|
|
|
@ -6,8 +6,8 @@ import {
|
|||
type INodeTypeDescription,
|
||||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
import type { CharacterTextSplitterParams } from 'langchain/text_splitter';
|
||||
import { CharacterTextSplitter } from 'langchain/text_splitter';
|
||||
import type { CharacterTextSplitterParams } from '@langchain/textsplitters';
|
||||
import { CharacterTextSplitter } from '@langchain/textsplitters';
|
||||
import { logWrapper } from '../../../utils/logWrapper';
|
||||
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
|
||||
|
||||
|
|
|
@ -9,8 +9,8 @@ import {
|
|||
import type {
|
||||
RecursiveCharacterTextSplitterParams,
|
||||
SupportedTextSplitterLanguage,
|
||||
} from 'langchain/text_splitter';
|
||||
import { RecursiveCharacterTextSplitter } from 'langchain/text_splitter';
|
||||
} from '@langchain/textsplitters';
|
||||
import { RecursiveCharacterTextSplitter } from '@langchain/textsplitters';
|
||||
import { logWrapper } from '../../../utils/logWrapper';
|
||||
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
|
||||
|
||||
|
|
|
@ -6,7 +6,7 @@ import {
|
|||
type INodeTypeDescription,
|
||||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
import { TokenTextSplitter } from 'langchain/text_splitter';
|
||||
import { TokenTextSplitter } from '@langchain/textsplitters';
|
||||
import { logWrapper } from '../../../utils/logWrapper';
|
||||
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
|
||||
|
||||
|
|
|
@ -9,16 +9,23 @@ import type {
|
|||
ExecutionError,
|
||||
IDataObject,
|
||||
} from 'n8n-workflow';
|
||||
import { NodeConnectionType, NodeOperationError } from 'n8n-workflow';
|
||||
import { NodeConnectionType, NodeOperationError, jsonParse } from 'n8n-workflow';
|
||||
import type { SetField, SetNodeOptions } from 'n8n-nodes-base/dist/nodes/Set/v2/helpers/interfaces';
|
||||
import * as manual from 'n8n-nodes-base/dist/nodes/Set/v2/manual.mode';
|
||||
|
||||
import { DynamicTool } from '@langchain/core/tools';
|
||||
import { DynamicStructuredTool, DynamicTool } from '@langchain/core/tools';
|
||||
import get from 'lodash/get';
|
||||
import isObject from 'lodash/isObject';
|
||||
import type { CallbackManagerForToolRun } from '@langchain/core/callbacks/manager';
|
||||
import type { JSONSchema7 } from 'json-schema';
|
||||
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
|
||||
|
||||
import type { DynamicZodObject } from '../../../types/zod.types';
|
||||
import { generateSchema, getSandboxWithZod } from '../../../utils/schemaParsing';
|
||||
import {
|
||||
jsonSchemaExampleField,
|
||||
schemaTypeField,
|
||||
inputSchemaField,
|
||||
} from '../../../utils/descriptions';
|
||||
export class ToolWorkflow implements INodeType {
|
||||
description: INodeTypeDescription = {
|
||||
displayName: 'Custom n8n Workflow Tool',
|
||||
|
@ -314,6 +321,21 @@ export class ToolWorkflow implements INodeType {
|
|||
},
|
||||
],
|
||||
},
|
||||
// ----------------------------------
|
||||
// Output Parsing
|
||||
// ----------------------------------
|
||||
{
|
||||
displayName: 'Specify Input Schema',
|
||||
name: 'specifyInputSchema',
|
||||
type: 'boolean',
|
||||
description:
|
||||
'Whether to specify the schema for the function. This would require the LLM to provide the input in the correct format and would validate it against the schema.',
|
||||
noDataExpression: true,
|
||||
default: false,
|
||||
},
|
||||
{ ...schemaTypeField, displayOptions: { show: { specifyInputSchema: [true] } } },
|
||||
jsonSchemaExampleField,
|
||||
inputSchemaField,
|
||||
],
|
||||
};
|
||||
|
||||
|
@ -321,8 +343,11 @@ export class ToolWorkflow implements INodeType {
|
|||
const name = this.getNodeParameter('name', itemIndex) as string;
|
||||
const description = this.getNodeParameter('description', itemIndex) as string;
|
||||
|
||||
const useSchema = this.getNodeParameter('specifyInputSchema', itemIndex) as boolean;
|
||||
let tool: DynamicTool | DynamicStructuredTool | undefined = undefined;
|
||||
|
||||
const runFunction = async (
|
||||
query: string,
|
||||
query: string | IDataObject,
|
||||
runManager?: CallbackManagerForToolRun,
|
||||
): Promise<string> => {
|
||||
const source = this.getNodeParameter('source', itemIndex) as string;
|
||||
|
@ -416,12 +441,10 @@ export class ToolWorkflow implements INodeType {
|
|||
return response;
|
||||
};
|
||||
|
||||
return {
|
||||
response: new DynamicTool({
|
||||
name,
|
||||
description,
|
||||
|
||||
func: async (query: string, runManager?: CallbackManagerForToolRun): Promise<string> => {
|
||||
const toolHandler = async (
|
||||
query: string | IDataObject,
|
||||
runManager?: CallbackManagerForToolRun,
|
||||
): Promise<string> => {
|
||||
const { index } = this.addInputData(NodeConnectionType.AiTool, [[{ json: { query } }]]);
|
||||
|
||||
let response: string = '';
|
||||
|
@ -458,8 +481,46 @@ export class ToolWorkflow implements INodeType {
|
|||
void this.addOutputData(NodeConnectionType.AiTool, index, [[{ json: { response } }]]);
|
||||
}
|
||||
return response;
|
||||
},
|
||||
}),
|
||||
};
|
||||
|
||||
const functionBase = {
|
||||
name,
|
||||
description,
|
||||
func: toolHandler,
|
||||
};
|
||||
|
||||
if (useSchema) {
|
||||
try {
|
||||
// We initialize these even though one of them will always be empty
|
||||
// it makes it easer to navigate the ternary operator
|
||||
const jsonExample = this.getNodeParameter('jsonSchemaExample', itemIndex, '') as string;
|
||||
const inputSchema = this.getNodeParameter('inputSchema', itemIndex, '') as string;
|
||||
|
||||
const schemaType = this.getNodeParameter('schemaType', itemIndex) as 'fromJson' | 'manual';
|
||||
const jsonSchema =
|
||||
schemaType === 'fromJson'
|
||||
? generateSchema(jsonExample)
|
||||
: jsonParse<JSONSchema7>(inputSchema);
|
||||
|
||||
const zodSchemaSandbox = getSandboxWithZod(this, jsonSchema, 0);
|
||||
const zodSchema = (await zodSchemaSandbox.runCode()) as DynamicZodObject;
|
||||
|
||||
tool = new DynamicStructuredTool<typeof zodSchema>({
|
||||
schema: zodSchema,
|
||||
...functionBase,
|
||||
});
|
||||
} catch (error) {
|
||||
throw new NodeOperationError(
|
||||
this.getNode(),
|
||||
'Error during parsing of JSON Schema. \n ' + error,
|
||||
);
|
||||
}
|
||||
} else {
|
||||
tool = new DynamicTool(functionBase);
|
||||
}
|
||||
|
||||
return {
|
||||
response: tool,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
|
|
@ -108,6 +108,6 @@ export class VectorStoreInMemoryInsert implements INodeType {
|
|||
clearStore,
|
||||
);
|
||||
|
||||
return await this.prepareOutputData(serializedDocuments);
|
||||
return [serializedDocuments];
|
||||
}
|
||||
}
|
||||
|
|
|
@ -134,6 +134,6 @@ export class VectorStorePineconeInsert implements INodeType {
|
|||
pineconeIndex,
|
||||
});
|
||||
|
||||
return await this.prepareOutputData(serializedDocuments);
|
||||
return [serializedDocuments];
|
||||
}
|
||||
}
|
||||
|
|
|
@ -46,7 +46,7 @@ export const VectorStoreQdrant = createVectorStoreNode({
|
|||
methods: { listSearch: { qdrantCollectionsSearch } },
|
||||
insertFields,
|
||||
sharedFields,
|
||||
async getVectorStoreClient(context, filter, embeddings, itemIndex) {
|
||||
async getVectorStoreClient(context, _, embeddings, itemIndex) {
|
||||
const collection = context.getNodeParameter('qdrantCollection', itemIndex, '', {
|
||||
extractValue: true,
|
||||
}) as string;
|
||||
|
|
|
@ -122,6 +122,6 @@ export class VectorStoreSupabaseInsert implements INodeType {
|
|||
queryName,
|
||||
});
|
||||
|
||||
return await this.prepareOutputData(serializedDocuments);
|
||||
return [serializedDocuments];
|
||||
}
|
||||
}
|
||||
|
|
|
@ -139,6 +139,6 @@ export class VectorStoreZepInsert implements INodeType {
|
|||
|
||||
await ZepVectorStore.fromDocuments(processedDocuments, embeddings, zepConfig);
|
||||
|
||||
return await this.prepareOutputData(serializedDocuments);
|
||||
return [serializedDocuments];
|
||||
}
|
||||
}
|
||||
|
|
|
@ -240,7 +240,7 @@ export const createVectorStoreNode = (args: VectorStoreNodeConstructorArgs) =>
|
|||
void logAiEvent(this, 'n8n.ai.vector.store.searched', { query: prompt });
|
||||
}
|
||||
|
||||
return await this.prepareOutputData(resultData);
|
||||
return [resultData];
|
||||
}
|
||||
|
||||
if (mode === 'insert') {
|
||||
|
@ -270,7 +270,7 @@ export const createVectorStoreNode = (args: VectorStoreNodeConstructorArgs) =>
|
|||
}
|
||||
}
|
||||
|
||||
return await this.prepareOutputData(resultData);
|
||||
return [resultData];
|
||||
}
|
||||
|
||||
throw new NodeOperationError(
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "@n8n/n8n-nodes-langchain",
|
||||
"version": "1.42.0",
|
||||
"version": "1.43.0",
|
||||
"description": "",
|
||||
"license": "SEE LICENSE IN LICENSE.md",
|
||||
"homepage": "https://n8n.io",
|
||||
|
@ -120,59 +120,59 @@
|
|||
]
|
||||
},
|
||||
"devDependencies": {
|
||||
"@aws-sdk/types": "3.357.0",
|
||||
"@aws-sdk/types": "^3.535.0",
|
||||
"@types/basic-auth": "^1.1.3",
|
||||
"@types/express": "^4.17.21",
|
||||
"@types/html-to-text": "^9.0.1",
|
||||
"@types/json-schema": "^7.0.15",
|
||||
"@types/temp": "^0.9.1",
|
||||
"eslint-plugin-n8n-nodes-base": "^1.16.0",
|
||||
"n8n-core": "workspace:*"
|
||||
},
|
||||
"dependencies": {
|
||||
"@aws-sdk/client-bedrock-runtime": "3.549.0",
|
||||
"@aws-sdk/credential-provider-node": "3.549.0",
|
||||
"@aws-sdk/client-bedrock-runtime": "3.535.0",
|
||||
"@aws-sdk/credential-provider-node": "3.535.0",
|
||||
"@getzep/zep-js": "0.9.0",
|
||||
"@google-ai/generativelanguage": "0.2.1",
|
||||
"@google/generative-ai": "0.8.0",
|
||||
"@huggingface/inference": "2.6.4",
|
||||
"@langchain/anthropic": "^0.1.16",
|
||||
"@langchain/cohere": "^0.0.8",
|
||||
"@langchain/community": "0.0.53",
|
||||
"@langchain/core": "0.1.61",
|
||||
"@langchain/google-genai": "^0.0.12",
|
||||
"@langchain/groq": "^0.0.8",
|
||||
"@langchain/mistralai": "0.0.19",
|
||||
"@langchain/openai": "^0.0.28",
|
||||
"@langchain/pinecone": "^0.0.4",
|
||||
"@langchain/redis": "^0.0.2",
|
||||
"@google-ai/generativelanguage": "2.5.0",
|
||||
"@google/generative-ai": "0.11.4",
|
||||
"@huggingface/inference": "2.7.0",
|
||||
"@langchain/anthropic": "0.1.21",
|
||||
"@langchain/cohere": "0.0.10",
|
||||
"@langchain/community": "0.2.2",
|
||||
"@langchain/core": "0.2.0",
|
||||
"@langchain/google-genai": "0.0.16",
|
||||
"@langchain/groq": "0.0.12",
|
||||
"@langchain/mistralai": "0.0.22",
|
||||
"@langchain/openai": "0.0.33",
|
||||
"@langchain/pinecone": "0.0.6",
|
||||
"@langchain/redis": "0.0.5",
|
||||
"@langchain/textsplitters": "0.0.2",
|
||||
"@n8n/typeorm": "0.3.20-9",
|
||||
"@n8n/vm2": "3.9.20",
|
||||
"@pinecone-database/pinecone": "2.2.0",
|
||||
"@qdrant/js-client-rest": "1.7.0",
|
||||
"@supabase/supabase-js": "2.38.5",
|
||||
"@xata.io/client": "0.28.0",
|
||||
"@pinecone-database/pinecone": "2.2.1",
|
||||
"@qdrant/js-client-rest": "1.9.0",
|
||||
"@supabase/supabase-js": "2.43.4",
|
||||
"@xata.io/client": "0.28.4",
|
||||
"basic-auth": "2.0.1",
|
||||
"cohere-ai": "6.2.2",
|
||||
"cohere-ai": "7.10.1",
|
||||
"d3-dsv": "2.0.0",
|
||||
"epub2": "3.0.2",
|
||||
"form-data": "4.0.0",
|
||||
"generate-schema": "2.6.0",
|
||||
"html-to-text": "9.0.5",
|
||||
"jest-mock-extended": "^3.0.4",
|
||||
"json-schema-to-zod": "2.0.14",
|
||||
"langchain": "0.1.36",
|
||||
"json-schema-to-zod": "2.1.0",
|
||||
"langchain": "0.2.2",
|
||||
"lodash": "4.17.21",
|
||||
"mammoth": "1.6.0",
|
||||
"mammoth": "1.7.2",
|
||||
"n8n-nodes-base": "workspace:*",
|
||||
"n8n-workflow": "workspace:*",
|
||||
"openai": "4.38.5",
|
||||
"openai": "4.47.1",
|
||||
"pdf-parse": "1.1.1",
|
||||
"pg": "8.11.3",
|
||||
"redis": "4.6.12",
|
||||
"sqlite3": "5.1.7",
|
||||
"temp": "0.9.4",
|
||||
"tmp-promise": "3.0.3",
|
||||
"zod": "3.22.4",
|
||||
"zod-to-json-schema": "3.22.4"
|
||||
"zod": "3.23.8",
|
||||
"zod-to-json-schema": "3.23.0"
|
||||
}
|
||||
}
|
||||
|
|
|
@ -11,7 +11,8 @@
"credentials/**/*.ts",
"nodes/**/*.ts",
"nodes/**/*.json",
"credentials/translations/**/*.json"
"credentials/translations/**/*.json",
"types/*.ts"
],
"exclude": ["nodes/**/*.test.ts", "test/**"]
}

@ -20,5 +20,5 @@
"skipLibCheck": true,
"outDir": "./dist/"
},
"include": ["credentials/**/*", "nodes/**/*", "utils/**/*.ts", "nodes/**/*.json"]
"include": ["credentials/**/*", "nodes/**/*", "utils/**/*.ts", "nodes/**/*.json", "types/*.ts"]
}
27 packages/@n8n/nodes-langchain/types/generate-schema.d.ts vendored Normal file
@ -0,0 +1,27 @@
|
|||
declare module 'generate-schema' {
|
||||
export interface SchemaObject {
|
||||
$schema: string;
|
||||
title?: string;
|
||||
type: string;
|
||||
properties?: {
|
||||
[key: string]: SchemaObject | SchemaArray | SchemaProperty;
|
||||
};
|
||||
required?: string[];
|
||||
items?: SchemaObject | SchemaArray;
|
||||
}
|
||||
|
||||
export interface SchemaArray {
|
||||
type: string;
|
||||
items?: SchemaObject | SchemaArray | SchemaProperty;
|
||||
oneOf?: Array<SchemaObject | SchemaArray | SchemaProperty>;
|
||||
required?: string[];
|
||||
}
|
||||
|
||||
export interface SchemaProperty {
|
||||
type: string | string[];
|
||||
format?: string;
|
||||
}
|
||||
|
||||
export function json(title: string, schema: SchemaObject): SchemaObject;
|
||||
export function json(schema: SchemaObject): SchemaObject;
|
||||
}
|
4 packages/@n8n/nodes-langchain/types/zod.types.ts Normal file
@ -0,0 +1,4 @@
import type { z } from 'zod';

// eslint-disable-next-line @typescript-eslint/no-explicit-any
export type DynamicZodObject = z.ZodObject<any, any, any, any>;

@ -3,14 +3,14 @@ import { createWriteStream } from 'fs';
|
|||
import type { IExecuteFunctions, INodeExecutionData } from 'n8n-workflow';
|
||||
import { NodeOperationError, BINARY_ENCODING } from 'n8n-workflow';
|
||||
|
||||
import type { TextSplitter } from 'langchain/text_splitter';
|
||||
import type { TextSplitter } from '@langchain/textsplitters';
|
||||
import type { Document } from '@langchain/core/documents';
|
||||
import { CSVLoader } from 'langchain/document_loaders/fs/csv';
|
||||
import { DocxLoader } from 'langchain/document_loaders/fs/docx';
|
||||
import { CSVLoader } from '@langchain/community/document_loaders/fs/csv';
|
||||
import { DocxLoader } from '@langchain/community/document_loaders/fs/docx';
|
||||
import { JSONLoader } from 'langchain/document_loaders/fs/json';
|
||||
import { PDFLoader } from 'langchain/document_loaders/fs/pdf';
|
||||
import { PDFLoader } from '@langchain/community/document_loaders/fs/pdf';
|
||||
import { TextLoader } from 'langchain/document_loaders/fs/text';
|
||||
import { EPubLoader } from 'langchain/document_loaders/fs/epub';
|
||||
import { EPubLoader } from '@langchain/community/document_loaders/fs/epub';
|
||||
import { file as tmpFile, type DirectoryResult } from 'tmp-promise';
|
||||
|
||||
import { getMetadataFiltersValues } from './helpers';
|
||||
|
@ -186,7 +186,7 @@ export class N8nBinaryLoader {
|
|||
}
|
||||
|
||||
const loadedDoc = this.textSplitter
|
||||
? await loader.loadAndSplit(this.textSplitter)
|
||||
? await this.textSplitter.splitDocuments(await loader.load())
|
||||
: await loader.load();
|
||||
|
||||
docs.push(...loadedDoc);
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
import { type IExecuteFunctions, type INodeExecutionData, NodeOperationError } from 'n8n-workflow';
|
||||
|
||||
import type { TextSplitter } from 'langchain/text_splitter';
|
||||
import type { TextSplitter } from '@langchain/textsplitters';
|
||||
import type { Document } from '@langchain/core/documents';
|
||||
import { JSONLoader } from 'langchain/document_loaders/fs/json';
|
||||
import { TextLoader } from 'langchain/document_loaders/fs/text';
|
||||
|
@ -75,7 +75,7 @@ export class N8nJsonLoader {
|
|||
}
|
||||
|
||||
const docs = this.textSplitter
|
||||
? await documentLoader.loadAndSplit(this.textSplitter)
|
||||
? await this.textSplitter.splitDocuments(await documentLoader.load())
|
||||
: await documentLoader.load();
|
||||
|
||||
if (metadata) {
|
||||
|
|
|
@ -1,5 +1,70 @@
|
|||
import type { INodeProperties } from 'n8n-workflow';
|
||||
|
||||
export const schemaTypeField: INodeProperties = {
|
||||
displayName: 'Schema Type',
|
||||
name: 'schemaType',
|
||||
type: 'options',
|
||||
noDataExpression: true,
|
||||
options: [
|
||||
{
|
||||
name: 'Generate From JSON Example',
|
||||
value: 'fromJson',
|
||||
description: 'Generate a schema from an example JSON object',
|
||||
},
|
||||
{
|
||||
name: 'Define Below',
|
||||
value: 'manual',
|
||||
description: 'Define the JSON schema manually',
|
||||
},
|
||||
],
|
||||
default: 'fromJson',
|
||||
description: 'How to specify the schema for the function',
|
||||
};
|
||||
|
||||
export const jsonSchemaExampleField: INodeProperties = {
|
||||
displayName: 'JSON Example',
|
||||
name: 'jsonSchemaExample',
|
||||
type: 'json',
|
||||
default: `{
|
||||
"some_input": "some_value"
|
||||
}`,
|
||||
noDataExpression: true,
|
||||
typeOptions: {
|
||||
rows: 10,
|
||||
},
|
||||
displayOptions: {
|
||||
show: {
|
||||
schemaType: ['fromJson'],
|
||||
},
|
||||
},
|
||||
description: 'Example JSON object to use to generate the schema',
|
||||
};
|
||||
|
||||
export const inputSchemaField: INodeProperties = {
|
||||
displayName: 'Input Schema',
|
||||
name: 'inputSchema',
|
||||
type: 'json',
|
||||
default: `{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"some_input": {
|
||||
"type": "string",
|
||||
"description": "Some input to the function"
|
||||
}
|
||||
}
|
||||
}`,
|
||||
noDataExpression: true,
|
||||
typeOptions: {
|
||||
rows: 10,
|
||||
},
|
||||
displayOptions: {
|
||||
show: {
|
||||
schemaType: ['manual'],
|
||||
},
|
||||
},
|
||||
description: 'Schema to use for the function',
|
||||
};
|
||||
|
||||
export const promptTypeOptions: INodeProperties = {
|
||||
displayName: 'Prompt',
|
||||
name: 'promptType',
|
||||
|
|
|
@ -10,7 +10,7 @@ import type { BaseCallbackConfig, Callbacks } from '@langchain/core/callbacks/ma
|
|||
import { Embeddings } from '@langchain/core/embeddings';
|
||||
import { VectorStore } from '@langchain/core/vectorstores';
|
||||
import type { Document } from '@langchain/core/documents';
|
||||
import { TextSplitter } from 'langchain/text_splitter';
|
||||
import { TextSplitter } from '@langchain/textsplitters';
|
||||
import { BaseChatMemory } from '@langchain/community/memory/chat_memory';
|
||||
import { BaseRetriever } from '@langchain/core/retrievers';
|
||||
import { BaseOutputParser, OutputParserException } from '@langchain/core/output_parsers';
|
||||
|
|
81 packages/@n8n/nodes-langchain/utils/schemaParsing.ts Normal file
@ -0,0 +1,81 @@
|
|||
import { makeResolverFromLegacyOptions } from '@n8n/vm2';
|
||||
import { json as generateJsonSchema } from 'generate-schema';
|
||||
import type { SchemaObject } from 'generate-schema';
|
||||
import type { JSONSchema7 } from 'json-schema';
|
||||
import { JavaScriptSandbox } from 'n8n-nodes-base/dist/nodes/Code/JavaScriptSandbox';
|
||||
import { getSandboxContext } from 'n8n-nodes-base/dist/nodes/Code/Sandbox';
|
||||
import type { IExecuteFunctions } from 'n8n-workflow';
|
||||
import { NodeOperationError, jsonParse } from 'n8n-workflow';
|
||||
|
||||
const vmResolver = makeResolverFromLegacyOptions({
|
||||
external: {
|
||||
modules: ['json-schema-to-zod', 'zod'],
|
||||
transitive: false,
|
||||
},
|
||||
resolve(moduleName, parentDirname) {
|
||||
if (moduleName === 'json-schema-to-zod') {
|
||||
return require.resolve(
|
||||
'@n8n/n8n-nodes-langchain/node_modules/json-schema-to-zod/dist/cjs/jsonSchemaToZod.js',
|
||||
{
|
||||
paths: [parentDirname],
|
||||
},
|
||||
);
|
||||
}
|
||||
if (moduleName === 'zod') {
|
||||
return require.resolve('@n8n/n8n-nodes-langchain/node_modules/zod.cjs', {
|
||||
paths: [parentDirname],
|
||||
});
|
||||
}
|
||||
return;
|
||||
},
|
||||
builtin: [],
|
||||
});
|
||||
|
||||
export function getSandboxWithZod(ctx: IExecuteFunctions, schema: JSONSchema7, itemIndex: number) {
|
||||
const context = getSandboxContext.call(ctx, itemIndex);
|
||||
let itemSchema: JSONSchema7 = schema;
|
||||
try {
|
||||
// If the root type is not defined, we assume it's an object
|
||||
if (itemSchema.type === undefined) {
|
||||
itemSchema = {
|
||||
type: 'object',
|
||||
properties: itemSchema.properties ?? (itemSchema as { [key: string]: JSONSchema7 }),
|
||||
};
|
||||
}
|
||||
} catch (error) {
|
||||
throw new NodeOperationError(ctx.getNode(), 'Error during parsing of JSON Schema.');
|
||||
}
|
||||
|
||||
// Make sure to remove the description from root schema
|
||||
const { description, ...restOfSchema } = itemSchema;
|
||||
const sandboxedSchema = new JavaScriptSandbox(
|
||||
context,
|
||||
`
|
||||
const { z } = require('zod');
|
||||
const { parseSchema } = require('json-schema-to-zod');
|
||||
const zodSchema = parseSchema(${JSON.stringify(restOfSchema)});
|
||||
const itemSchema = new Function('z', 'return (' + zodSchema + ')')(z)
|
||||
return itemSchema
|
||||
`,
|
||||
itemIndex,
|
||||
ctx.helpers,
|
||||
{ resolver: vmResolver },
|
||||
);
|
||||
return sandboxedSchema;
|
||||
}
|
||||
|
||||
export function generateSchema(schemaString: string): JSONSchema7 {
|
||||
const parsedSchema = jsonParse<SchemaObject>(schemaString);
|
||||
|
||||
return generateJsonSchema(parsedSchema) as JSONSchema7;
|
||||
}
|
||||
|
||||
export function throwIfToolSchema(ctx: IExecuteFunctions, error: Error) {
|
||||
if (error?.message?.includes('tool input did not match expected schema')) {
|
||||
throw new NodeOperationError(
|
||||
ctx.getNode(),
|
||||
`${error.message}.
|
||||
This is most likely because some of your tools are configured to require a specific schema. This is not supported by Conversational Agent. Remove the schema from the tool configuration or use Tools agent instead.`,
|
||||
);
|
||||
}
|
||||
}
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "@n8n/permissions",
|
||||
"version": "0.6.0",
|
||||
"version": "0.7.0",
|
||||
"scripts": {
|
||||
"clean": "rimraf dist .turbo",
|
||||
"dev": "pnpm watch",
|
||||
|
|
|
@ -21,10 +21,10 @@ if (process.argv.length === 2) {
|
|||
const nodeVersion = process.versions.node;
|
||||
const nodeVersionMajor = require('semver').major(nodeVersion);
|
||||
|
||||
if (![18, 20].includes(nodeVersionMajor)) {
|
||||
if (![18, 20, 22].includes(nodeVersionMajor)) {
|
||||
console.log(`
|
||||
Your Node.js version (${nodeVersion}) is currently not supported by n8n.
|
||||
Please use Node.js v18 (recommended), or v20 instead!
|
||||
Please use Node.js v18 (recommended), v20, or v22 instead!
|
||||
`);
|
||||
process.exit(1);
|
||||
}
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "n8n",
|
||||
"version": "1.42.0",
|
||||
"version": "1.43.0",
|
||||
"description": "n8n Workflow Automation Tool",
|
||||
"license": "SEE LICENSE IN LICENSE.md",
|
||||
"homepage": "https://n8n.io",
|
||||
|
@ -90,16 +90,16 @@
|
|||
"ts-essentials": "^7.0.3"
|
||||
},
|
||||
"dependencies": {
|
||||
"@langchain/community": "0.0.53",
|
||||
"@langchain/core": "0.1.61",
|
||||
"@langchain/openai": "0.0.28",
|
||||
"@langchain/pinecone": "^0.0.3",
|
||||
"@langchain/community": "0.2.2",
|
||||
"@langchain/core": "0.2.0",
|
||||
"@langchain/openai": "0.0.33",
|
||||
"@langchain/pinecone": "0.0.6",
|
||||
"@n8n/client-oauth2": "workspace:*",
|
||||
"@n8n/localtunnel": "2.1.0",
|
||||
"@n8n/n8n-nodes-langchain": "workspace:*",
|
||||
"@n8n/permissions": "workspace:*",
|
||||
"@n8n/typeorm": "0.3.20-9",
|
||||
"@n8n_io/license-sdk": "2.10.0",
|
||||
"@n8n_io/license-sdk": "2.12.0",
|
||||
"@oclif/core": "3.18.1",
|
||||
"@pinecone-database/pinecone": "2.1.0",
|
||||
"@rudderstack/rudder-sdk-node": "2.0.7",
|
||||
|
@ -141,7 +141,7 @@
|
|||
"json-diff": "1.0.6",
|
||||
"jsonschema": "1.4.1",
|
||||
"jsonwebtoken": "9.0.2",
|
||||
"langchain": "0.1.36",
|
||||
"langchain": "0.2.2",
|
||||
"ldapts": "4.2.6",
|
||||
"lodash": "4.17.21",
|
||||
"luxon": "3.3.0",
|
||||
|
|
|
@ -537,7 +537,9 @@ export class ActiveWorkflowManager {
|
|||
const dbWorkflow = existingWorkflow ?? (await this.workflowRepository.findById(workflowId));
|
||||
|
||||
if (!dbWorkflow) {
|
||||
throw new WorkflowActivationError(`Failed to find workflow with ID "${workflowId}"`);
|
||||
throw new WorkflowActivationError(`Failed to find workflow with ID "${workflowId}"`, {
|
||||
level: 'warning',
|
||||
});
|
||||
}
|
||||
|
||||
if (shouldDisplayActivationMessage) {
|
||||
|
@ -564,6 +566,7 @@ export class ActiveWorkflowManager {
|
|||
if (!canBeActivated) {
|
||||
throw new WorkflowActivationError(
|
||||
`Workflow ${dbWorkflow.display()} has no node to start the workflow - at least one trigger, poller or webhook node is required`,
|
||||
{ level: 'warning' },
|
||||
);
|
||||
}
|
||||
|
||||
|
|
|
@ -153,11 +153,6 @@ export const loadPublicApiVersions = async (
|
|||
};
|
||||
};
|
||||
|
||||
function isApiEnabledByLicense(): boolean {
|
||||
const license = Container.get(License);
|
||||
return !license.isAPIDisabled();
|
||||
}
|
||||
|
||||
export function isApiEnabled(): boolean {
|
||||
return !config.get('publicApi.disabled') && isApiEnabledByLicense();
|
||||
return !config.get('publicApi.disabled') && !Container.get(License).isAPIDisabled();
|
||||
}
|
||||
|
|
|
@ -69,6 +69,7 @@ export async function saveCredential(
|
|||
|
||||
const personalProject = await Container.get(ProjectRepository).getPersonalProjectForUserOrFail(
|
||||
user.id,
|
||||
transactionManager,
|
||||
);
|
||||
|
||||
Object.assign(newSharedCredential, {
|
||||
|
|
|
@ -7,7 +7,6 @@ import { SharedWorkflow, type WorkflowSharingRole } from '@db/entities/SharedWor
|
|||
import { WorkflowRepository } from '@db/repositories/workflow.repository';
|
||||
import { SharedWorkflowRepository } from '@db/repositories/sharedWorkflow.repository';
|
||||
import type { Project } from '@/databases/entities/Project';
|
||||
import { WorkflowTagMappingRepository } from '@db/repositories/workflowTagMapping.repository';
|
||||
import { TagRepository } from '@db/repositories/tag.repository';
|
||||
import { License } from '@/License';
|
||||
import { WorkflowSharingService } from '@/workflows/workflowSharing.service';
|
||||
|
@ -113,9 +112,7 @@ export async function getWorkflowTags(workflowId: string) {
|
|||
|
||||
export async function updateTags(workflowId: string, newTags: string[]): Promise<any> {
|
||||
await Db.transaction(async (transactionManager) => {
|
||||
const oldTags = await Container.get(WorkflowTagMappingRepository).findBy({
|
||||
workflowId,
|
||||
});
|
||||
const oldTags = await transactionManager.findBy(WorkflowTagMapping, { workflowId });
|
||||
if (oldTags.length > 0) {
|
||||
await transactionManager.delete(WorkflowTagMapping, oldTags);
|
||||
}
|
||||
|
|
|
@ -371,7 +371,8 @@ export class Server extends AbstractServer {
|
|||
const isPreviewMode = process.env.N8N_PREVIEW_MODE === 'true';
|
||||
const securityHeadersMiddleware = helmet({
|
||||
contentSecurityPolicy: false,
|
||||
xFrameOptions: isPreviewMode || inE2ETests ? false : { action: 'sameorigin' },
|
||||
xFrameOptions:
|
||||
isPreviewMode || inE2ETests || inDevelopment ? false : { action: 'sameorigin' },
|
||||
dnsPrefetchControl: false,
|
||||
// This is only relevant for Internet-explorer, which we do not support
|
||||
ieNoOpen: false,
|
||||
|
|
|
@ -3,7 +3,7 @@ import {
|
|||
ErrorReporterProxy as ErrorReporter,
|
||||
WorkflowOperationError,
|
||||
} from 'n8n-workflow';
|
||||
import { Container, Service } from 'typedi';
|
||||
import { Service } from 'typedi';
|
||||
import type { ExecutionStopResult, IWorkflowExecutionDataProcess } from '@/Interfaces';
|
||||
import { WorkflowRunner } from '@/WorkflowRunner';
|
||||
import { ExecutionRepository } from '@db/repositories/execution.repository';
|
||||
|
@ -137,10 +137,7 @@ export class WaitTracker {
|
|||
fullExecutionData.waitTill = null;
|
||||
fullExecutionData.status = 'canceled';
|
||||
|
||||
await Container.get(ExecutionRepository).updateExistingExecution(
|
||||
executionId,
|
||||
fullExecutionData,
|
||||
);
|
||||
await this.executionRepository.updateExistingExecution(executionId, fullExecutionData);
|
||||
|
||||
return {
|
||||
mode: fullExecutionData.mode,
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
import { Command, Flags } from '@oclif/core';
|
||||
import type { DataSourceOptions as ConnectionOptions } from '@n8n/typeorm';
|
||||
import { DataSource as Connection } from '@n8n/typeorm';
|
||||
import { MigrationExecutor, DataSource as Connection } from '@n8n/typeorm';
|
||||
import { Container } from 'typedi';
|
||||
import { Logger } from '@/Logger';
|
||||
import { setSchema } from '@/Db';
|
||||
|
@ -13,27 +13,44 @@ import config from '@/config';
|
|||
// Mocking turned into a mess due to this command using typeorm and the db
|
||||
// config directly and customizing and monkey patching parts.
|
||||
export async function main(
|
||||
connectionOptions: ConnectionOptions,
|
||||
logger: Logger,
|
||||
DataSource: typeof Connection,
|
||||
connection: Connection,
|
||||
migrationExecutor: MigrationExecutor,
|
||||
) {
|
||||
const dbType = config.getEnv('database.type');
|
||||
const executedMigrations = await migrationExecutor.getExecutedMigrations();
|
||||
const lastExecutedMigration = executedMigrations.at(0);
|
||||
|
||||
(connectionOptions.migrations as Migration[]).forEach(wrapMigration);
|
||||
|
||||
const connection = new DataSource(connectionOptions);
|
||||
await connection.initialize();
|
||||
if (dbType === 'postgresdb') await setSchema(connection);
|
||||
|
||||
const lastMigration = connection.migrations.at(-1);
|
||||
|
||||
if (lastMigration === undefined) {
|
||||
logger.error('There is no migration to reverse.');
|
||||
if (lastExecutedMigration === undefined) {
|
||||
logger.error(
|
||||
"Cancelled command. The database was never migrated. Are you sure you're connected to the right database?.",
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
if (!lastMigration.down) {
|
||||
logger.error('The last migration was irreversible and cannot be reverted.');
|
||||
const lastMigrationInstance = connection.migrations.find((m) => {
|
||||
// Migration names are optional. If a migration has no name property
|
||||
// TypeORM will default to the class name.
|
||||
const name1 = m.name ?? m.constructor.name;
|
||||
const name2 = lastExecutedMigration.name;
|
||||
|
||||
return name1 === name2;
|
||||
});
|
||||
|
||||
if (lastMigrationInstance === undefined) {
|
||||
logger.error(
|
||||
`The last migration that was executed is "${lastExecutedMigration.name}", but I could not find that migration's code in the currently installed version of n8n.`,
|
||||
);
|
||||
logger.error(
|
||||
'This usually means that you downgraded n8n before running `n8n db:revert`. Please upgrade n8n again and run `n8n db:revert` and then downgrade again.',
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
if (!lastMigrationInstance.down) {
|
||||
const message = lastMigrationInstance.name
|
||||
? `Cancelled command. The last migration "${lastMigrationInstance.name}" was irreversible.`
|
||||
: 'Cancelled command. The last migration was irreversible.';
|
||||
logger.error(message);
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -68,7 +85,17 @@ export class DbRevertMigrationCommand extends Command {
|
|||
logging: ['query', 'error', 'schema'],
|
||||
};
|
||||
|
||||
return await main(connectionOptions, this.logger, Connection);
|
||||
const connection = new Connection(connectionOptions);
|
||||
await connection.initialize();
|
||||
|
||||
const dbType = config.getEnv('database.type');
|
||||
if (dbType === 'postgresdb') await setSchema(connection);
|
||||
|
||||
const migrationExecutor = new MigrationExecutor(connection);
|
||||
|
||||
(connectionOptions.migrations as Migration[]).forEach(wrapMigration);
|
||||
|
||||
return await main(this.logger, connection, migrationExecutor);
|
||||
}
|
||||
|
||||
async catch(error: Error) {
|
||||
|
|
|
@ -13,9 +13,9 @@ import { BaseCommand } from '../BaseCommand';
|
|||
import type { ICredentialsEncrypted } from 'n8n-workflow';
|
||||
import { ApplicationError, jsonParse } from 'n8n-workflow';
|
||||
import { UM_FIX_INSTRUCTION } from '@/constants';
|
||||
import { UserRepository } from '@db/repositories/user.repository';
|
||||
import { ProjectRepository } from '@/databases/repositories/project.repository';
|
||||
import type { Project } from '@/databases/entities/Project';
|
||||
import { Project } from '@/databases/entities/Project';
|
||||
import { User } from '@/databases/entities/User';
|
||||
|
||||
export class ImportCredentialsCommand extends BaseCommand {
|
||||
static description = 'Import credentials';
|
||||
|
@ -75,13 +75,13 @@ export class ImportCredentialsCommand extends BaseCommand {
|
|||
);
|
||||
}
|
||||
|
||||
const project = await this.getProject(flags.userId, flags.projectId);
|
||||
|
||||
const credentials = await this.readCredentials(flags.input, flags.separate);
|
||||
|
||||
await Db.getConnection().transaction(async (transactionManager) => {
|
||||
this.transactionManager = transactionManager;
|
||||
|
||||
const project = await this.getProject(flags.userId, flags.projectId);
|
||||
|
||||
const result = await this.checkRelations(credentials, flags.projectId, flags.userId);
|
||||
|
||||
if (!result.success) {
|
||||
|
@ -130,19 +130,6 @@ export class ImportCredentialsCommand extends BaseCommand {
|
|||
}
|
||||
}
|
||||
|
||||
private async getOwnerProject() {
|
||||
const owner = await Container.get(UserRepository).findOneBy({ role: 'global:owner' });
|
||||
if (!owner) {
|
||||
throw new ApplicationError(`Failed to find owner. ${UM_FIX_INSTRUCTION}`);
|
||||
}
|
||||
|
||||
const project = await Container.get(ProjectRepository).getPersonalProjectForUserOrFail(
|
||||
owner.id,
|
||||
);
|
||||
|
||||
return project;
|
||||
}
|
||||
|
||||
private async checkRelations(
|
||||
credentials: ICredentialsEncrypted[],
|
||||
projectId?: string,
|
||||
|
@ -244,7 +231,7 @@ export class ImportCredentialsCommand extends BaseCommand {
|
|||
});
|
||||
|
||||
if (sharedCredential && sharedCredential.project.type === 'personal') {
|
||||
const user = await Container.get(UserRepository).findOneByOrFail({
|
||||
const user = await this.transactionManager.findOneByOrFail(User, {
|
||||
projectRelations: {
|
||||
role: 'project:personalOwner',
|
||||
projectId: sharedCredential.projectId,
|
||||
|
@ -263,13 +250,20 @@ export class ImportCredentialsCommand extends BaseCommand {
|
|||
|
||||
private async getProject(userId?: string, projectId?: string) {
|
||||
if (projectId) {
|
||||
return await Container.get(ProjectRepository).findOneByOrFail({ id: projectId });
|
||||
return await this.transactionManager.findOneByOrFail(Project, { id: projectId });
|
||||
}
|
||||
|
||||
if (userId) {
|
||||
return await Container.get(ProjectRepository).getPersonalProjectForUserOrFail(userId);
|
||||
if (!userId) {
|
||||
const owner = await this.transactionManager.findOneBy(User, { role: 'global:owner' });
|
||||
if (!owner) {
|
||||
throw new ApplicationError(`Failed to find owner. ${UM_FIX_INSTRUCTION}`);
|
||||
}
|
||||
userId = owner.id;
|
||||
}
|
||||
|
||||
return await this.getOwnerProject();
|
||||
return await Container.get(ProjectRepository).getPersonalProjectForUserOrFail(
|
||||
userId,
|
||||
this.transactionManager,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -160,19 +160,6 @@ export class ImportWorkflowsCommand extends BaseCommand {
|
|||
this.logger.info(`Successfully imported ${total} ${total === 1 ? 'workflow.' : 'workflows.'}`);
|
||||
}
|
||||
|
||||
private async getOwnerProject() {
|
||||
const owner = await Container.get(UserRepository).findOneBy({ role: 'global:owner' });
|
||||
if (!owner) {
|
||||
throw new ApplicationError(`Failed to find owner. ${UM_FIX_INSTRUCTION}`);
|
||||
}
|
||||
|
||||
const project = await Container.get(ProjectRepository).getPersonalProjectForUserOrFail(
|
||||
owner.id,
|
||||
);
|
||||
|
||||
return project;
|
||||
}
|
||||
|
||||
private async getWorkflowOwner(workflow: WorkflowEntity) {
|
||||
const sharing = await Container.get(SharedWorkflowRepository).findOne({
|
||||
where: { workflowId: workflow.id, role: 'workflow:owner' },
|
||||
|
@ -234,10 +221,14 @@ export class ImportWorkflowsCommand extends BaseCommand {
|
|||
return await Container.get(ProjectRepository).findOneByOrFail({ id: projectId });
|
||||
}
|
||||
|
||||
if (userId) {
|
||||
return await Container.get(ProjectRepository).getPersonalProjectForUserOrFail(userId);
|
||||
if (!userId) {
|
||||
const owner = await Container.get(UserRepository).findOneBy({ role: 'global:owner' });
|
||||
if (!owner) {
|
||||
throw new ApplicationError(`Failed to find owner. ${UM_FIX_INSTRUCTION}`);
|
||||
}
|
||||
userId = owner.id;
|
||||
}
|
||||
|
||||
return await this.getOwnerProject();
|
||||
return await Container.get(ProjectRepository).getPersonalProjectForUserOrFail(userId);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -234,7 +234,7 @@ export class Worker extends BaseCommand {
|
|||
|
||||
if (!process.env.N8N_ENCRYPTION_KEY) {
|
||||
throw new ApplicationError(
|
||||
'Missing encryption key. Worker started without the required N8N_ENCRYPTION_KEY env var. More information: https://docs.n8n.io/hosting/environment-variables/configuration-methods/#encryption-key',
|
||||
'Missing encryption key. Worker started without the required N8N_ENCRYPTION_KEY env var. More information: https://docs.n8n.io/hosting/configuration/configuration-examples/encryption-key/',
|
||||
);
|
||||
}
|
||||
|
||||
|
|
|
@ -1,54 +1,26 @@
|
|||
import type { RequestHandler } from 'express';
|
||||
import { NextFunction, Response } from 'express';
|
||||
import type {
|
||||
INodeListSearchResult,
|
||||
INodePropertyOptions,
|
||||
ResourceMapperFields,
|
||||
} from 'n8n-workflow';
|
||||
import { jsonParse } from 'n8n-workflow';
|
||||
import type { INodePropertyOptions } from 'n8n-workflow';
|
||||
|
||||
import { Get, Middleware, RestController } from '@/decorators';
|
||||
import { Post, RestController } from '@/decorators';
|
||||
import { getBase } from '@/WorkflowExecuteAdditionalData';
|
||||
import { DynamicNodeParametersService } from '@/services/dynamicNodeParameters.service';
|
||||
import { DynamicNodeParametersRequest } from '@/requests';
|
||||
import { BadRequestError } from '@/errors/response-errors/bad-request.error';
|
||||
|
||||
const assertMethodName: RequestHandler = (req, res, next) => {
|
||||
const { methodName } = req.query as DynamicNodeParametersRequest.BaseRequest['query'];
|
||||
if (!methodName) {
|
||||
throw new BadRequestError('Parameter methodName is required.');
|
||||
}
|
||||
next();
|
||||
};
|
||||
|
||||
@RestController('/dynamic-node-parameters')
|
||||
export class DynamicNodeParametersController {
|
||||
constructor(private readonly service: DynamicNodeParametersService) {}
|
||||
|
||||
@Middleware()
|
||||
parseQueryParams(req: DynamicNodeParametersRequest.BaseRequest, _: Response, next: NextFunction) {
|
||||
const { credentials, currentNodeParameters, nodeTypeAndVersion } = req.query;
|
||||
if (!nodeTypeAndVersion) {
|
||||
throw new BadRequestError('Parameter nodeTypeAndVersion is required.');
|
||||
}
|
||||
if (!currentNodeParameters) {
|
||||
throw new BadRequestError('Parameter currentNodeParameters is required.');
|
||||
}
|
||||
|
||||
req.params = {
|
||||
nodeTypeAndVersion: jsonParse(nodeTypeAndVersion),
|
||||
currentNodeParameters: jsonParse(currentNodeParameters),
|
||||
credentials: credentials ? jsonParse(credentials) : undefined,
|
||||
};
|
||||
|
||||
next();
|
||||
}
|
||||
|
||||
/** Returns parameter values which normally get loaded from an external API or get generated dynamically */
|
||||
@Get('/options')
|
||||
@Post('/options')
|
||||
async getOptions(req: DynamicNodeParametersRequest.Options): Promise<INodePropertyOptions[]> {
|
||||
const { path, methodName, loadOptions } = req.query;
|
||||
const { credentials, currentNodeParameters, nodeTypeAndVersion } = req.params;
|
||||
const {
|
||||
credentials,
|
||||
currentNodeParameters,
|
||||
nodeTypeAndVersion,
|
||||
path,
|
||||
methodName,
|
||||
loadOptions,
|
||||
} = req.body;
|
||||
|
||||
const additionalData = await getBase(req.user.id, currentNodeParameters);
|
||||
|
||||
if (methodName) {
|
||||
|
@ -64,7 +36,7 @@ export class DynamicNodeParametersController {
|
|||
|
||||
if (loadOptions) {
|
||||
return await this.service.getOptionsViaLoadOptions(
|
||||
jsonParse(loadOptions),
|
||||
loadOptions,
|
||||
additionalData,
|
||||
nodeTypeAndVersion,
|
||||
currentNodeParameters,
|
||||
|
@ -75,13 +47,22 @@ export class DynamicNodeParametersController {
|
|||
return [];
|
||||
}
|
||||
|
||||
@Get('/resource-locator-results', { middlewares: [assertMethodName] })
|
||||
async getResourceLocatorResults(
|
||||
req: DynamicNodeParametersRequest.ResourceLocatorResults,
|
||||
): Promise<INodeListSearchResult | undefined> {
|
||||
const { path, methodName, filter, paginationToken } = req.query;
|
||||
const { credentials, currentNodeParameters, nodeTypeAndVersion } = req.params;
|
||||
@Post('/resource-locator-results')
|
||||
async getResourceLocatorResults(req: DynamicNodeParametersRequest.ResourceLocatorResults) {
|
||||
const {
|
||||
path,
|
||||
methodName,
|
||||
filter,
|
||||
paginationToken,
|
||||
credentials,
|
||||
currentNodeParameters,
|
||||
nodeTypeAndVersion,
|
||||
} = req.body;
|
||||
|
||||
if (!methodName) throw new BadRequestError('Missing `methodName` in request body');
|
||||
|
||||
const additionalData = await getBase(req.user.id, currentNodeParameters);
|
||||
|
||||
return await this.service.getResourceLocatorResults(
|
||||
methodName,
|
||||
path,
|
||||
|
@ -94,13 +75,14 @@ export class DynamicNodeParametersController {
|
|||
);
|
||||
}
|
||||
|
||||
@Get('/resource-mapper-fields', { middlewares: [assertMethodName] })
|
||||
async getResourceMappingFields(
|
||||
req: DynamicNodeParametersRequest.ResourceMapperFields,
|
||||
): Promise<ResourceMapperFields | undefined> {
|
||||
const { path, methodName } = req.query;
|
||||
const { credentials, currentNodeParameters, nodeTypeAndVersion } = req.params;
|
||||
@Post('/resource-mapper-fields')
|
||||
async getResourceMappingFields(req: DynamicNodeParametersRequest.ResourceMapperFields) {
|
||||
const { path, methodName, credentials, currentNodeParameters, nodeTypeAndVersion } = req.body;
|
||||
|
||||
if (!methodName) throw new BadRequestError('Missing `methodName` in request body');
|
||||
|
||||
const additionalData = await getBase(req.user.id, currentNodeParameters);
|
||||
|
||||
return await this.service.getResourceMappingFields(
|
||||
methodName,
|
||||
path,
|
||||
|
|
|
@ -1,6 +1,6 @@
import validator from 'validator';
import { plainToInstance } from 'class-transformer';
import { Response } from 'express';
import { type RequestHandler, Response } from 'express';
import { randomBytes } from 'crypto';

import { AuthService } from '@/auth/auth.service';

@ -22,6 +22,15 @@ import { ExternalHooks } from '@/ExternalHooks';
import { InternalHooks } from '@/InternalHooks';
import { BadRequestError } from '@/errors/response-errors/bad-request.error';
import { UserRepository } from '@/databases/repositories/user.repository';
import { isApiEnabled } from '@/PublicApi';

export const isApiEnabledMiddleware: RequestHandler = (_, res, next) => {
	if (isApiEnabled()) {
		next();
	} else {
		res.status(404).end();
	}
};

@RestController('/me')
export class MeController {

@ -185,7 +194,7 @@ export class MeController {
	/**
	 * Creates an API Key
	 */
	@Post('/api-key')
	@Post('/api-key', { middlewares: [isApiEnabledMiddleware] })
	async createAPIKey(req: AuthenticatedRequest) {
		const apiKey = `n8n_api_${randomBytes(40).toString('hex')}`;

@ -202,7 +211,7 @@ export class MeController {
	/**
	 * Get an API Key
	 */
	@Get('/api-key')
	@Get('/api-key', { middlewares: [isApiEnabledMiddleware] })
	async getAPIKey(req: AuthenticatedRequest) {
		return { apiKey: req.user.apiKey };
	}

@ -210,7 +219,7 @@ export class MeController {
	/**
	 * Deletes an API Key
	 */
	@Delete('/api-key')
	@Delete('/api-key', { middlewares: [isApiEnabledMiddleware] })
	async deleteAPIKey(req: AuthenticatedRequest) {
		await this.userService.update(req.user.id, { apiKey: null });
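
All three `/me/api-key` handlers are now gated by `isApiEnabledMiddleware`, which answers with a bare 404 whenever the public API is switched off. A standalone sketch of the same gating pattern in plain Express; the `isApiEnabled` check, route, and payload here are stand-ins for illustration, not n8n's actual implementation:

```ts
import express, { type RequestHandler } from 'express';

// Stand-in for the real isApiEnabled(): pretend the public API is toggled via an env var.
const isApiEnabled = () => process.env.PUBLIC_API_DISABLED !== 'true';

const requireApiEnabled: RequestHandler = (_req, res, next) => {
	if (isApiEnabled()) next();
	// Answer 404 rather than 403 so a disabled route is indistinguishable from a missing one.
	else res.status(404).end();
};

const app = express();
app.post('/me/api-key', requireApiEnabled, (_req, res) => {
	res.json({ apiKey: 'n8n_api_placeholder' }); // placeholder payload
});
app.listen(3000);
```
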
@ -1,6 +1,10 @@
import { Service } from 'typedi';
import Csrf from 'csrf';
import type { Response } from 'express';
import { Credentials } from 'n8n-core';
import type { ICredentialDataDecryptedObject, IWorkflowExecuteAdditionalData } from 'n8n-workflow';
import { jsonParse, ApplicationError } from 'n8n-workflow';

import config from '@/config';
import type { CredentialsEntity } from '@db/entities/CredentialsEntity';
import type { User } from '@db/entities/User';

@ -17,6 +21,11 @@ import { UrlService } from '@/services/url.service';
import { BadRequestError } from '@/errors/response-errors/bad-request.error';
import { NotFoundError } from '@/errors/response-errors/not-found.error';

export interface CsrfStateParam {
	cid: string;
	token: string;
}

@Service()
export abstract class AbstractOAuthController {
	abstract oauthVersion: number;

@ -108,4 +117,37 @@ export abstract class AbstractOAuthController {
	protected async getCredentialWithoutUser(credentialId: string): Promise<ICredentialsDb | null> {
		return await this.credentialsRepository.findOneBy({ id: credentialId });
	}

	protected createCsrfState(credentialsId: string): [string, string] {
		const token = new Csrf();
		const csrfSecret = token.secretSync();
		const state: CsrfStateParam = {
			token: token.create(csrfSecret),
			cid: credentialsId,
		};
		return [csrfSecret, Buffer.from(JSON.stringify(state)).toString('base64')];
	}

	protected decodeCsrfState(encodedState: string): CsrfStateParam {
		const errorMessage = 'Invalid state format';
		const decoded = jsonParse<CsrfStateParam>(Buffer.from(encodedState, 'base64').toString(), {
			errorMessage,
		});
		if (typeof decoded.cid !== 'string' || typeof decoded.token !== 'string') {
			throw new ApplicationError(errorMessage);
		}
		return decoded;
	}

	protected verifyCsrfState(decrypted: ICredentialDataDecryptedObject, state: CsrfStateParam) {
		const token = new Csrf();
		return (
			decrypted.csrfSecret === undefined ||
			!token.verify(decrypted.csrfSecret as string, state.token)
		);
	}

	protected renderCallbackError(res: Response, message: string, reason?: string) {
		res.render('oauth-error-callback', { error: { message, reason } });
	}
}
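
The CSRF helpers that move into this shared base class encode a `{ cid, token }` pair as base64 JSON for the OAuth `state` parameter and later check the token against the secret stored on the credential. A self-contained sketch of the same round trip using the `csrf` package directly — free functions instead of the controller's protected methods, and plain `JSON.parse` in place of n8n's `jsonParse`:

```ts
import Csrf from 'csrf';

interface CsrfStateParam {
	cid: string;
	token: string;
}

function createCsrfState(credentialsId: string): [secret: string, encodedState: string] {
	const csrf = new Csrf();
	const csrfSecret = csrf.secretSync();
	const state: CsrfStateParam = { token: csrf.create(csrfSecret), cid: credentialsId };
	return [csrfSecret, Buffer.from(JSON.stringify(state)).toString('base64')];
}

function decodeCsrfState(encodedState: string): CsrfStateParam {
	const decoded = JSON.parse(Buffer.from(encodedState, 'base64').toString()) as CsrfStateParam;
	if (typeof decoded.cid !== 'string' || typeof decoded.token !== 'string') {
		throw new Error('Invalid state format');
	}
	return decoded;
}

// The secret is persisted with the credential; the encoded state rides along in the OAuth redirect.
const [csrfSecret, encodedState] = createCsrfState('123');
const state = decodeCsrfState(encodedState);
console.log(new Csrf().verify(csrfSecret, state.token)); // true for an untampered state
```
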
@ -4,13 +4,11 @@ import axios from 'axios';
import type { RequestOptions } from 'oauth-1.0a';
import clientOAuth1 from 'oauth-1.0a';
import { createHmac } from 'crypto';
import { RESPONSE_ERROR_MESSAGES } from '@/constants';
import { Get, RestController } from '@/decorators';
import { OAuthRequest } from '@/requests';
import { sendErrorResponse } from '@/ResponseHelper';
import { AbstractOAuthController } from './abstractOAuth.controller';
import { AbstractOAuthController, type CsrfStateParam } from './abstractOAuth.controller';
import { NotFoundError } from '@/errors/response-errors/not-found.error';
import { ServiceUnavailableError } from '@/errors/response-errors/service-unavailable.error';

interface OAuth1CredentialData {
	signatureMethod: 'HMAC-SHA256' | 'HMAC-SHA512' | 'HMAC-SHA1';

@ -44,6 +42,7 @@ export class OAuth1CredentialController extends AbstractOAuthController {
			decryptedDataOriginal,
			additionalData,
		);
		const [csrfSecret, state] = this.createCsrfState(credential.id);

		const signatureMethod = oauthCredentials.signatureMethod;

@ -61,7 +60,7 @@ export class OAuth1CredentialController extends AbstractOAuthController {
		};

		const oauthRequestData = {
			oauth_callback: `${this.baseUrl}/callback?cid=${credential.id}`,
			oauth_callback: `${this.baseUrl}/callback?state=${state}`,
		};

		await this.externalHooks.run('oauth1.authenticate', [oAuthOptions, oauthRequestData]);

@ -90,6 +89,7 @@ export class OAuth1CredentialController extends AbstractOAuthController {

		const returnUri = `${oauthCredentials.authUrl}?oauth_token=${responseJson.oauth_token}`;

		decryptedDataOriginal.csrfSecret = csrfSecret;
		await this.encryptAndSaveData(credential, decryptedDataOriginal);

		this.logger.verbose('OAuth1 authorization successful for new credential', {

@ -103,31 +103,31 @@ export class OAuth1CredentialController extends AbstractOAuthController {
	/** Verify and store app code. Generate access tokens and store for respective credential */
	@Get('/callback', { usesTemplates: true })
	async handleCallback(req: OAuthRequest.OAuth1Credential.Callback, res: Response) {
		const userId = req.user?.id;
		try {
			const { oauth_verifier, oauth_token, cid: credentialId } = req.query;
			const { oauth_verifier, oauth_token, state: encodedState } = req.query;

			if (!oauth_verifier || !oauth_token) {
				const errorResponse = new ServiceUnavailableError(
					`Insufficient parameters for OAuth1 callback. Received following query parameters: ${JSON.stringify(
						req.query,
					)}`,
			if (!oauth_verifier || !oauth_token || !encodedState) {
				return this.renderCallbackError(
					res,
					'Insufficient parameters for OAuth1 callback.',
					`Received following query parameters: ${JSON.stringify(req.query)}`,
				);
				this.logger.error('OAuth1 callback failed because of insufficient parameters received', {
					userId: req.user?.id,
					credentialId,
				});
				return sendErrorResponse(res, errorResponse);
			}

			const credential = await this.getCredentialWithoutUser(credentialId);
			let state: CsrfStateParam;
			try {
				state = this.decodeCsrfState(encodedState);
			} catch (error) {
				return this.renderCallbackError(res, (error as Error).message);
			}

			const credentialId = state.cid;
			const credential = await this.getCredentialWithoutUser(credentialId);
			if (!credential) {
				this.logger.error('OAuth1 callback failed because of insufficient user permissions', {
					userId: req.user?.id,
					credentialId,
				});
				const errorResponse = new NotFoundError(RESPONSE_ERROR_MESSAGES.NO_CREDENTIAL);
				return sendErrorResponse(res, errorResponse);
				const errorMessage = 'OAuth1 callback failed because of insufficient permissions';
				this.logger.error(errorMessage, { userId, credentialId });
				return this.renderCallbackError(res, errorMessage);
			}

			const additionalData = await this.getAdditionalData(req.user);

@ -138,6 +138,12 @@ export class OAuth1CredentialController extends AbstractOAuthController {
				additionalData,
			);

			if (this.verifyCsrfState(decryptedDataOriginal, state)) {
				const errorMessage = 'The OAuth1 callback state is invalid!';
				this.logger.debug(errorMessage, { userId, credentialId });
				return this.renderCallbackError(res, errorMessage);
			}

			const options: AxiosRequestConfig = {
				method: 'POST',
				url: oauthCredentials.accessTokenUrl,

@ -152,10 +158,7 @@ export class OAuth1CredentialController extends AbstractOAuthController {
			try {
				oauthToken = await axios.request(options);
			} catch (error) {
				this.logger.error('Unable to fetch tokens for OAuth1 callback', {
					userId: req.user?.id,
					credentialId,
				});
				this.logger.error('Unable to fetch tokens for OAuth1 callback', { userId, credentialId });
				const errorResponse = new NotFoundError('Unable to get access tokens!');
				return sendErrorResponse(res, errorResponse);
			}

@ -171,14 +174,13 @@ export class OAuth1CredentialController extends AbstractOAuthController {
			await this.encryptAndSaveData(credential, decryptedDataOriginal);

			this.logger.verbose('OAuth1 callback successful for new credential', {
				userId: req.user?.id,
				userId,
				credentialId,
			});
			return res.render('oauth-callback');
		} catch (error) {
			this.logger.error('OAuth1 callback failed because of insufficient user permissions', {
				userId: req.user?.id,
				credentialId: req.query.cid,
				userId,
			});
			// Error response
			return sendErrorResponse(res, error as Error);
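
The net effect for OAuth1 is that the `oauth_callback` URL no longer carries a bare credential id (`?cid=...`) but the base64-encoded CSRF state (`?state=...`), which the callback handler decodes to recover both the credential id and the anti-forgery token. A rough sketch of that URL round trip with example values only; unlike the controller, the sketch URL-encodes the state explicitly for safety:

```ts
// Example values only; in the controller the encoded state comes from createCsrfState().
const baseUrl = 'https://n8n.example.com/rest/oauth1-credential';
const state = { cid: '42', token: 'example-token' };
const encodedState = Buffer.from(JSON.stringify(state)).toString('base64');

// Mirrors `${this.baseUrl}/callback?state=${state}` from the hunk above.
const callbackUrl = `${baseUrl}/callback?state=${encodeURIComponent(encodedState)}`;

// The callback handler reverses it:
const received = new URL(callbackUrl).searchParams.get('state')!;
const decoded = JSON.parse(Buffer.from(received, 'base64').toString()) as { cid: string; token: string };
console.log(decoded.cid); // '42'
```
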
@ -1,21 +1,15 @@
import type { ClientOAuth2Options, OAuth2CredentialData } from '@n8n/client-oauth2';
import { ClientOAuth2 } from '@n8n/client-oauth2';
import Csrf from 'csrf';
import { Response } from 'express';
import pkceChallenge from 'pkce-challenge';
import * as qs from 'querystring';
import omit from 'lodash/omit';
import set from 'lodash/set';
import split from 'lodash/split';
import { ApplicationError, jsonParse, jsonStringify } from 'n8n-workflow';
import { Get, RestController } from '@/decorators';
import { jsonStringify } from 'n8n-workflow';
import { OAuthRequest } from '@/requests';
import { AbstractOAuthController } from './abstractOAuth.controller';

interface CsrfStateParam {
	cid: string;
	token: string;
}
import { AbstractOAuthController, type CsrfStateParam } from './abstractOAuth.controller';

@RestController('/oauth2-credential')
export class OAuth2CredentialController extends AbstractOAuthController {

@ -87,8 +81,8 @@ export class OAuth2CredentialController extends AbstractOAuthController {
	/** Verify and store app code. Generate access tokens and store for respective credential */
	@Get('/callback', { usesTemplates: true })
	async handleCallback(req: OAuthRequest.OAuth2Credential.Callback, res: Response) {
		const userId = req.user?.id;
		try {
			// realmId is currently only used for the QuickBooks OAuth2 flow
			const { code, state: encodedState } = req.query;
			if (!code || !encodedState) {
				return this.renderCallbackError(

@ -105,13 +99,11 @@ export class OAuth2CredentialController extends AbstractOAuthController {
				return this.renderCallbackError(res, (error as Error).message);
			}

			const credential = await this.getCredentialWithoutUser(state.cid);
			const credentialId = state.cid;
			const credential = await this.getCredentialWithoutUser(credentialId);
			if (!credential) {
				const errorMessage = 'OAuth2 callback failed because of insufficient permissions';
				this.logger.error(errorMessage, {
					userId: req.user?.id,
					credentialId: state.cid,
				});
				this.logger.error(errorMessage, { userId, credentialId });
				return this.renderCallbackError(res, errorMessage);
			}

@ -123,16 +115,9 @@ export class OAuth2CredentialController extends AbstractOAuthController {
				additionalData,
			);

			const token = new Csrf();
			if (
				decryptedDataOriginal.csrfSecret === undefined ||
				!token.verify(decryptedDataOriginal.csrfSecret as string, state.token)
			) {
			if (this.verifyCsrfState(decryptedDataOriginal, state)) {
				const errorMessage = 'The OAuth2 callback state is invalid!';
				this.logger.debug(errorMessage, {
					userId: req.user?.id,
					credentialId: credential.id,
				});
				this.logger.debug(errorMessage, { userId, credentialId });
				return this.renderCallbackError(res, errorMessage);
			}

@ -171,10 +156,7 @@ export class OAuth2CredentialController extends AbstractOAuthController {

			if (oauthToken === undefined) {
				const errorMessage = 'Unable to get OAuth2 access tokens!';
				this.logger.error(errorMessage, {
					userId: req.user?.id,
					credentialId: credential.id,
				});
				this.logger.error(errorMessage, { userId, credentialId });
				return this.renderCallbackError(res, errorMessage);
			}

@ -191,8 +173,8 @@ export class OAuth2CredentialController extends AbstractOAuthController {
			await this.encryptAndSaveData(credential, decryptedDataOriginal);

			this.logger.verbose('OAuth2 callback successful for credential', {
				userId: req.user?.id,
				credentialId: credential.id,
				userId,
				credentialId,
			});

			return res.render('oauth-callback');

@ -219,29 +201,4 @@ export class OAuth2CredentialController extends AbstractOAuthController {
			ignoreSSLIssues: credential.ignoreSSLIssues ?? false,
		};
	}

	private renderCallbackError(res: Response, message: string, reason?: string) {
		res.render('oauth-error-callback', { error: { message, reason } });
	}

	private createCsrfState(credentialsId: string): [string, string] {
		const token = new Csrf();
		const csrfSecret = token.secretSync();
		const state: CsrfStateParam = {
			token: token.create(csrfSecret),
			cid: credentialsId,
		};
		return [csrfSecret, Buffer.from(JSON.stringify(state)).toString('base64')];
	}

	private decodeCsrfState(encodedState: string): CsrfStateParam {
		const errorMessage = 'Invalid state format';
		const decoded = jsonParse<CsrfStateParam>(Buffer.from(encodedState, 'base64').toString(), {
			errorMessage,
		});
		if (typeof decoded.cid !== 'string' || typeof decoded.token !== 'string') {
			throw new ApplicationError(errorMessage);
		}
		return decoded;
	}
}
@ -115,6 +115,10 @@ export class UsersController {
			throw new NotFoundError('User not found');
		}

		if (req.user.role === 'global:admin' && user.role === 'global:owner') {
			throw new ForbiddenError('Admin cannot reset password of global owner');
		}

		const link = this.authService.generatePasswordResetUrl(user);
		return { link };
	}

@ -164,6 +168,10 @@ export class UsersController {
			);
		}

		if (userToDelete.role === 'global:owner') {
			throw new ForbiddenError('Instance owner cannot be deleted.');
		}

		const personalProjectToDelete = await this.projectRepository.getPersonalProjectForUserOrFail(
			userToDelete.id,
		);

@ -262,7 +262,10 @@ export class CredentialsService {

		const project =
			projectId === undefined
				? await this.projectRepository.getPersonalProjectForUserOrFail(user.id)
				? await this.projectRepository.getPersonalProjectForUserOrFail(
						user.id,
						transactionManager,
					)
				: await this.projectService.getProjectWithScope(
						user,
						projectId,

@ -40,7 +40,7 @@ export class ExecutionEntity {
	@Column({ nullable: true })
	retrySuccessId: string;

	@Column('varchar', { nullable: true })
	@Column('varchar')
	status: ExecutionStatus;

	@Column(datetimeColumnType)
@ -0,0 +1,22 @@
import type { IrreversibleMigration, MigrationContext } from '@/databases/types';

export class MakeExecutionStatusNonNullable1714133768521 implements IrreversibleMigration {
	async up({ escape, runQuery, schemaBuilder }: MigrationContext) {
		const executionEntity = escape.tableName('execution_entity');
		const status = escape.columnName('status');
		const finished = escape.columnName('finished');

		const query = `
			UPDATE ${executionEntity}
			SET ${status} = CASE
				WHEN ${finished} = true THEN 'success'
				WHEN ${finished} = false THEN 'error'
			END
			WHERE ${status} IS NULL;
		`;

		await runQuery(query);

		await schemaBuilder.addNotNull('execution_entity', 'status');
	}
}
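
The backfill above derives a status for legacy rows from the boolean `finished` flag before `status` is made NOT NULL. The same mapping, written out as a plain function purely for illustration (the type alias is narrowed to the two values the backfill can produce):

```ts
type BackfilledExecutionStatus = 'success' | 'error';

// Mirrors the CASE expression in the migration: finished executions become 'success', unfinished ones 'error'.
function legacyStatusFromFinished(finished: boolean): BackfilledExecutionStatus {
	return finished ? 'success' : 'error';
}

console.log(legacyStatusFromFinished(true)); // 'success'
```
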
@ -56,6 +56,7 @@ import { DropRoleMapping1705429061930 } from '../common/1705429061930-DropRoleMa
import { RemoveFailedExecutionStatus1711018413374 } from '../common/1711018413374-RemoveFailedExecutionStatus';
import { MoveSshKeysToDatabase1711390882123 } from '../common/1711390882123-MoveSshKeysToDatabase';
import { RemoveNodesAccess1712044305787 } from '../common/1712044305787-RemoveNodesAccess';
import { MakeExecutionStatusNonNullable1714133768521 } from '../common/1714133768521-MakeExecutionStatusNonNullable';

export const mysqlMigrations: Migration[] = [
	InitialMigration1588157391238,

@ -115,4 +116,5 @@ export const mysqlMigrations: Migration[] = [
	MoveSshKeysToDatabase1711390882123,
	RemoveNodesAccess1712044305787,
	CreateProject1714133768519,
	MakeExecutionStatusNonNullable1714133768521,
];

@ -55,6 +55,7 @@ import { DropRoleMapping1705429061930 } from '../common/1705429061930-DropRoleMa
import { RemoveFailedExecutionStatus1711018413374 } from '../common/1711018413374-RemoveFailedExecutionStatus';
import { MoveSshKeysToDatabase1711390882123 } from '../common/1711390882123-MoveSshKeysToDatabase';
import { RemoveNodesAccess1712044305787 } from '../common/1712044305787-RemoveNodesAccess';
import { MakeExecutionStatusNonNullable1714133768521 } from '../common/1714133768521-MakeExecutionStatusNonNullable';

export const postgresMigrations: Migration[] = [
	InitialMigration1587669153312,

@ -113,4 +114,5 @@ export const postgresMigrations: Migration[] = [
	MoveSshKeysToDatabase1711390882123,
	RemoveNodesAccess1712044305787,
	CreateProject1714133768519,
	MakeExecutionStatusNonNullable1714133768521,
];

@ -53,6 +53,7 @@ import { DropRoleMapping1705429061930 } from './1705429061930-DropRoleMapping';
import { RemoveFailedExecutionStatus1711018413374 } from '../common/1711018413374-RemoveFailedExecutionStatus';
import { MoveSshKeysToDatabase1711390882123 } from '../common/1711390882123-MoveSshKeysToDatabase';
import { RemoveNodesAccess1712044305787 } from '../common/1712044305787-RemoveNodesAccess';
import { MakeExecutionStatusNonNullable1714133768521 } from '../common/1714133768521-MakeExecutionStatusNonNullable';

const sqliteMigrations: Migration[] = [
	InitialMigration1588102412422,

@ -109,6 +110,7 @@ const sqliteMigrations: Migration[] = [
	MoveSshKeysToDatabase1711390882123,
	RemoveNodesAccess1712044305787,
	CreateProject1714133768519,
	MakeExecutionStatusNonNullable1714133768521,
];

export { sqliteMigrations };
@ -728,12 +728,17 @@ export class ExecutionRepository extends Repository<ExecutionEntity> {
		if (startedBefore) qb.andWhere({ startedAt: lessThanOrEqual(startedBefore) });
		if (startedAfter) qb.andWhere({ startedAt: moreThanOrEqual(startedAfter) });

		if (metadata) {
			qb.leftJoin(ExecutionMetadata, 'md', 'md.executionId = execution.id');
		if (metadata?.length === 1) {
			const [{ key, value }] = metadata;

			for (const item of metadata) {
				qb.andWhere('md.key = :key AND md.value = :value', item);
			}
			qb.innerJoin(
				ExecutionMetadata,
				'md',
				'md.executionId = execution.id AND md.key = :key AND md.value = :value',
			);

			qb.setParameter('key', key);
			qb.setParameter('value', value);
		}

		return qb;
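
With this change the metadata filter is only honoured when exactly one key/value pair is supplied, and it is enforced through an INNER JOIN with bound parameters instead of a LEFT JOIN plus per-item conditions. A self-contained illustration of the resulting query-builder shape, using plain `typeorm` with an in-memory SQLite database and toy entities (n8n itself uses its `@n8n/typeorm` fork and the real `ExecutionEntity`/`ExecutionMetadata` entities, so names and columns here are simplified assumptions):

```ts
import 'reflect-metadata';
import { Column, DataSource, Entity, PrimaryGeneratedColumn } from 'typeorm';

@Entity()
class Execution {
	@PrimaryGeneratedColumn() id!: number;
	@Column() status!: string;
}

@Entity()
class ExecutionMetadata {
	@PrimaryGeneratedColumn() id!: number;
	@Column() executionId!: number;
	@Column() key!: string;
	@Column() value!: string;
}

async function main() {
	const ds = await new DataSource({
		type: 'sqlite',
		database: ':memory:',
		entities: [Execution, ExecutionMetadata],
		synchronize: true,
	}).initialize();

	// Only a single key/value pair is supported, as in the hunk above.
	const metadata = [{ key: 'tenant', value: 'acme' }];

	const qb = ds.getRepository(Execution).createQueryBuilder('execution');
	if (metadata.length === 1) {
		const [{ key, value }] = metadata;
		qb.innerJoin(
			ExecutionMetadata,
			'md',
			'md.executionId = execution.id AND md.key = :key AND md.value = :value',
		);
		qb.setParameter('key', key);
		qb.setParameter('value', value);
	}

	console.log(qb.getSql()); // shows the INNER JOIN carrying the bound metadata condition
	await ds.destroy();
}

void main();
```
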
@ -17,8 +17,10 @@ export class ProjectRepository extends Repository<Project> {
		});
	}

	async getPersonalProjectForUserOrFail(userId: string) {
		return await this.findOneOrFail({
	async getPersonalProjectForUserOrFail(userId: string, entityManager?: EntityManager) {
		const em = entityManager ?? this.manager;

		return await em.findOneOrFail(Project, {
			where: { type: 'personal', projectRelations: { userId, role: 'project:personalOwner' } },
		});
	}
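
`getPersonalProjectForUserOrFail` now takes an optional `EntityManager`, so callers that already run inside a transaction (like the `CredentialsService` hunk further up, which passes `transactionManager`) can keep the lookup on that same transaction. A hedged sketch of the calling pattern; the helper function, its import paths, and the plain `typeorm` types are assumptions for illustration, not code from the diff:

```ts
import type { DataSource, EntityManager } from 'typeorm';
import type { ProjectRepository } from '@/databases/repositories/project.repository';

// Hypothetical helper: the personal-project lookup joins whatever transaction the caller opened,
// so it commits or rolls back together with the rest of the work.
async function createSomethingForUser(
	dataSource: DataSource,
	projectRepository: ProjectRepository,
	userId: string,
) {
	return await dataSource.transaction(async (trx: EntityManager) => {
		const personalProject = await projectRepository.getPersonalProjectForUserOrFail(userId, trx);
		// ...further writes via `trx` against personalProject.id go here...
		return personalProject;
	});
}
```
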
@ -1,12 +1,12 @@
import { Container } from 'typedi';
import type { EntitySubscriberInterface, UpdateEvent } from '@n8n/typeorm';
import { EventSubscriber } from '@n8n/typeorm';
import { User } from '../entities/User';
import Container from 'typedi';
import { ProjectRepository } from '../repositories/project.repository';
import { ApplicationError, ErrorReporterProxy } from 'n8n-workflow';
import { Logger } from '@/Logger';
import { UserRepository } from '../repositories/user.repository';

import { Project } from '../entities/Project';
import { User } from '../entities/User';
import { UserRepository } from '../repositories/user.repository';

@EventSubscriber()
export class UserSubscriber implements EntitySubscriberInterface<User> {

@ -27,14 +27,17 @@ export class UserSubscriber implements EntitySubscriberInterface<User> {
			fields.includes('email')
		) {
			const oldUser = event.databaseEntity;
			const name =
			const userEntity =
				newUserData instanceof User
					? newUserData.createPersonalProjectName()
					: Container.get(UserRepository).create(newUserData).createPersonalProjectName();
					? newUserData
					: Container.get(UserRepository).create(newUserData);

			const project = await Container.get(ProjectRepository).getPersonalProjectForUser(
				oldUser.id,
			);
			const projectName = userEntity.createPersonalProjectName();

			const project = await event.manager.findOneBy(Project, {
				type: 'personal',
				projectRelations: { userId: oldUser.id },
			});

			if (!project) {
				// Since this is benign we're not throwing the exception. We don't

@ -47,7 +50,7 @@ export class UserSubscriber implements EntitySubscriberInterface<User> {
				return;
			}

			project.name = name;
			project.name = projectName;

			await event.manager.save(Project, project);
		}
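
The subscriber now resolves and saves the personal project through `event.manager` rather than through `ProjectRepository`, which keeps the rename on the same entity manager (and therefore the same transaction, if any) as the user update that triggered it. A stripped-down sketch of that pattern for a generic subscriber; the entity import paths, the plain `typeorm` import, and the placeholder project name are assumptions made for the sketch:

```ts
import { EventSubscriber } from 'typeorm';
import type { EntitySubscriberInterface, UpdateEvent } from 'typeorm';
import { Project } from '@/databases/entities/Project';
import { User } from '@/databases/entities/User';

@EventSubscriber()
export class ExampleUserSubscriber implements EntitySubscriberInterface<User> {
	listenTo() {
		return User;
	}

	async afterUpdate(event: UpdateEvent<User>): Promise<void> {
		if (!event.databaseEntity) return;

		// Reads and writes go through event.manager, so they share the update's connection/transaction.
		const project = await event.manager.findOneBy(Project, {
			type: 'personal',
			projectRelations: { userId: event.databaseEntity.id },
		});
		if (!project) return;

		project.name = 'Renamed Personal Project'; // placeholder for createPersonalProjectName()
		await event.manager.save(Project, project);
	}
}
```
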
Some files were not shown because too many files have changed in this diff.